Posted to dev@kylin.apache.org by 和风 <36...@qq.com> on 2016/01/05 04:20:12 UTC

org.apache.hadoop.hive.ql.metadata.HiveException

Hi,
  When executing a cube "build", the job fails with this exception: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException


logs:


OS command error exit with 2 -- hive  -e "USE default;
DROP TABLE IF EXISTS kylin_intermediate_learn_kylin_two_20131229000000_20160112000000_d22e7c10_032a_4d22_a802_3b74937e86db;


CREATE EXTERNAL TABLE IF NOT EXISTS kylin_intermediate_learn_kylin_two_20131229000000_20160112000000_d22e7c10_032a_4d22_a802_3b74937e86db
(
DEFAULT_KYLIN_CAL_DT_AGE_FOR_QTR_ID smallint
,DEFAULT_KYLIN_CAL_DT_AGE_FOR_MONTH_ID smallint
,DEFAULT_KYLIN_CAL_DT_AGE_FOR_DT_ID smallint
,DEFAULT_KYLIN_CAL_DT_AGE_FOR_RTL_MONTH_ID smallint
,DEFAULT_KYLIN_CAL_DT_AGE_FOR_CS_WEEK_ID smallint
,DEFAULT_KYLIN_CAL_DT_YEAR_ID string
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\177'
STORED AS SEQUENCEFILE
LOCATION '/kylin/kylin_metadata/kylin-d22e7c10-032a-4d22-a802-3b74937e86db/kylin_intermediate_learn_kylin_two_20131229000000_20160112000000_d22e7c10_032a_4d22_a802_3b74937e86db';


SET mapreduce.job.split.metainfo.maxsize=-1;
SET mapred.compress.map.output=true;
SET mapred.map.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;
SET mapred.output.compress=true;
SET mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;
SET mapred.output.compression.type=BLOCK;
SET mapreduce.job.max.split.locations=2000;
SET dfs.replication=2;
SET hive.merge.mapfiles=true;
SET hive.merge.mapredfiles=true;
SET hive.merge.size.per.task=268435456;
SET hive.support.concurrency=false;
SET hive.exec.compress.output=true;
SET hive.auto.convert.join.noconditionaltask = true;
SET hive.auto.convert.join.noconditionaltask.size = 300000000;
INSERT OVERWRITE TABLE kylin_intermediate_learn_kylin_two_20131229000000_20160112000000_d22e7c10_032a_4d22_a802_3b74937e86db SELECT
KYLIN_CAL_DT.AGE_FOR_QTR_ID
,KYLIN_CAL_DT.AGE_FOR_MONTH_ID
,KYLIN_CAL_DT.AGE_FOR_DT_ID
,KYLIN_CAL_DT.AGE_FOR_RTL_MONTH_ID
,KYLIN_CAL_DT.AGE_FOR_CS_WEEK_ID
,KYLIN_CAL_DT.YEAR_ID
FROM DEFAULT.KYLIN_CAL_DT as KYLIN_CAL_DT 
WHERE (KYLIN_CAL_DT.CAL_DT >= '2013-12-29' AND KYLIN_CAL_DT.CAL_DT < '2016-01-12')
;


"


Logging initialized using configuration in jar:file:/usr/local/hive/lib/hive-common-1.2.1.jar!/hive-log4j.properties
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hive/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
OK
Time taken: 0.936 seconds
OK
Time taken: 0.112 seconds
OK
Time taken: 0.438 seconds
Query ID = root_20160105105405_88149f4a-a970-47d0-ba32-9a21ee5afde3
Total jobs = 3
Launching Job 1 out of 3
Number of reduce tasks is set to 0 since there's no reduce operator
Starting Job = job_1449731904014_1636, Tracking URL = http://cloud001:8088/proxy/application_1449731904014_1636/
Kill Command = /usr/local/hadoop/bin/hadoop job  -kill job_1449731904014_1636
Hadoop job information for Stage-1: number of mappers: 1; number of reducers: 0
2016-01-05 10:54:26,177 Stage-1 map = 0%,  reduce = 0%
2016-01-05 10:54:27,236 Stage-1 map = 100%,  reduce = 0%
Ended Job = job_1449731904014_1636 with errors
Error during job, obtaining debugging information...
Examining task ID: task_1449731904014_1636_m_000000 (and more) from job job_1449731904014_1636


Task with the most failures(1): 
-----
Task ID:
  task_1449731904014_1636_m_000000


URL:
  http://0.0.0.0:8088/taskdetails.jsp?jobid=job_1449731904014_1636&tipid=task_1449731904014_1636_m_000000
-----
Diagnostic Messages for this Task:
java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"cal_dt":"2013-12-31","year_beg_dt":"2013-01-01","qtr_beg_dt":"2013-10-01","month_beg_dt":"2013-12-01","week_beg_dt":"2013-12-29","age_for_year_id":0,"age_for_qtr_id":0,"age_for_month_id":1,"age_for_week_id":5,"age_for_dt_id":34,"age_for_rtl_year_id":1,"age_for_rtl_qtr_id":1,"age_for_rtl_month_id":1,"age_for_rtl_week_id":5,"age_for_cs_week_id":5,"day_of_cal_id":41638,"day_of_year_id":365,"day_of_qtr_id":92,"day_of_month_id":31,"day_of_week_id":3,"week_of_year_id":53,"week_of_cal_id":5948,"month_of_qtr_id":3,"month_of_year_id":12,"month_of_cal_id":1368,"qtr_of_year_id":4,"qtr_of_cal_id":456,"year_of_cal_id":114,"year_end_dt":"2013-12-31","qtr_end_dt":"2013-12-31","month_end_dt":"2013-12-31","week_end_dt":"2013-12-31","cal_dt_name":"31-Dec-2013","cal_dt_desc":"Dec 31st 2013","cal_dt_short_name":"Tue 12-31-13","ytd_yn_id":0,"qtd_yn_id":0,"mtd_yn_id":0,"wtd_yn_id":0,"season_beg_dt":"2013-12-21","day_in_year_count":365,"day_in_qtr_count":92,"day_in_month_count":31,"day_in_week_count":3,"rtl_year_beg_dt":"2013-12-29","rtl_qtr_beg_dt":"2013-12-29","rtl_month_beg_dt":"2013-12-29","rtl_week_beg_dt":"2013-12-29","cs_week_beg_dt":"2013-12-30","cal_date":"2013-12-31","day_of_week":"Tue       ","month_id":"2013M12","prd_desc":"Dec-2013","prd_flag":"N","prd_id":"2013M12   ","prd_ind":"N","qtr_desc":"Year 2013 - Quarter 04","qtr_id":"2013Q04   ","qtr_ind":"N","retail_week":"1","retail_year":"2014","retail_start_date":"2013-12-29","retail_wk_end_date":"2014-01-04","week_ind":"N","week_num_desc":"Wk.53 - 13","week_beg_date":"2013-12-29 00:00:00","week_end_date":"2013-12-31 00:00:00","week_in_year_id":"2013W53   ","week_id":"2013W53   ","week_beg_end_desc_mdy":"12/29/13 - 12/31/13","week_beg_end_desc_md":"12/29 - 12/31","year_id":"2013","year_ind":"N","cal_dt_mns_1year_dt":"2012-12-31","cal_dt_mns_2year_dt":"2011-12-31","cal_dt_mns_1qtr_dt":"2013-09-30","cal_dt_mns_2qtr_dt":"2013-06-30","cal_dt_mns_1month_dt":"2013-11-30","cal_dt_mns_2month_dt":"2013-10-31","cal_dt_mns_1week_dt":"2013-12-24","cal_dt_mns_2week_dt":"2013-12-17","curr_cal_dt_mns_1year_yn_id":0,"curr_cal_dt_mns_2year_yn_id":0,"curr_cal_dt_mns_1qtr_yn_id":0,"curr_cal_dt_mns_2qtr_yn_id":0,"curr_cal_dt_mns_1month_yn_id":0,"curr_cal_dt_mns_2month_yn_id":0,"curr_cal_dt_mns_1week_yn_ind":0,"curr_cal_dt_mns_2week_yn_ind":0,"rtl_month_of_rtl_year_id":"1","rtl_qtr_of_rtl_year_id":1,"rtl_week_of_rtl_year_id":1,"season_of_year_id":1,"ytm_yn_id":0,"ytq_yn_id":1,"ytw_yn_id":0,"cre_date":"2005-09-07","cre_user":"USER_X  ","upd_date":"2013-11-27 00:16:56","upd_user":"USER_X"}
	at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:172)
	at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
	at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:453)
	at org.apache.hadoop.mapred.MapTask.run(MapTask.java:343)
	at org.apache.hadoop.mapred.LocalContainerLauncher$EventHandler.runSubtask(LocalContainerLauncher.java:380)
	at org.apache.hadoop.mapred.LocalContainerLauncher$EventHandler.runTask(LocalContainerLauncher.java:301)
	at org.apache.hadoop.mapred.LocalContainerLauncher$EventHandler.access$200(LocalContainerLauncher.java:187)
	at org.apache.hadoop.mapred.LocalContainerLauncher$EventHandler$1.run(LocalContainerLauncher.java:230)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row {"cal_dt":"2013-12-31","year_beg_dt":"2013-01-01","qtr_beg_dt":"2013-10-01","month_beg_dt":"2013-12-01","week_beg_dt":"2013-12-29","age_for_year_id":0,"age_for_qtr_id":0,"age_for_month_id":1,"age_for_week_id":5,"age_for_dt_id":34,"age_for_rtl_year_id":1,"age_for_rtl_qtr_id":1,"age_for_rtl_month_id":1,"age_for_rtl_week_id":5,"age_for_cs_week_id":5,"day_of_cal_id":41638,"day_of_year_id":365,"day_of_qtr_id":92,"day_of_month_id":31,"day_of_week_id":3,"week_of_year_id":53,"week_of_cal_id":5948,"month_of_qtr_id":3,"month_of_year_id":12,"month_of_cal_id":1368,"qtr_of_year_id":4,"qtr_of_cal_id":456,"year_of_cal_id":114,"year_end_dt":"2013-12-31","qtr_end_dt":"2013-12-31","month_end_dt":"2013-12-31","week_end_dt":"2013-12-31","cal_dt_name":"31-Dec-2013","cal_dt_desc":"Dec 31st 2013","cal_dt_short_name":"Tue 12-31-13","ytd_yn_id":0,"qtd_yn_id":0,"mtd_yn_id":0,"wtd_yn_id":0,"season_beg_dt":"2013-12-21","day_in_year_count":365,"day_in_qtr_count":92,"day_in_month_count":31,"day_in_week_count":3,"rtl_year_beg_dt":"2013-12-29","rtl_qtr_beg_dt":"2013-12-29","rtl_month_beg_dt":"2013-12-29","rtl_week_beg_dt":"2013-12-29","cs_week_beg_dt":"2013-12-30","cal_date":"2013-12-31","day_of_week":"Tue       ","month_id":"2013M12","prd_desc":"Dec-2013","prd_flag":"N","prd_id":"2013M12   ","prd_ind":"N","qtr_desc":"Year 2013 - Quarter 04","qtr_id":"2013Q04   ","qtr_ind":"N","retail_week":"1","retail_year":"2014","retail_start_date":"2013-12-29","retail_wk_end_date":"2014-01-04","week_ind":"N","week_num_desc":"Wk.53 - 13","week_beg_date":"2013-12-29 00:00:00","week_end_date":"2013-12-31 00:00:00","week_in_year_id":"2013W53   ","week_id":"2013W53   ","week_beg_end_desc_mdy":"12/29/13 - 12/31/13","week_beg_end_desc_md":"12/29 - 12/31","year_id":"2013","year_ind":"N","cal_dt_mns_1year_dt":"2012-12-31","cal_dt_mns_2year_dt":"2011-12-31","cal_dt_mns_1qtr_dt":"2013-09-30","cal_dt_mns_2qtr_dt":"2013-06-30","cal_dt_mns_1month_dt":"2013-11-30","cal_dt_mns_2month_dt":"2013-10-31","cal_dt_mns_1week_dt":"2013-12-24","cal_dt_mns_2week_dt":"2013-12-17","curr_cal_dt_mns_1year_yn_id":0,"curr_cal_dt_mns_2year_yn_id":0,"curr_cal_dt_mns_1qtr_yn_id":0,"curr_cal_dt_mns_2qtr_yn_id":0,"curr_cal_dt_mns_1month_yn_id":0,"curr_cal_dt_mns_2month_yn_id":0,"curr_cal_dt_mns_1week_yn_ind":0,"curr_cal_dt_mns_2week_yn_ind":0,"rtl_month_of_rtl_year_id":"1","rtl_qtr_of_rtl_year_id":1,"rtl_week_of_rtl_year_id":1,"season_of_year_id":1,"ytm_yn_id":0,"ytq_yn_id":1,"ytw_yn_id":0,"cre_date":"2005-09-07","cre_user":"USER_X  ","upd_date":"2013-11-27 00:16:56","upd_user":"USER_X"}
	at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:518)
	at org.apache.hadoop.hive.ql.exec.mr.ExecMapper.map(ExecMapper.java:163)
	... 12 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:577)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:675)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
	at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:88)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
	at org.apache.hadoop.hive.ql.exec.FilterOperator.process(FilterOperator.java:122)
	at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:837)
	at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:97)
	at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:162)
	at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:508)
	... 13 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:249)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketForFileIdx(FileSinkOperator.java:622)
	at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createBucketFiles(FileSinkOperator.java:566)
	... 22 more
Caused by: java.lang.RuntimeException: native snappy library not available: this version of libhadoop was built without snappy support.
	at org.apache.hadoop.io.compress.SnappyCodec.checkNativeCodeLoaded(SnappyCodec.java:65)
	at org.apache.hadoop.io.compress.SnappyCodec.getCompressorType(SnappyCodec.java:134)
	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:150)
	at org.apache.hadoop.io.compress.CodecPool.getCompressor(CodecPool.java:165)
	at org.apache.hadoop.io.SequenceFile$Writer.init(SequenceFile.java:1201)
	at org.apache.hadoop.io.SequenceFile$Writer.<init>(SequenceFile.java:1094)
	at org.apache.hadoop.io.SequenceFile$BlockCompressWriter.<init>(SequenceFile.java:1444)
	at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:277)
	at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:530)
	at org.apache.hadoop.hive.ql.exec.Utilities.createSequenceWriter(Utilities.java:1508)
	at org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat.getHiveRecordWriter(HiveSequenceFileOutputFormat.java:64)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getRecordWriter(HiveFileFormatUtils.java:261)
	at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveRecordWriter(HiveFileFormatUtils.java:246)
	... 24 more




FAILED: Execution Error, return code 2 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask
MapReduce Jobs Launched: 
Stage-Stage-1: Map: 1   HDFS Read: 0 HDFS Write: 0 FAIL
Total MapReduce CPU Time Spent: 0 msec

Re: org.apache.hadoop.hive.ql.metadata.HiveException

Posted by hongbin ma <ma...@apache.org>.
agree with Sai




-- 
Regards,

*Bin Mahone | 马洪宾*
Apache Kylin: http://kylin.io
Github: https://github.com/binmahone

Re: org.apache.hadoop.hive.ql.metadata.HiveException

Posted by Kiriti Sai <ki...@gmail.com>.
Hi,
This error occurs because no Snappy compression codec is available in your
setup, while Kylin expects one by default.
As a workaround, you can disable the use of Snappy in Kylin's configuration
files (a sketch of the relevant lines follows below):
> Comment out the compression.codec line in kylin.properties.
> Comment out the compression-related properties in kylin_job_conf.xml; there
should be around 4 properties to comment out.
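
Here is a rough illustration of the change, assuming a typical Kylin 1.x layout;
the exact key in kylin.properties may differ by version, so take the names below
as examples rather than exact values. The kylin_job_conf.xml properties are the
same ones visible as SET statements in the failing Hive command above:

    # conf/kylin.properties -- comment out the codec override (key name may vary by version)
    # kylin.hbase.default.compression.codec=snappy

    <!-- conf/kylin_job_conf.xml -- comment out the Snappy-related properties, e.g.: -->
    <!--
    <property>
      <name>mapred.compress.map.output</name>
      <value>true</value>
    </property>
    <property>
      <name>mapred.map.output.compression.codec</name>
      <value>org.apache.hadoop.io.compress.SnappyCodec</value>
    </property>
    <property>
      <name>mapred.output.compress</name>
      <value>true</value>
    </property>
    <property>
      <name>mapred.output.compression.codec</name>
      <value>org.apache.hadoop.io.compress.SnappyCodec</value>
    </property>
    -->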

This is the workaround I used for a while, but it is generally recommended to
keep compression enabled to reduce the amount of data shuffled between the map
and reduce stages.
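
If you would prefer to keep Snappy enabled instead, you could first verify
whether the native library is really missing on the cluster nodes (the root
cause reported in the stack trace), for example with:

    hadoop checknative -a

If snappy is reported as false, libhadoop was built without Snappy support, and
you would need native libraries that include it before re-enabling the codec.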

Thank you.
Sai Kiriti B