You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@kylin.apache.org by Krishna Bandaru <Kr...@scanbuy.com> on 2019/07/17 16:17:03 UTC

Regarding connection exception: java.net.ConnectException

Hi,

I successfully built the cube, but I got a few errors in the logs. Please help me understand why I got these and how to get rid of them.

1)

2019-07-17 15:15:34,411 WARN  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-127] ipc.Client:920 : Failed to connect to server: ip-10-0-0-61.ec2.internal/10.0.0.61:8032: retries get failed due to exceeded maximum allowed retries number: 0

java.net.ConnectException: Connection refused

        at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)

        at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)

        at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)

        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)

        at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:685)

        at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:788)

        at org.apache.hadoop.ipc.Client$Connection.access$3500(Client.java:410)

        at org.apache.hadoop.ipc.Client.getConnection(Client.java:1550)

        at org.apache.hadoop.ipc.Client.call(Client.java:1381)

        at org.apache.hadoop.ipc.Client.call(Client.java:1345)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)

        at com.sun.proxy.$Proxy91.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.getNewApplication(ApplicationClientProtocolPBClientImpl.java:258)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:498)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:409)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:346)

        at com.sun.proxy.$Proxy92.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.getNewApplication(YarnClientImpl.java:224)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.createApplication(YarnClientImpl.java:232)

        at org.apache.hadoop.mapred.ResourceMgrDelegate.getNewJobID(ResourceMgrDelegate.java:193)

        at org.apache.hadoop.mapred.YARNRunner.getNewJobID(YARNRunner.java:241)

        at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:155)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1341)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1338)

        at java.security.AccessController.doPrivileged(Native Method)

        at javax.security.auth.Subject.doAs(Subject.java:422)

        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1844)

        at org.apache.hadoop.mapreduce.Job.submit(Job.java:1338)

        at org.apache.kylin.engine.mr.common.AbstractHadoopJob.waitForCompletion(AbstractHadoopJob.java:192)

        at org.apache.kylin.engine.mr.steps.FactDistinctColumnsJob.run(FactDistinctColumnsJob.java:111)

        at org.apache.kylin.engine.mr.common.MapReduceExecutable.doWork(MapReduceExecutable.java:131)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

2019-07-17 15:15:34,414 INFO  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-127] client.ConfiguredRMFailoverProxyProvider:100 : Failing over to rm2

2019-07-17 15:15:34,415 WARN  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-127] ipc.Client:920 : Failed to connect to server: ip-10-0-0-56.ec2.internal/10.0.0.56:8032: retries get failed due to exceeded maximum allowed retries number: 0

java.net.ConnectException: Connection refused

        at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)

        at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)

        at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)

        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)

        at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:685)

        at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:788)

        at org.apache.hadoop.ipc.Client$Connection.access$3500(Client.java:410)

        at org.apache.hadoop.ipc.Client.getConnection(Client.java:1550)

        at org.apache.hadoop.ipc.Client.call(Client.java:1381)

        at org.apache.hadoop.ipc.Client.call(Client.java:1345)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)

        at com.sun.proxy.$Proxy91.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.getNewApplication(ApplicationClientProtocolPBClientImpl.java:258)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:498)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:409)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:346)

        at com.sun.proxy.$Proxy92.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.getNewApplication(YarnClientImpl.java:224)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.createApplication(YarnClientImpl.java:232)

        at org.apache.hadoop.mapred.ResourceMgrDelegate.getNewJobID(ResourceMgrDelegate.java:193)

        at org.apache.hadoop.mapred.YARNRunner.getNewJobID(YARNRunner.java:241)

        at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:155)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1341)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1338)

        at java.security.AccessController.doPrivileged(Native Method)

        at javax.security.auth.Subject.doAs(Subject.java:422)

        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1844)

        at org.apache.hadoop.mapreduce.Job.submit(Job.java:1338)

        at org.apache.kylin.engine.mr.common.AbstractHadoopJob.waitForCompletion(AbstractHadoopJob.java:192)

        at org.apache.kylin.engine.mr.steps.FactDistinctColumnsJob.run(FactDistinctColumnsJob.java:111)

        at org.apache.kylin.engine.mr.common.MapReduceExecutable.doWork(MapReduceExecutable.java:131)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

2019-07-17 15:15:34,416 INFO  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-127] retry.RetryInvocationHandler:398 : Exception while invoking ApplicationClientProtocolPBClientImpl.getNewApplication over rm2 after 1 failover attempts. Trying to failover after sleeping for 40688ms.

java.net.ConnectException: Call From ip-10-0-0-56/10.0.0.56 to ip-10-0-0-56.ec2.internal:8032 failed on connection exception: java.net.ConnectException: Connection refused; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused

     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)

        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)

        at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:801)

        at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:732)

        at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1493)

        at org.apache.hadoop.ipc.Client.call(Client.java:1435)

        at org.apache.hadoop.ipc.Client.call(Client.java:1345)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:227)

        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)

        at com.sun.proxy.$Proxy91.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.api.impl.pb.client.ApplicationClientProtocolPBClientImpl.getNewApplication(ApplicationClientProtocolPBClientImpl.java:258)

        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

        at java.lang.reflect.Method.invoke(Method.java:498)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:409)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:163)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:155)

        at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)

        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:346)

        at com.sun.proxy.$Proxy92.getNewApplication(Unknown Source)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.getNewApplication(YarnClientImpl.java:224)

        at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.createApplication(YarnClientImpl.java:232)

        at org.apache.hadoop.mapred.ResourceMgrDelegate.getNewJobID(ResourceMgrDelegate.java:193)

        at org.apache.hadoop.mapred.YARNRunner.getNewJobID(YARNRunner.java:241)

        at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:155)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1341)

        at org.apache.hadoop.mapreduce.Job$11.run(Job.java:1338)

        at java.security.AccessController.doPrivileged(Native Method)

        at javax.security.auth.Subject.doAs(Subject.java:422)

        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1844)

        at org.apache.hadoop.mapreduce.Job.submit(Job.java:1338)

        at org.apache.kylin.engine.mr.common.AbstractHadoopJob.waitForCompletion(AbstractHadoopJob.java:192)

        at org.apache.kylin.engine.mr.steps.FactDistinctColumnsJob.run(FactDistinctColumnsJob.java:111)

        at org.apache.kylin.engine.mr.common.MapReduceExecutable.doWork(MapReduceExecutable.java:131)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: java.net.ConnectException: Connection refused

        at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)

        at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)

        at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)

        at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)

        at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:685)

        at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:788)

        at org.apache.hadoop.ipc.Client$Connection.access$3500(Client.java:410)

        at org.apache.hadoop.ipc.Client.getConnection(Client.java:1550)

        at org.apache.hadoop.ipc.Client.call(Client.java:1381)

        ... 36 more

2)
Actually, the DEFAULT.KYLIN_CAL_DT table exists in Hive, and my hive-site.xml has the metastore URIs configured:


 <property>

    <name>hive.metastore.uris</name>

    <value>thrift://ip-10-0-0-61.ec2.internal:9083,thrift://ip-10-0-0-56.ec2.internal:9083,thrift://ip-10-0-0-55.ec2.internal:9083</value>

    <description>JDBC connect string for a JDBC metastore</description>

  </property>


2019-07-17 15:17:59,551 ERROR [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-203] common.HadoopShellExecutable:65 : error execute HadoopShellExecutable{id=6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-02, name=Build Dimension Dictionary, state=RUNNING}

java.lang.RuntimeException: cannot get HiveTableMeta

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:50)

        at org.apache.kylin.source.hive.HiveSource.createReadableTable(HiveSource.java:68)

        at org.apache.kylin.source.SourceManager.createReadableTable(SourceManager.java:144)

        at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1115)

        at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1032)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:90)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:49)

        at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:71)

        at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:92)

        at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: NoSuchObjectException(message:DEFAULT.KYLIN_CAL_DT table not found)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55064)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55032)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result.read(ThriftHiveMetastore.java:54963)

        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:86)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table_req(ThriftHiveMetastore.java:1563)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table_req(ThriftHiveMetastore.java:1550)

        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:1344)

        at org.apache.kylin.source.hive.CLIHiveClient.getHiveTableMeta(CLIHiveClient.java:78)

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:48)

        ... 16 more

2019-07-17 15:17:59,559 INFO  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-203] execution.ExecutableManager:434 : job id:6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-02 from RUNNING to ERROR

2019-07-17 15:17:59,560 ERROR [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-203] execution.AbstractExecutable:165 : error running Executable: CubingJob{id=6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f, name=BUILD CUBE - kylin_sales_cube - 20120101000000_20190730235500 - GMT+08:00 2019-07-17 23:13:15, state=RUNNING}

2019-07-17 15:17:59,564 DEBUG [pool-5-thread-1] cachesync.Broadcaster:113 : Servers in the cluster: [ip-10-0-0-56.ec2.internal:7070, ip-10-0-0-55.ec2.internal:7070, ip-10-0-0-61.ec2.internal:7070]

2019-07-17 15:17:59,564 DEBUG [pool-5-thread-1] cachesync.Broadcaster:123 : Announcing new broadcast to all: BroadcastEvent{entity=execute_output, event=update, cacheKey=6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f}

2019-07-17 15:17:59,570 INFO  [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-203] execution.ExecutableManager:434 : job id:6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f from RUNNING to ERROR

2019-07-17 15:17:59,570 DEBUG [Scheduler 1724205462 Job 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f-203] execution.AbstractExecutable:316 : no need to send email, user list is empty

2019-07-17 15:17:59,570 DEBUG [http-bio-7070-exec-4] cachesync.Broadcaster:247 : Broadcasting UPDATE, execute_output, 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f

2019-07-17 15:17:59,570 DEBUG [pool-5-thread-1] cachesync.Broadcaster:113 : Servers in the cluster: [ip-10-0-0-56.ec2.internal:7070, ip-10-0-0-55.ec2.internal:7070, ip-10-0-0-61.ec2.internal:7070]

2019-07-17 15:17:59,572 DEBUG [pool-5-thread-1] cachesync.Broadcaster:123 : Announcing new broadcast to all: BroadcastEvent{entity=execute_output, event=update, cacheKey=6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f}

2019-07-17 15:17:59,573 DEBUG [http-bio-7070-exec-4] cachesync.Broadcaster:281 : Done broadcasting UPDATE, execute_output, 6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f

2019-07-17 15:17:59,575 ERROR [pool-11-thread-3] threadpool.DistributedScheduler:114 : ExecuteException job:6c84f9f5-d6c9-6b6f-2b77-39e63fe1885f in server: 3483@ip-10-0-0-56

org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.RuntimeException: cannot get HiveTableMeta

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:50)

        at org.apache.kylin.source.hive.HiveSource.createReadableTable(HiveSource.java:68)

        at org.apache.kylin.source.SourceManager.createReadableTable(SourceManager.java:144)

        at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1115)

        at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1032)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:90)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:49)

        at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:71)

        at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:92)

        at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: NoSuchObjectException(message:DEFAULT.KYLIN_CAL_DT table not found)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55064)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55032)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result.read(ThriftHiveMetastore.java:54963)

        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:86)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table_req(ThriftHiveMetastore.java:1563)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table_req(ThriftHiveMetastore.java:1550)

        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:1344)

        at org.apache.kylin.source.hive.CLIHiveClient.getHiveTableMeta(CLIHiveClient.java:78)

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:48)

        ... 16 more


result code:2

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:178)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: org.apache.kylin.job.exception.ExecuteException: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.RuntimeException: cannot get HiveTableMeta

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:50)

        at org.apache.kylin.source.hive.HiveSource.createReadableTable(HiveSource.java:68)

        at org.apache.kylin.source.SourceManager.createReadableTable(SourceManager.java:144)

        at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1115)

        at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1032)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:90)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:49)

        at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:71)

        at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:92)

        at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: NoSuchObjectException(message:DEFAULT.KYLIN_CAL_DT table not found)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55064)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55032)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result.read(ThriftHiveMetastore.java:54963)

        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:86)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table_req(ThriftHiveMetastore.java:1563)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table_req(ThriftHiveMetastore.java:1550)

        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:1344)

        at org.apache.kylin.source.hive.CLIHiveClient.getHiveTableMeta(CLIHiveClient.java:78)

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:48)

        ... 16 more


result code:2

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:178)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        ... 4 more

Caused by: org.apache.kylin.engine.mr.exception.HadoopShellException: java.lang.RuntimeException: cannot get HiveTableMeta

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:50)

        at org.apache.kylin.source.hive.HiveSource.createReadableTable(HiveSource.java:68)

        at org.apache.kylin.source.SourceManager.createReadableTable(SourceManager.java:144)

        at org.apache.kylin.cube.CubeManager$DictionaryAssist.buildSnapshotTable(CubeManager.java:1115)

        at org.apache.kylin.cube.CubeManager.buildSnapshotTable(CubeManager.java:1032)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:90)

        at org.apache.kylin.cube.cli.DictionaryGeneratorCLI.processSegment(DictionaryGeneratorCLI.java:49)

        at org.apache.kylin.engine.mr.steps.CreateDictionaryJob.run(CreateDictionaryJob.java:71)

        at org.apache.kylin.engine.mr.MRUtil.runMRJob(MRUtil.java:92)

        at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:63)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.execution.DefaultChainedExecutable.doWork(DefaultChainedExecutable.java:69)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        at org.apache.kylin.job.impl.threadpool.DistributedScheduler$JobRunner.run(DistributedScheduler.java:111)

        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)

        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)

        at java.lang.Thread.run(Thread.java:748)

Caused by: NoSuchObjectException(message:DEFAULT.KYLIN_CAL_DT table not found)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55064)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result$get_table_req_resultStandardScheme.read(ThriftHiveMetastore.java:55032)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$get_table_req_result.read(ThriftHiveMetastore.java:54963)

        at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:86)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_get_table_req(ThriftHiveMetastore.java:1563)

        at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.get_table_req(ThriftHiveMetastore.java:1550)

        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(HiveMetaStoreClient.java:1344)

        at org.apache.kylin.source.hive.CLIHiveClient.getHiveTableMeta(CLIHiveClient.java:78)

        at org.apache.kylin.source.hive.HiveTable.<init>(HiveTable.java:48)

        ... 16 more


result code:2

        at org.apache.kylin.engine.mr.common.HadoopShellExecutable.doWork(HadoopShellExecutable.java:73)

        at org.apache.kylin.job.execution.AbstractExecutable.execute(AbstractExecutable.java:163)

        ... 6 more



Thanks,

Krishna