You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@chukwa.apache.org by 李洁 <wy...@gmail.com> on 2013/02/22 19:04:42 UTC

question of the installation environment chukwa

    Hi! I'm a Chukwa user. Recently I have been configuring Chukwa, but it
always fails. Can anyone provide a list of a known-good installation
environment? For example: which operating system to use; which Hadoop,
HBase, and Pig versions to use; and how to set the time and time zone.

   Based on the following log output, can you give me some advice?
    The installation environment: Hadoop 1.0.3 + HBase 0.90.5 + Chukwa 0.5.0 + Pig
0.9.2
2013-02-22 07:04:16,189 INFO btpool0-1 ZooKeeper - Initiating client
connection, connectString=dn2.dev:21818,dn1.dev:21818,hb.dev:21818
sessionTimeout=60000 watcher=hconnection
2013-02-22 07:04:16,204 INFO btpool0-1-SendThread() ClientCnxn - Opening
socket connection to server dn2.dev/192.168.13.38:21818
2013-02-22 07:04:16,206 INFO btpool0-1-SendThread(dn2.dev:21818) ClientCnxn
- Socket connection established to dn2.dev/192.168.13.38:21818, initiating
session
2013-02-22 07:04:16,216 INFO btpool0-1-SendThread(dn2.dev:21818) ClientCnxn
- Session establishment complete on server dn2.dev/192.168.13.38:21818,
sessionid = 0x23d006a2c990011, negotiated timeout = 60000
2013-02-22 07:04:21,694 WARN btpool0-1 HadoopMetricsProcessor - Wrong
format in HadoopMetricsProcessor [INFO chukwa.metrics.HadoopMetrics:
{"port":"51770","timestamp":1361516611630,"recordName":"rpc","context":"rpc","hostName":"dn1.dev","contextName":"rpc"}
]
java.text.ParseException: Unparseable date: "INFO chukwa.metrics.Ha"
    at java.text.DateFormat.parse(DateFormat.java:337)
    at
org.apache.hadoop.chukwa.extraction.demux.processor.mapper.HadoopMetricsProcessor.parse(HadoopMetricsProcessor.java:97)
    at
org.apache.hadoop.chukwa.extraction.demux.processor.mapper.AbstractProcessor.process(AbstractProcessor.java:81)
    at
org.apache.hadoop.chukwa.datacollection.writer.hbase.HBaseWriter.add(HBaseWriter.java:194)
    at
org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter.add(PipelineStageWriter.java:40)
    at
org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector.accept(ServletCollector.java:159)
    at
org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector.doPost(ServletCollector.java:208)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
    at
org.mortbay.jetty.servlet.ServletHolder.handle(ServletHolder.java:511)
    at
org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:401)
    at
org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
    at
org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:766)
    at
org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
    at org.mortbay.jetty.Server.handle(Server.java:326)
    at
org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
    at
org.mortbay.jetty.HttpConnection$RequestHandler.content(HttpConnection.java:945)
    at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:756)
    at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:218)
    at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
    at
org.mortbay.io.nio.SelectChannelEndPoint.run(SelectChannelEndPoint.java:410)
    at
org.mortbay.thread.BoundedThreadPool$PoolThread.run(BoundedThreadPool.java:451)
2013-02-22 07:04:21,724 WARN btpool0-1 HBaseWriter -
[row=dn1.dev-HadoopMetrics, families={(family=HadoopMetricsInError,
keyvalues=(dn1.dev-HadoopMetrics/HadoopMetricsInError:capp/1361516661696/Put/vlen=7,
dn1.dev-HadoopMetrics/HadoopMetricsInError:cchunkData/1361516661696/Put/vlen=232,
dn1.dev-HadoopMetrics/HadoopMetricsInError:cchunkException/1361516661696/Put/vlen=1880,
dn1.dev-HadoopMetrics/HadoopMetricsInError:csource/1361516661696/Put/vlen=7,
dn1.dev-HadoopMetrics/HadoopMetricsInError:ctags/1361516661696/Put/vlen=17)}]
2013-02-22 07:04:21,726 WARN btpool0-1 HBaseWriter -
org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException: Failed
1 action: org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException:
Column family HadoopMetricsInError does not exist in region
Hadoop,,1361512768478.b73e61dc5af76e9ce6283b43ded8713b. in table {NAME =>
'Hadoop', FAMILIES => [{NAME => 'ClientTrace', BLOOMFILTER => 'NONE',
REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS => '65535', TTL
=> '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE =>
'true'}, {NAME => 'dfs_FSNamesystem', BLOOMFILTER => 'NONE',
REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS => '65535', TTL
=> '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE =>
'true'}, {NAME => 'dfs_datanode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE
=> '0', COMPRESSION => 'NONE', VERSIONS => '65535', TTL => '2147483647',
BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME =>
'dfs_namenode', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0',
COMPRESSION => 'NONE', VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE
=> '65536', IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME =>
'jvm_metrics', BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION
=> 'NONE', VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536',
IN_MEMORY => 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_Queue',
BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE',
VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY
=> 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_jobtracker',
BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE',
VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY
=> 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_shuffleOutput',
BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE',
VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY
=> 'false', BLOCKCACHE => 'true'}, {NAME => 'mapred_tasktracker',
BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE',
VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY
=> 'false', BLOCKCACHE => 'true'}, {NAME => 'metricssystem_MetricsSystem',
BLOOMFILTER => 'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE',
VERSIONS => '65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY
=> 'false', BLOCKCACHE => 'true'}, {NAME => 'rpc_rpc', BLOOMFILTER =>
'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS =>
'65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false',
BLOCKCACHE => 'true'}, {NAME => 'rpcdetailed_rpcdetailed', BLOOMFILTER =>
'NONE', REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS =>
'65535', TTL => '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false',
BLOCKCACHE => 'true'}, {NAME => 'ugi_ugi', BLOOMFILTER => 'NONE',
REPLICATION_SCOPE => '0', COMPRESSION => 'NONE', VERSIONS => '65535', TTL
=> '2147483647', BLOCKSIZE => '65536', IN_MEMORY => 'false', BLOCKCACHE =>
'true'}]}
    at
org.apache.hadoop.hbase.regionserver.HRegionServer.multi(HRegionServer.java:2659)
    at sun.reflect.GeneratedMethodAccessor6.invoke(Unknown Source)
    at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.hbase.ipc.HBaseRPC$Server.call(HBaseRPC.java:570)
    at
org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1039)
: 1 time, servers with issues: dn1.dev:60020,
    at
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatch(HConnectionManager.java:1424)
    at
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.processBatchOfPuts(HConnectionManager.java:1438)
    at org.apache.hadoop.hbase.client.HTable.flushCommits(HTable.java:857)
    at org.apache.hadoop.hbase.client.HTable.doPut(HTable.java:713)
    at org.apache.hadoop.hbase.client.HTable.put(HTable.java:696)
    at
org.apache.hadoop.chukwa.datacollection.writer.hbase.HBaseWriter.add(HBaseWriter.java:195)
    at
org.apache.hadoop.chukwa.datacollection.writer.PipelineStageWriter.add(PipelineStageWriter.java:40)
    at
org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector.accept(ServletCollector.java:159)
    at
org.apache.hadoop.chukwa.datacollection.collector.servlet.ServletCollector.doPost(ServletCollector.java:208)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
    at
org.mortbay.jetty.servlet.ServletHolder.handle(ServletHolder.java:511)
    at
org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:401)
    at
org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
    at
org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:766)
    at
org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
    at org.mortbay.jetty.Server.handle(Server.java:326)
    at
org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
    at
org.mortbay.jetty.HttpConnection$RequestHandler.content(HttpConnection.java:945)
    at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:756)
    at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:218)
    at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
    at
org.mortbay.io.nio.SelectChannelEndPoint.run(SelectChannelEndPoint.java:410)
    at
org.mortbay.thread.BoundedThreadPool$PoolThread.run(BoundedThreadPool.java:451)