Posted to user@phoenix.apache.org by Tim Polach <tp...@salviol.com> on 2016/05/24 09:04:00 UTC

duplicated column name

Hi everyone,

We found a problem when creating a table with a duplicated column name: the CREATE statement succeeded, but the table can now neither be dropped nor queried.

Does anyone know how we can fix this, or at least delete the problematic tables?

Thanks!

Using: Phoenix 4.6.0, HBase 1.1

Logs:
Create:
2016-05-13 10:23:04,814 INFO (PhoenixUtils.java[createTableAndIndexForDate]:54) => Creating table in Phoenix...
2016-05-13 10:23:04,815 DEBUG (PhoenixUtils.java[generatePhoenixCreateTableStatement]:260) => CREATE TABLE IF NOT EXISTS TABLE_20160511 (c1 VARCHAR NOT NULL, c2 VARCHAR, c2 VARCHAR, CONSTRAINT PK PRIMARY KEY (c1))

Drop or select:
0: jdbc:phoenix:hadoop-node1> select * from TABLE_20160511;
16/05/23 22:56:17 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:322)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1619)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
        at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:11769)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1303)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1290)
        at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1727)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1196)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1615)
        ... 13 more
16/05/23 22:56:17 WARN client.HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00\x00TABLE_20160511
java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at java.util.concurrent.FutureTask.report(FutureTask.java:122)
        at java.util.concurrent.FutureTask.get(FutureTask.java:188)
        at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1739)
        at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1695)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1036)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1016)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1289)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:446)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:389)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:385)
        at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:409)
        at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:284)
        at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:185)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:376)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:357)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:263)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:258)
        at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:257)
        at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1438)
        at sqlline.Commands.execute(Commands.java:822)
        at sqlline.Commands.sql(Commands.java:732)
        at sqlline.SqlLine.dispatch(SqlLine.java:808)
        at sqlline.SqlLine.begin(SqlLine.java:681)
        at sqlline.SqlLine.start(SqlLine.java:398)
        at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:322)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1619)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
        at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:11769)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1303)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1290)
        at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1727)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1196)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1615)
        ... 13 more
Error: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException (state=08000,code=101)
org.apache.phoenix.exception.PhoenixIOException: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at org.apache.phoenix.util.ServerUtil.parseServerException(ServerUtil.java:108)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1053)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1016)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl.getTable(ConnectionQueryServicesImpl.java:1289)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:446)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:389)
        at org.apache.phoenix.schema.MetaDataClient.updateCache(MetaDataClient.java:385)
        at org.apache.phoenix.compile.FromCompiler$BaseColumnResolver.createTableRef(FromCompiler.java:409)
        at org.apache.phoenix.compile.FromCompiler$SingleTableColumnResolver.<init>(FromCompiler.java:284)
        at org.apache.phoenix.compile.FromCompiler.getResolverForQuery(FromCompiler.java:185)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:376)
        at org.apache.phoenix.jdbc.PhoenixStatement$ExecutableSelectStatement.compilePlan(PhoenixStatement.java:357)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:263)
        at org.apache.phoenix.jdbc.PhoenixStatement$1.call(PhoenixStatement.java:258)
        at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
        at org.apache.phoenix.jdbc.PhoenixStatement.executeQuery(PhoenixStatement.java:257)
        at org.apache.phoenix.jdbc.PhoenixStatement.execute(PhoenixStatement.java:1438)
        at sqlline.Commands.execute(Commands.java:822)
        at sqlline.Commands.sql(Commands.java:732)
        at sqlline.SqlLine.dispatch(SqlLine.java:808)
        at sqlline.SqlLine.begin(SqlLine.java:681)
        at sqlline.SqlLine.start(SqlLine.java:398)
        at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.DoNotRetryIOException: org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
        at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
        at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:322)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1619)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89)
        at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126)
        at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95)
        at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:11769)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1303)
        at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1290)
        at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1727)
        at java.util.concurrent.FutureTask.run(FutureTask.java:262)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.DoNotRetryIOException): org.apache.hadoop.hbase.DoNotRetryIOException: TABLE_20160511: null
        at org.apache.phoenix.util.ServerUtil.createIOException(ServerUtil.java:84)
        at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:457)
        at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:11609)
        at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7390)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1873)
        at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1855)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32209)
        at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2112)
        at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:101)
        at org.apache.hadoop.hbase.ipc.RpcExecutor.consumerLoop(RpcExecutor.java:130)
        at org.apache.hadoop.hbase.ipc.RpcExecutor$1.run(RpcExecutor.java:107)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ArrayIndexOutOfBoundsException

        at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1196)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213)
        at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287)
        at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675)
        at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1615)
        ... 13 more
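
On our side, we are also adding a guard to the code that generates the CREATE statement, so that duplicate column names are rejected before any DDL is issued. A rough sketch (ColumnGuard is a hypothetical helper of ours, not part of Phoenix):

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Hypothetical helper: reject duplicate column names before the
// CREATE TABLE statement is ever generated.
public final class ColumnGuard {
    public static void checkNoDuplicates(List<String> columnNames) {
        Set<String> seen = new LinkedHashSet<>();
        for (String name : columnNames) {
            // Phoenix folds unquoted identifiers to upper case,
            // so compare case-insensitively.
            if (!seen.add(name.toUpperCase())) {
                throw new IllegalArgumentException(
                    "Duplicate column name: " + name);
            }
        }
    }
}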

Re: duplicated column name

Posted by Sergey Soldatov <se...@gmail.com>.
Hi,
Since the table was never properly created, the only reasonable fix is to
delete its metadata directly from the Phoenix catalog:
delete from SYSTEM.CATALOG where TABLE_NAME='TABLE_20160511';

And then, in the HBase shell:
disable 'TABLE_20160511'
drop 'TABLE_20160511'
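
If you have several such tables to clean up, roughly the same steps in Java would look like this (an untested sketch; adjust the JDBC URL and HBase configuration for your cluster):

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class DropBrokenTable {
    public static void main(String[] args) throws Exception {
        String table = "TABLE_20160511";

        // 1. Remove the broken rows from the Phoenix catalog.
        try (Connection conn =
                 DriverManager.getConnection("jdbc:phoenix:hadoop-node1")) {
            conn.createStatement().executeUpdate(
                "DELETE FROM SYSTEM.CATALOG WHERE TABLE_NAME = '" + table + "'");
            conn.commit();
        }

        // 2. Disable and drop the underlying HBase table.
        Configuration conf = HBaseConfiguration.create();
        try (org.apache.hadoop.hbase.client.Connection hbase =
                 ConnectionFactory.createConnection(conf);
             Admin admin = hbase.getAdmin()) {
            TableName tn = TableName.valueOf(table);
            if (admin.tableExists(tn)) {
                admin.disableTable(tn);
                admin.deleteTable(tn);
            }
        }
    }
}

You may also need to open fresh connections afterwards, since Phoenix caches table metadata on the client.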

Thanks,
Sergey

On Tue, May 24, 2016 at 2:04 AM, Tim Polach <tp...@salviol.com> wrote:

> Hi everyone,
>
>
>
> We found a problem when creating a table with a duplicated column name:
> the CREATE statement succeeded, but the table can now neither be dropped
> nor queried.
>
>
>
> Does anyone know how we can fix this, or at least delete the problematic
> tables?
>
>
>
> Thanks!
>
>
>
> Using: Phoenix 4.6.0, HBase 1.1