Posted to dev@phoenix.apache.org by "Salvatore LaMendola (JIRA)" <ji...@apache.org> on 2018/03/30 02:08:00 UTC

[jira] [Updated] (PHOENIX-4681) Test existence of SYSTEM:CATALOG before attempting to create it

     [ https://issues.apache.org/jira/browse/PHOENIX-4681?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Salvatore LaMendola updated PHOENIX-4681:
-----------------------------------------
    Description: 
WARN stack traces similar to the one below are printed when starting SQLLine after enabling Phoenix namespace support. After speaking with [~elserj], it became apparent that this is a bug which may have already been fixed; however, [~smayani] and I were unable to find an existing JIRA for it. I propose testing for the existence of {{SYSTEM:CATALOG}} before attempting to create it, so that this stack trace isn't printed at every startup (until someone finally caves and grants {{CREATE}} permission on the entire {{@SYSTEM}} namespace). A rough sketch of such a check follows the stack trace below. If I can find the time, I'd like to attempt a patch, but I'd first like community input on the preferred way to fix this.


{code:java}
18/03/29 19:29:21 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01@EXAMPLE.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:335)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1625)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:100)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:136)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:104)
	at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.createTable(MetaDataProtos.java:14549)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1470)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1457)
	at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1736)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.security.AccessDeniedException): org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01@EXAMPLE.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1227)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:218)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:292)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32855)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
	... 13 more
18/03/29 19:29:21 WARN client.HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00SYSTEM\x00CATALOG
java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01@EXAMPLE.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at java.util.concurrent.FutureTask.report(FutureTask.java:122)
	at java.util.concurrent.FutureTask.get(FutureTask.java:192)
	at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1748)
	at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1704)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1279)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1260)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1456)
	at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:2190)
	at org.apache.phoenix.schema.MetaDataClient.createTable(MetaDataClient.java:872)
	at org.apache.phoenix.compile.CreateTableCompiler$2.execute(CreateTableCompiler.java:194)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:343)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:331)
	at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:330)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1421)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$13.call(ConnectionQueryServicesImpl.java:2382)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$13.call(ConnectionQueryServicesImpl.java:2330)
	at org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:78)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:2330)
	at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:237)
	at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(PhoenixEmbeddedDriver.java:150)
	at org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:205)
	at sqlline.DatabaseConnection.connect(DatabaseConnection.java:157)
	at sqlline.DatabaseConnection.getConnection(DatabaseConnection.java:203)
	at sqlline.Commands.connect(Commands.java:1064)
	at sqlline.Commands.connect(Commands.java:996)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at sqlline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:36)
	at sqlline.SqlLine.dispatch(SqlLine.java:804)
	at sqlline.SqlLine.initArgs(SqlLine.java:588)
	at sqlline.SqlLine.begin(SqlLine.java:656)
	at sqlline.SqlLine.start(SqlLine.java:398)
	at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01@EXAMPLE.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:335)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1625)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:100)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:136)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:104)
	at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.createTable(MetaDataProtos.java:14549)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1470)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1457)
	at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1736)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.security.AccessDeniedException): org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01@EXAMPLE.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1227)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:218)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:292)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32855)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
	... 13 more
18/03/29 19:29:21 WARN query.ConnectionQueryServicesImpl: Could not check for Phoenix SYSTEM tables, assuming they exist and are properly configured{code}
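
To make the proposal concrete, here is a minimal sketch of the existence check using the plain HBase client {{Admin}} API. The class and method names are only illustrative, and this is not the actual {{ConnectionQueryServicesImpl}} code path; it just shows the kind of test I have in mind:

{code:java}
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class SystemCatalogCheck {

    // Returns true if SYSTEM:CATALOG already exists. When it does, the client
    // could skip the CREATE TABLE round trip entirely, and with it the
    // namespace-level CREATE permission check that produces the WARN above.
    static boolean systemCatalogExists(Connection connection) throws IOException {
        // SYSTEM:CATALOG is the namespace-mapped name used when
        // phoenix.schema.isNamespaceMappingEnabled=true.
        TableName sysCatalog = TableName.valueOf("SYSTEM", "CATALOG");
        try (Admin admin = connection.getAdmin()) {
            return admin.tableExists(sysCatalog);
        }
    }

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            if (systemCatalogExists(connection)) {
                System.out.println("SYSTEM:CATALOG exists; skip CREATE TABLE");
            } else {
                System.out.println("SYSTEM:CATALOG missing; fall through to the existing bootstrap/CREATE logic");
            }
        }
    }
}
{code}

Whether a check like this belongs in {{ConnectionQueryServicesImpl.init()}} or somewhere else entirely is exactly the kind of community input I'm after.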

  was:
Getting WARN stacktraces similar to the one below when starting SQLLine after enabling Phoenix Namespace support. After speaking with [~elserj], it became apparent this is a bug that may have already been fixed. However, [~smayani] and I were unable to find an existing JIRA for this issue. I propose performing a test for the existence of {{SYSTEM:CATALOG}} before attempting to create it, so that this stacktrace isn't printed at each startup (until someone finally caves and applies {{CREATE}} permissions on the entire 
{{@SYSTEM}} namespace). If I can find the time, I'd like to attempt to create a patch, but I'd like to get community input first on the desired means to fix this.


{code:java}
18/03/29 19:29:21 WARN ipc.CoprocessorRpcChannel: Call failed on IOException
org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01t01@HORTONWORKS.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:335)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1625)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:100)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:136)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:104)
	at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.createTable(MetaDataProtos.java:14549)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1470)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1457)
	at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1736)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.security.AccessDeniedException): org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01t01@HORTONWORKS.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1227)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:218)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:292)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32855)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
	... 13 more
18/03/29 19:29:21 WARN client.HTable: Error calling coprocessor service org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService for row \x00SYSTEM\x00CATALOG
java.util.concurrent.ExecutionException: org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01t01@HORTONWORKS.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at java.util.concurrent.FutureTask.report(FutureTask.java:122)
	at java.util.concurrent.FutureTask.get(FutureTask.java:192)
	at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1748)
	at org.apache.hadoop.hbase.client.HTable.coprocessorService(HTable.java:1704)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1279)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.metaDataCoprocessorExec(ConnectionQueryServicesImpl.java:1260)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.createTable(ConnectionQueryServicesImpl.java:1456)
	at org.apache.phoenix.schema.MetaDataClient.createTableInternal(MetaDataClient.java:2190)
	at org.apache.phoenix.schema.MetaDataClient.createTable(MetaDataClient.java:872)
	at org.apache.phoenix.compile.CreateTableCompiler$2.execute(CreateTableCompiler.java:194)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:343)
	at org.apache.phoenix.jdbc.PhoenixStatement$2.call(PhoenixStatement.java:331)
	at org.apache.phoenix.call.CallRunner.run(CallRunner.java:53)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeMutation(PhoenixStatement.java:330)
	at org.apache.phoenix.jdbc.PhoenixStatement.executeUpdate(PhoenixStatement.java:1421)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$13.call(ConnectionQueryServicesImpl.java:2382)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$13.call(ConnectionQueryServicesImpl.java:2330)
	at org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:78)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:2330)
	at org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:237)
	at org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(PhoenixEmbeddedDriver.java:150)
	at org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:205)
	at sqlline.DatabaseConnection.connect(DatabaseConnection.java:157)
	at sqlline.DatabaseConnection.getConnection(DatabaseConnection.java:203)
	at sqlline.Commands.connect(Commands.java:1064)
	at sqlline.Commands.connect(Commands.java:996)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at sqlline.ReflectiveCommandHandler.execute(ReflectiveCommandHandler.java:36)
	at sqlline.SqlLine.dispatch(SqlLine.java:804)
	at sqlline.SqlLine.initArgs(SqlLine.java:588)
	at sqlline.SqlLine.begin(SqlLine.java:656)
	at sqlline.SqlLine.start(SqlLine.java:398)
	at sqlline.SqlLine.main(SqlLine.java:292)
Caused by: org.apache.hadoop.hbase.security.AccessDeniedException: org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01t01@HORTONWORKS.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.getRemoteException(ProtobufUtil.java:335)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1625)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:100)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:90)
	at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:136)
	at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:104)
	at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.createTable(MetaDataProtos.java:14549)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1470)
	at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1457)
	at org.apache.hadoop.hbase.client.HTable$15.call(HTable.java:1736)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hbase.ipc.RemoteWithExtrasException(org.apache.hadoop.hbase.security.AccessDeniedException): org.apache.hadoop.hbase.security.AccessDeniedException: Insufficient permissions (user=user01t01@HORTONWORKS.COM, scope=SYSTEM, params=[namespace=SYSTEM,table=SYSTEM:CATALOG],action=CREATE)
	at org.apache.hadoop.hbase.security.access.AccessController.requireNamespacePermission(AccessController.java:628)
	at org.apache.hadoop.hbase.security.access.AccessController.preCreateTable(AccessController.java:996)
	at org.apache.phoenix.coprocessor.PhoenixAccessController.preCreateTable(PhoenixAccessController.java:152)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost$2.call(PhoenixMetaDataCoprocessorHost.java:167)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.execOperation(PhoenixMetaDataCoprocessorHost.java:80)
	at org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.preCreateTable(PhoenixMetaDataCoprocessorHost.java:163)
	at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.createTable(MetaDataEndpointImpl.java:1375)
	at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService.callMethod(MetaDataProtos.java:14332)
	at org.apache.hadoop.hbase.regionserver.HRegion.execService(HRegion.java:7853)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execServiceOnRegion(RSRpcServices.java:1980)
	at org.apache.hadoop.hbase.regionserver.RSRpcServices.execService(RSRpcServices.java:1962)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:32389)
	at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:2150)
	at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:112)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:187)
	at org.apache.hadoop.hbase.ipc.RpcExecutor$Handler.run(RpcExecutor.java:167)

	at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1227)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:218)
	at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:292)
	at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32855)
	at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1622)
	... 13 more
18/03/29 19:29:21 WARN query.ConnectionQueryServicesImpl: Could not check for Phoenix SYSTEM tables, assuming they exist and are properly configured{code}


> Test existence of SYSTEM:CATALOG before attempting to create it
> ---------------------------------------------------------------
>
>                 Key: PHOENIX-4681
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-4681
>             Project: Phoenix
>          Issue Type: Bug
>    Affects Versions: 4.7.0
>            Reporter: Salvatore LaMendola
>            Priority: Minor
>



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)