You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@hbase.apache.org by ch huang <ju...@gmail.com> on 2013/07/12 05:26:35 UTC

problem in testing coprocessor endpoint

I am testing the coprocessor endpoint function. Here is my testing process and
the error I get. I hope an expert on coprocessors can help me out.


# vi ColumnAggregationProtocol.java

import java.io.IOException;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
/**
 * A sample dynamic-RPC protocol for performing column aggregation at the
 * region level. Implementations run inside the region server; clients invoke
 * them through {@code HTable.coprocessorExec}.
 */
public interface ColumnAggregationProtocol extends CoprocessorProtocol {

  /**
   * Sums the values of the given column over every row stored in this region.
   * Could be extended with start/end row parameters for a finer-grained
   * aggregation.
   *
   * @param family    column family to aggregate
   * @param qualifier column qualifier to aggregate
   * @return the sum of all matching cell values in the region
   * @throws IOException if the region scan fails
   */
  long sum(byte[] family, byte[] qualifier) throws IOException;
}


# vi ColumnAggregationEndpoint.java


import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.ProtocolSignature;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;

//Aggregation implementation at a region.

public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
  implements ColumnAggregationProtocol {

     /**
      * Sums the values of the given column over all rows in this region.
      * Values are assumed to be the string form of a long (they are parsed
      * with {@code Long.parseLong(Bytes.toString(...))}).
      *
      * @param family    column family to aggregate
      * @param qualifier column qualifier to aggregate
      * @return the per-region sum of the column's values
      * @throws IOException if scanning the region fails
      */
     @Override
     public long sum(byte[] family, byte[] qualifier)
     throws IOException {
       // aggregate at each region
         Scan scan = new Scan();
         scan.addColumn(family, qualifier);
         long sumResult = 0;

         CoprocessorEnvironment ce = getEnvironment();
         // This endpoint only makes sense in a region context, so the
         // environment is always a RegionCoprocessorEnvironment here.
         HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
         InternalScanner scanner = hr.getScanner(scan);

         try {
           List<KeyValue> curVals = new ArrayList<KeyValue>();
           boolean hasMore = false;
           do {
             curVals.clear();
             hasMore = scanner.next(curVals);
             // BUGFIX: the original called curVals.get(0) unconditionally,
             // which throws IndexOutOfBoundsException when the scanner
             // returns an empty batch (empty region, or the final call).
             // Iterating the list also counts every returned KeyValue
             // instead of silently ignoring all but the first.
             for (KeyValue kv : curVals) {
               sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
             }
           } while (hasMore);
         } finally {
             // Always release the region scanner, even if parsing fails.
             scanner.close();
         }
         return sumResult;
      }

      /**
       * {@inheritDoc}
       *
       * NOTE(review): returning 0 here is an auto-generated stub. If the RPC
       * layer compares this against the client's expected version, a constant
       * 0 may be rejected — confirm against the HBase version in use.
       */
      @Override
      public long getProtocolVersion(String protocol, long clientVersion)
             throws IOException {
         // TODO Auto-generated method stub
         return 0;
      }

      /**
       * {@inheritDoc}
       *
       * NOTE(review): returning null is an auto-generated stub; verify the
       * RPC engine tolerates a null signature before shipping this.
       */
      @Override
      public ProtocolSignature getProtocolSignature(String protocol,
             long clientVersion, int clientMethodsHash) throws IOException
{
          // TODO Auto-generated method stub
          return null;
      }
}

I compile and pack the two classes into test.jar, and put it into my HDFS filesystem

and load it into my test table

hbase(main):006:0> alter 'mytest', METHOD =>
'table_att','coprocessor'=>'hdfs:///
192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001'

here is my testing java code

package com.testme.demo;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
import org.apache.hadoop.hbase.util.*;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;

/**
 * Client-side driver that invokes the ColumnAggregationProtocol endpoint on
 * table {@code mytest} for the row range ["1", "5") and prints the total sum
 * across all contacted regions.
 */
public class TestCop {
   private static Configuration conf =null;
   // Table and column targeted by the aggregation call.
   private static String TEST_TABLE = "mytest";
   private static String TEST_FAMILY = "myfl";
   private static String TEST_QUALIFIER = "myqf";
 /**
  * @param args
  */
   static {
          conf = HBaseConfiguration.create();
          conf.addResource( "hbase-site.xml");
   }

 public static void main(String[] args) throws IOException,Throwable{
  // Re-created here; this discards the static initializer's addResource()
  // of hbase-site.xml — presumably intentional, but verify which config
  // the HTable should actually use.
  conf = HBaseConfiguration.create();

   HTable table = new HTable(conf,TEST_TABLE);
   try {
     // NOTE(review): the protocol class used here must be the SAME
     // fully-qualified class that was registered server-side. This file
     // imports org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol,
     // while the deployed jar's interface appears to have no package —
     // a mismatch produces exactly the UnknownProtocolException seen below.
     Map<byte[], Long> results =
       table.coprocessorExec(ColumnAggregationProtocol.class,
         "1".getBytes(), "5".getBytes(),
         new Call<ColumnAggregationProtocol,Long>(){
           public Long call(ColumnAggregationProtocol instance) throws IOException {
             return (Long) instance.sum(TEST_FAMILY.getBytes(),
                 TEST_QUALIFIER.getBytes());
           }});

     // Each region returns its partial sum; add them up for the total.
     long sumResult = 0;
     for (Map.Entry<byte[], Long> e : results.entrySet()) {
       sumResult += e.getValue();
     }
     System.out.println(sumResult);
   } finally {
     // BUGFIX: the original leaked the HTable (and its underlying
     // connection resources); close it even when the exec call throws.
     table.close();
   }
 }
}
when i run it i get error
Exception in thread "main"
org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
handler for protocol
org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
 at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
 at
org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
 at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
 at java.lang.reflect.Method.invoke(Method.java:597)
 at
org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
 at
org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
 at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
 at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
 at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
Source)
 at java.lang.reflect.Constructor.newInstance(Unknown Source)
 at
org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
 at
org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
 at
org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:228)
 at
org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
 at
org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
 at com.sun.proxy.$Proxy8.sum(Unknown Source)
 at com.testme.demo.TestCop$1.call(TestCop.java:41)
 at com.testme.demo.TestCop$1.call(TestCop.java:1)
 at
org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1466)
 at java.util.concurrent.FutureTask$Sync.innerRun(Unknown Source)
 at java.util.concurrent.FutureTask.run(Unknown Source)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
 at java.lang.Thread.run(Unknown Source)
Caused by:
org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException):
org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
handler for protocol
org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
 at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
 at
org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
 at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
 at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
 at java.lang.reflect.Method.invoke(Method.java:597)
 at
org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
 at
org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
 at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:995)
 at
org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
 at com.sun.proxy.$Proxy7.execCoprocessor(Unknown Source)
 at
org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
 at
org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
 at
org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:163)
 ... 10 more

hbase(main):020:0> describe
'mytest'
DESCRIPTION                                          ENABLED
 {NAME => 'mytest', coprocessor$1 => 'hdfs:///192.16 true
 8.10.22:9000/alex/test.jar|ColumnAggregationEndpoin
 t|1001', FAMILIES => [{NAME => 'myfl'
 , DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NO
 NE', REPLICATION_SCOPE => '0', VERSIONS => '3', COM
 PRESSION => 'NONE', MIN_VERSIONS => '0', TTL => '21
 47483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE
  => '65536', IN_MEMORY => 'false', ENCODE_ON_DISK =
 > 'true', BLOCKCACHE => 'true'}]}
1 row(s) in 0.0920 seconds

Re: problem in testing coprocessor endpoint

Posted by Gary Helmling <gh...@gmail.com>.
Kim, Asaf,

I don't know where this conception comes from that endpoint coprocessors
must be loaded globally, but it is simply not true.  If you would like to
see how endpoints are registered, see RegionCoprocessorHost.java:

  @Override
  public RegionEnvironment createEnvironment(Class<?> implClass,
      Coprocessor instance, int priority, int seq, Configuration conf) {
    // Check if it's an Endpoint.
    // Due to current dynamic protocol design, Endpoint
    // uses a different way to be registered and executed.
    // It uses a visitor pattern to invoke registered Endpoint
    // method.
    for (Class c : implClass.getInterfaces()) {
      if (CoprocessorProtocol.class.isAssignableFrom(c)) {
        region.registerProtocol(c, (CoprocessorProtocol)instance);
        break;
      }
    }


If you would like some trivial test code that demonstrates invoking an
endpoint coprocessor configured on only a single table (coprocessor jar
loaded from HDFS), just let me know and I will send it to you.

--gh


On Fri, Jul 12, 2013 at 10:06 AM, Kim Chew <kc...@gmail.com> wrote:

> No, Endpoint processor can be deployed via configuration only.
> In hbase-site.xml, there should be an entry like this,
>
> <property>
>   <name>hbase.coprocessor.region.classes</name>
>   <value>myEndpointImpl</value>
> </property>
>
> Also, you have to let HBase know where to find your class, so in
> hbase-env.sh
>
>     export HBASE_CLASSPATH=${HBASE_HOME}/lib/AggregateCounterEndpoint.jar
>
>
> The trouble is you will need to restart RS. It would be nice to have APIs
> to load the Endpoint coprocessor dynamically.
>
> Kim
>
>
> On Fri, Jul 12, 2013 at 9:18 AM, Gary Helmling <gh...@gmail.com>
> wrote:
>
> > Endpoint coprocessors can be loaded on a single table.  They are no
> > different from RegionObservers in this regard.  Both are instantiated per
> > region by RegionCoprocessorHost.  You should be able to load the
> > coprocessor by setting it as a table attribute.  If it doesn't seem to be
> > loading, check the region server logs after you re-enable the table where
> > you have added it.  Do you see any log messages from
> RegionCoprocessorHost?
> >
> >
> > On Fri, Jul 12, 2013 at 4:33 AM, Asaf Mesika <as...@gmail.com>
> > wrote:
> >
> > > You can't register and end point just for one table. It's like a stored
> > > procedure - you choose to run it and pass parameters to it.
> > >
> > > On Friday, July 12, 2013, ch huang wrote:
> > >
> > > > what your describe is how to load endpoint coprocessor for every
> region
> > > in
> > > > the hbase, what i want to do is just load it into my test table ,only
> > for
> > > > the regions of the table
> > > >
> > > > On Fri, Jul 12, 2013 at 12:07 PM, Asaf Mesika <asaf.mesika@gmail.com
> >
> > > > wrote:
> > > >
> > > > > The only way to register endpoint coprocessor jars is by placing
> them
> > > in
> > > > > lib dir if hbase and modifying hbase-site.xml to point to it under
> a
> > > > > property name I forgot at the moment.
> > > > > What you described is a way to register an Observer type
> coprocessor.
> > > > >
> > > > >
> > > > > On Friday, July 12, 2013, ch huang wrote:
> > > > >
> > > > > > i am testing coprocessor endpoint function, here is my testing
> > > process
> > > > > ,and
> > > > > > error i get ,hope any expert on coprocessor can help me out
> > > > > >
> > > > > >
> > > > > > # vi ColumnAggregationProtocol.java
> > > > > >
> > > > > > import java.io.IOException;
> > > > > > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > > > > > // A sample protocol for performing aggregation at regions.
> > > > > > public interface ColumnAggregationProtocol
> > > > > > extends CoprocessorProtocol {
> > > > > > // Perform aggregation for a given column at the region. The
> > > > aggregation
> > > > > > // will include all the rows inside the region. It can be
> extended
> > to
> > > > > > // allow passing start and end rows for a fine-grained
> aggregation.
> > > > > >    public long sum(byte[] family, byte[] qualifier) throws
> > > IOException;
> > > > > > }
> > > > > >
> > > > > >
> > > > > > # vi ColumnAggregationEndpoint.java
> > > > > >
> > > > > >
> > > > > > import java.io.FileWriter;
> > > > > > import java.io.IOException;
> > > > > > import java.util.ArrayList;
> > > > > > import java.util.List;
> > > > > > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > > > > > import org.apache.hadoop.hbase.KeyValue;
> > > > > > import org.apache.hadoop.hbase.client.Scan;
> > > > > > import
> org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > > > > > import
> > > > org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > > > > > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > > > > > import org.apache.hadoop.hbase.regionserver.HRegion;
> > > > > > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > > > > > import org.apache.hadoop.hbase.util.Bytes;
> > > > > >
> > > > > > //Aggregation implementation at a region.
> > > > > >
> > > > > > public class ColumnAggregationEndpoint extends
> > > BaseEndpointCoprocessor
> > > > > >   implements ColumnAggregationProtocol {
> > > > > >      @Override
> > > > > >      public long sum(byte[] family, byte[] qualifier)
> > > > > >      throws IOException {
> > > > > >        // aggregate at each region
> > > > > >          Scan scan = new Scan();
> > > > > >          scan.addColumn(family, qualifier);
> > > > > >          long sumResult = 0;
> > > > > >
> > > > > >          CoprocessorEnvironment ce = getEnvironment();
> > > > > >          HRegion hr =
> > ((RegionCoprocessorEnvironment)ce).getRegion();
> > > > > >          InternalScanner scanner = hr.getScanner(scan);
> > > > > >
> > > > > >          try {
> > > > > >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> > > > > >            boolean hasMore = false;
> > > > > >            do {
> > > > > >          curVals.clear();
> > > > > >          hasMore = scanner.next(curVals);
> > > > > >          KeyValue kv = curVals.get(0);
> > > > > >          sumResult +=
> > Long.parseLong(Bytes.toString(kv.getValue()));
> > > > > >
> > > > > >            } while (hasMore);
> > > > > >          } finally {
> > > > > >              scanner.close();
> > > > > >          }
> > > > > >          return sumResult;
> > > > > >       }
> > > > > >
> > > > > >       @Override
> > > > > >       public long getProtocolVersion(String protocol, long
> > > > clientVersion)
> > > > > >              throws IOException {
> > > > > >          // TODO Auto-generated method stub
> > > > > >          return 0;
> > > > > >       }
> > > > > >
> > > > > > >
> 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<
> > > >
> > >
> >
> http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001
> > > > >
> > > > > '
> > > > > >
> > > > > > here is my testing java code
> > > > > >
> > > > > > package com.testme.demo;
> > > > > > import java.io.IOException;
> > > > > > import java.util.Map;
> > > > > > import org.apache.hadoop.conf.Configuration;
> > > > > > import org.apache.hadoop.hbase.HBaseConfiguration;
> > > > > > import org.apache.hadoop.hbase.HTableDescriptor;
> > > > > > import org.apache.hadoop.hbase.client.*;
> > > > > > import
> > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > > > > > import org.apache.hadoop.hbase.util.*;
> > > > > > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> > > > > >
> > > > > > public class TestCop {
> > > > > >    private static Configuration conf =null;
> > > > > >    private static String TEST_TABLE = "mytest";
> > > > > >    private static String TEST_FAMILY = "myfl";
> > > > > >    private static String TEST_QUALIFIER = "myqf";
> > > > > >  /**
> > > > > >   * @param args
> > > > > >   */
> > > > > >    static {
> > > > > >           conf = HBaseConfiguration.create();
> > > > > >           conf.addResource( "hbase-site.xml");
> > > > > >    }
> > > > > >
> > > > > >  public static void main(String[] args) throws
> > IOException,Throwable{
> > > > > >   // TODO Auto-generated method stub
> > > > > >   conf = HBaseConfiguration.create();
> > > > > >
> > > > > >    HTable table = new HTable(conf,TEST_TABLE);
> > > > > >  //  HTableDescriptor htd = table.getTableDescriptor();
> > > > > >
> > > > > >    Scan scan = new Scan();
> > > > > >    Map<byte[], Long> results;
> > > > > >
> > > > > >    results =
> table.coprocessorExec(ColumnAggregationProtocol.class,
> > > > > > "1".getBytes(),"5".getBytes(), new
> > > > > Call<ColumnAggregationProtocol,Long>(){
> > > > > >      public Long call(ColumnAggregationProtocol instance)throws
> > > > > > IOException{
> > > > > >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > > > > > TEST_QUALIFIER.getBytes());
> > > > > >     }});
> > > > > >
> > > > > >    long sumResult = 0;
> > > > > >    long expectedResult = 0;
> > > > > >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> > > > > >     sumResult += e.getValue();
> > > > > >    }
> > > > > >    System.out.println(sumResult);
> > > > > >  }
> > > > > > }
> > > > > > when i run it i get error
> > > > > > Exception in thread "main"
> > > > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > > > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> > > > > matching
> > > > > > handler for protocol
> > > > > > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in
> > > region
> > > > > > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> > > > > >  at
> > > > org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> > > > > >  at
> > > > > >
> > > > > >
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> > > > > >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > > > > >  at
> > > > > >
> > > > > >
> > > > >
> > > >
> > >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> > > > > >  at
> > > > > >
> > > > > >
> > > > >
> > > >
> > >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> > > > > >  at java.lang.reflect.Method.invoke(Method.java:597)
> > > > > >  at
> > > > > >
> > > > > >
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> > > > > >  at
> > > > > >
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> > > > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> > > > Method)
> > > > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown
> > > > Source)
> > > > > >  at
> > sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > > > > > Source)
> > > > > >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> > > > > >  at
> > > > > >
> > > > > >
> > > > >
> > >
> >
>

Re: problem in testing coprocessor endpoint

Posted by Kim Chew <kc...@gmail.com>.
No, Endpoint processor can be deployed via configuration only.
In hbase-site.xml, there should be an entry like this,

<property>
  <name>hbase.coprocessor.region.classes</name>
  <value>myEndpointImpl</value>
</property>

Also, you have to let HBase know where to find your class, so in
hbase-env.sh

    export HBASE_CLASSPATH=${HBASE_HOME}/lib/AggregateCounterEndpoint.jar


The trouble is you will need to restart RS. It would be nice to have APIs
to load the Endpoint coprocessor dynamically.

Kim


On Fri, Jul 12, 2013 at 9:18 AM, Gary Helmling <gh...@gmail.com> wrote:

> Endpoint coprocessors can be loaded on a single table.  They are no
> different from RegionObservers in this regard.  Both are instantiated per
> region by RegionCoprocessorHost.  You should be able to load the
> coprocessor by setting it as a table attribute.  If it doesn't seem to be
> loading, check the region server logs after you re-enable the table where
> you have added it.  Do you see any log messages from RegionCoprocessorHost?
>
>
> On Fri, Jul 12, 2013 at 4:33 AM, Asaf Mesika <as...@gmail.com>
> wrote:
>
> > You can't register and end point just for one table. It's like a stored
> > procedure - you choose to run it and pass parameters to it.
> >
> > On Friday, July 12, 2013, ch huang wrote:
> >
> > > what your describe is how to load endpoint coprocessor for every region
> > in
> > > the hbase, what i want to do is just load it into my test table ,only
> for
> > > the regions of the table
> > >
> > > On Fri, Jul 12, 2013 at 12:07 PM, Asaf Mesika <as...@gmail.com>
> > > wrote:
> > >
> > > > The only way to register endpoint coprocessor jars is by placing them
> > in
> > > > lib dir if hbase and modifying hbase-site.xml to point to it under a
> > > > property name I forgot at the moment.
> > > > What you described is a way to register an Observer type coprocessor.
> > > >
> > > >
> > > > On Friday, July 12, 2013, ch huang wrote:
> > > >
> > > > > i am testing coprocessor endpoint function, here is my testing
> > process
> > > > ,and
> > > > > error i get ,hope any expert on coprocessor can help me out
> > > > >
> > > > >
> > > > > # vi ColumnAggregationProtocol.java
> > > > >
> > > > > import java.io.IOException;
> > > > > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > > > > // A sample protocol for performing aggregation at regions.
> > > > > public interface ColumnAggregationProtocol
> > > > > extends CoprocessorProtocol {
> > > > > // Perform aggregation for a given column at the region. The
> > > aggregation
> > > > > // will include all the rows inside the region. It can be extended
> to
> > > > > // allow passing start and end rows for a fine-grained aggregation.
> > > > >    public long sum(byte[] family, byte[] qualifier) throws
> > IOException;
> > > > > }
> > > > >
> > > > >
> > > > > # vi ColumnAggregationEndpoint.java
> > > > >
> > > > >
> > > > > import java.io.FileWriter;
> > > > > import java.io.IOException;
> > > > > import java.util.ArrayList;
> > > > > import java.util.List;
> > > > > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > > > > import org.apache.hadoop.hbase.KeyValue;
> > > > > import org.apache.hadoop.hbase.client.Scan;
> > > > > import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > > > > import
> > > org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > > > > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > > > > import org.apache.hadoop.hbase.regionserver.HRegion;
> > > > > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > > > > import org.apache.hadoop.hbase.util.Bytes;
> > > > >
> > > > > //Aggregation implementation at a region.
> > > > >
> > > > > public class ColumnAggregationEndpoint extends
> > BaseEndpointCoprocessor
> > > > >   implements ColumnAggregationProtocol {
> > > > >      @Override
> > > > >      public long sum(byte[] family, byte[] qualifier)
> > > > >      throws IOException {
> > > > >        // aggregate at each region
> > > > >          Scan scan = new Scan();
> > > > >          scan.addColumn(family, qualifier);
> > > > >          long sumResult = 0;
> > > > >
> > > > >          CoprocessorEnvironment ce = getEnvironment();
> > > > >          HRegion hr =
> ((RegionCoprocessorEnvironment)ce).getRegion();
> > > > >          InternalScanner scanner = hr.getScanner(scan);
> > > > >
> > > > >          try {
> > > > >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> > > > >            boolean hasMore = false;
> > > > >            do {
> > > > >          curVals.clear();
> > > > >          hasMore = scanner.next(curVals);
> > > > >          KeyValue kv = curVals.get(0);
> > > > >          sumResult +=
> Long.parseLong(Bytes.toString(kv.getValue()));
> > > > >
> > > > >            } while (hasMore);
> > > > >          } finally {
> > > > >              scanner.close();
> > > > >          }
> > > > >          return sumResult;
> > > > >       }
> > > > >
> > > > >       @Override
> > > > >       public long getProtocolVersion(String protocol, long
> > > clientVersion)
> > > > >              throws IOException {
> > > > >          // TODO Auto-generated method stub
> > > > >          return 0;
> > > > >       }
> > > > >
> > > > > > 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<
> > >
> >
> http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001
> > > >
> > > > '
> > > > >
> > > > > here is my testing java code
> > > > >
> > > > > package com.testme.demo;
> > > > > import java.io.IOException;
> > > > > import java.util.Map;
> > > > > import org.apache.hadoop.conf.Configuration;
> > > > > import org.apache.hadoop.hbase.HBaseConfiguration;
> > > > > import org.apache.hadoop.hbase.HTableDescriptor;
> > > > > import org.apache.hadoop.hbase.client.*;
> > > > > import
> org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > > > > import org.apache.hadoop.hbase.util.*;
> > > > > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> > > > >
> > > > > public class TestCop {
> > > > >    private static Configuration conf =null;
> > > > >    private static String TEST_TABLE = "mytest";
> > > > >    private static String TEST_FAMILY = "myfl";
> > > > >    private static String TEST_QUALIFIER = "myqf";
> > > > >  /**
> > > > >   * @param args
> > > > >   */
> > > > >    static {
> > > > >           conf = HBaseConfiguration.create();
> > > > >           conf.addResource( "hbase-site.xml");
> > > > >    }
> > > > >
> > > > >  public static void main(String[] args) throws
> IOException,Throwable{
> > > > >   // TODO Auto-generated method stub
> > > > >   conf = HBaseConfiguration.create();
> > > > >
> > > > >    HTable table = new HTable(conf,TEST_TABLE);
> > > > >  //  HTableDescriptor htd = table.getTableDescriptor();
> > > > >
> > > > >    Scan scan = new Scan();
> > > > >    Map<byte[], Long> results;
> > > > >
> > > > >    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> > > > > "1".getBytes(),"5".getBytes(), new
> > > > Call<ColumnAggregationProtocol,Long>(){
> > > > >      public Long call(ColumnAggregationProtocol instance)throws
> > > > > IOException{
> > > > >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > > > > TEST_QUALIFIER.getBytes());
> > > > >     }});
> > > > >
> > > > >    long sumResult = 0;
> > > > >    long expectedResult = 0;
> > > > >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> > > > >     sumResult += e.getValue();
> > > > >    }
> > > > >    System.out.println(sumResult);
> > > > >  }
> > > > > }
> > > > > when i run it i get error
> > > > > Exception in thread "main"
> > > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> > > > matching
> > > > > handler for protocol
> > > > > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in
> > region
> > > > > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> > > > >  at
> > > org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> > > > >  at
> > > > >
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> > > > >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > > > >  at
> > > > >
> > > > >
> > > >
> > >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> > > > >  at
> > > > >
> > > > >
> > > >
> > >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> > > > >  at java.lang.reflect.Method.invoke(Method.java:597)
> > > > >  at
> > > > >
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> > > > >  at
> > > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> > > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> > > Method)
> > > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown
> > > Source)
> > > > >  at
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > > > > Source)
> > > > >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> > > > >  at
> > > > >
> > > > >
> > > >
> >
>

Re: problem in testing coprocessor endpoint

Posted by Gary Helmling <gh...@gmail.com>.
Endpoint coprocessors can be loaded on a single table.  They are no
different from RegionObservers in this regard.  Both are instantiated per
region by RegionCoprocessorHost.  You should be able to load the
coprocessor by setting it as a table attribute.  If it doesn't seem to be
loading, check the region server logs after you re-enable the table where
you have added it.  Do you see any log messages from RegionCoprocessorHost?


On Fri, Jul 12, 2013 at 4:33 AM, Asaf Mesika <as...@gmail.com> wrote:

> You can't register and end point just for one table. It's like a stored
> procedure - you choose to run it and pass parameters to it.
>
> On Friday, July 12, 2013, ch huang wrote:
>
> > what your describe is how to load endpoint coprocessor for every region
> in
> > the hbase, what i want to do is just load it into my test table ,only for
> > the regions of the table
> >
> > On Fri, Jul 12, 2013 at 12:07 PM, Asaf Mesika <as...@gmail.com>
> > wrote:
> >
> > > The only way to register endpoint coprocessor jars is by placing them
> in
> > > lib dir if hbase and modifying hbase-site.xml to point to it under a
> > > property name I forgot at the moment.
> > > What you described is a way to register an Observer type coprocessor.
> > >
> > >
> > > On Friday, July 12, 2013, ch huang wrote:
> > >
> > > > i am testing coprocessor endpoint function, here is my testing
> process
> > > ,and
> > > > error i get ,hope any expert on coprocessor can help me out
> > > >
> > > >
> > > > # vi ColumnAggregationProtocol.java
> > > >
> > > > import java.io.IOException;
> > > > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > > > // A sample protocol for performing aggregation at regions.
> > > > public interface ColumnAggregationProtocol
> > > > extends CoprocessorProtocol {
> > > > // Perform aggregation for a given column at the region. The
> > aggregation
> > > > // will include all the rows inside the region. It can be extended to
> > > > // allow passing start and end rows for a fine-grained aggregation.
> > > >    public long sum(byte[] family, byte[] qualifier) throws
> IOException;
> > > > }
> > > >
> > > >
> > > > # vi ColumnAggregationEndpoint.java
> > > >
> > > >
> > > > import java.io.FileWriter;
> > > > import java.io.IOException;
> > > > import java.util.ArrayList;
> > > > import java.util.List;
> > > > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > > > import org.apache.hadoop.hbase.KeyValue;
> > > > import org.apache.hadoop.hbase.client.Scan;
> > > > import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > > > import
> > org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > > > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > > > import org.apache.hadoop.hbase.regionserver.HRegion;
> > > > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > > > import org.apache.hadoop.hbase.util.Bytes;
> > > >
> > > > //Aggregation implementation at a region.
> > > >
> > > > public class ColumnAggregationEndpoint extends
> BaseEndpointCoprocessor
> > > >   implements ColumnAggregationProtocol {
> > > >      @Override
> > > >      public long sum(byte[] family, byte[] qualifier)
> > > >      throws IOException {
> > > >        // aggregate at each region
> > > >          Scan scan = new Scan();
> > > >          scan.addColumn(family, qualifier);
> > > >          long sumResult = 0;
> > > >
> > > >          CoprocessorEnvironment ce = getEnvironment();
> > > >          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
> > > >          InternalScanner scanner = hr.getScanner(scan);
> > > >
> > > >          try {
> > > >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> > > >            boolean hasMore = false;
> > > >            do {
> > > >          curVals.clear();
> > > >          hasMore = scanner.next(curVals);
> > > >          KeyValue kv = curVals.get(0);
> > > >          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
> > > >
> > > >            } while (hasMore);
> > > >          } finally {
> > > >              scanner.close();
> > > >          }
> > > >          return sumResult;
> > > >       }
> > > >
> > > >       @Override
> > > >       public long getProtocolVersion(String protocol, long
> > clientVersion)
> > > >              throws IOException {
> > > >          // TODO Auto-generated method stub
> > > >          return 0;
> > > >       }
> > > >
> > > > > 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<
> >
> http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001
> > >
> > > '
> > > >
> > > > here is my testing java code
> > > >
> > > > package com.testme.demo;
> > > > import java.io.IOException;
> > > > import java.util.Map;
> > > > import org.apache.hadoop.conf.Configuration;
> > > > import org.apache.hadoop.hbase.HBaseConfiguration;
> > > > import org.apache.hadoop.hbase.HTableDescriptor;
> > > > import org.apache.hadoop.hbase.client.*;
> > > > import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > > > import org.apache.hadoop.hbase.util.*;
> > > > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> > > >
> > > > public class TestCop {
> > > >    private static Configuration conf =null;
> > > >    private static String TEST_TABLE = "mytest";
> > > >    private static String TEST_FAMILY = "myfl";
> > > >    private static String TEST_QUALIFIER = "myqf";
> > > >  /**
> > > >   * @param args
> > > >   */
> > > >    static {
> > > >           conf = HBaseConfiguration.create();
> > > >           conf.addResource( "hbase-site.xml");
> > > >    }
> > > >
> > > >  public static void main(String[] args) throws IOException,Throwable{
> > > >   // TODO Auto-generated method stub
> > > >   conf = HBaseConfiguration.create();
> > > >
> > > >    HTable table = new HTable(conf,TEST_TABLE);
> > > >  //  HTableDescriptor htd = table.getTableDescriptor();
> > > >
> > > >    Scan scan = new Scan();
> > > >    Map<byte[], Long> results;
> > > >
> > > >    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> > > > "1".getBytes(),"5".getBytes(), new
> > > Call<ColumnAggregationProtocol,Long>(){
> > > >      public Long call(ColumnAggregationProtocol instance)throws
> > > > IOException{
> > > >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > > > TEST_QUALIFIER.getBytes());
> > > >     }});
> > > >
> > > >    long sumResult = 0;
> > > >    long expectedResult = 0;
> > > >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> > > >     sumResult += e.getValue();
> > > >    }
> > > >    System.out.println(sumResult);
> > > >  }
> > > > }
> > > > when i run it i get error
> > > > Exception in thread "main"
> > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> > > matching
> > > > handler for protocol
> > > > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in
> region
> > > > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> > > >  at
> > org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> > > >  at
> > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> > > >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > > >  at
> > > >
> > > >
> > >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> > > >  at
> > > >
> > > >
> > >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> > > >  at java.lang.reflect.Method.invoke(Method.java:597)
> > > >  at
> > > >
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> > > >  at
> > > >
> > >
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> > Method)
> > > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown
> > Source)
> > > >  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > > > Source)
> > > >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> > > >  at
> > > >
> > > >
> > >
>

Re: problem in testing coprocessor endpoint

Posted by Asaf Mesika <as...@gmail.com>.
You can't register an endpoint coprocessor just for one table. It's like a stored
procedure - you choose to run it and pass parameters to it.

On Friday, July 12, 2013, ch huang wrote:

> what your describe is how to load endpoint coprocessor for every region in
> the hbase, what i want to do is just load it into my test table ,only for
> the regions of the table
>
> On Fri, Jul 12, 2013 at 12:07 PM, Asaf Mesika <as...@gmail.com>
> wrote:
>
> > The only way to register endpoint coprocessor jars is by placing them in
> > lib dir if hbase and modifying hbase-site.xml to point to it under a
> > property name I forgot at the moment.
> > What you described is a way to register an Observer type coprocessor.
> >
> >
> > On Friday, July 12, 2013, ch huang wrote:
> >
> > > i am testing coprocessor endpoint function, here is my testing process
> > ,and
> > > error i get ,hope any expert on coprocessor can help me out
> > >
> > >
> > > # vi ColumnAggregationProtocol.java
> > >
> > > import java.io.IOException;
> > > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > > // A sample protocol for performing aggregation at regions.
> > > public interface ColumnAggregationProtocol
> > > extends CoprocessorProtocol {
> > > // Perform aggregation for a given column at the region. The
> aggregation
> > > // will include all the rows inside the region. It can be extended to
> > > // allow passing start and end rows for a fine-grained aggregation.
> > >    public long sum(byte[] family, byte[] qualifier) throws IOException;
> > > }
> > >
> > >
> > > # vi ColumnAggregationEndpoint.java
> > >
> > >
> > > import java.io.FileWriter;
> > > import java.io.IOException;
> > > import java.util.ArrayList;
> > > import java.util.List;
> > > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > > import org.apache.hadoop.hbase.KeyValue;
> > > import org.apache.hadoop.hbase.client.Scan;
> > > import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > > import
> org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > > import org.apache.hadoop.hbase.regionserver.HRegion;
> > > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > > import org.apache.hadoop.hbase.util.Bytes;
> > >
> > > //Aggregation implementation at a region.
> > >
> > > public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
> > >   implements ColumnAggregationProtocol {
> > >      @Override
> > >      public long sum(byte[] family, byte[] qualifier)
> > >      throws IOException {
> > >        // aggregate at each region
> > >          Scan scan = new Scan();
> > >          scan.addColumn(family, qualifier);
> > >          long sumResult = 0;
> > >
> > >          CoprocessorEnvironment ce = getEnvironment();
> > >          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
> > >          InternalScanner scanner = hr.getScanner(scan);
> > >
> > >          try {
> > >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> > >            boolean hasMore = false;
> > >            do {
> > >          curVals.clear();
> > >          hasMore = scanner.next(curVals);
> > >          KeyValue kv = curVals.get(0);
> > >          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
> > >
> > >            } while (hasMore);
> > >          } finally {
> > >              scanner.close();
> > >          }
> > >          return sumResult;
> > >       }
> > >
> > >       @Override
> > >       public long getProtocolVersion(String protocol, long
> clientVersion)
> > >              throws IOException {
> > >          // TODO Auto-generated method stub
> > >          return 0;
> > >       }
> > >
> > > > 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<
> http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001
> >
> > '
> > >
> > > here is my testing java code
> > >
> > > package com.testme.demo;
> > > import java.io.IOException;
> > > import java.util.Map;
> > > import org.apache.hadoop.conf.Configuration;
> > > import org.apache.hadoop.hbase.HBaseConfiguration;
> > > import org.apache.hadoop.hbase.HTableDescriptor;
> > > import org.apache.hadoop.hbase.client.*;
> > > import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > > import org.apache.hadoop.hbase.util.*;
> > > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> > >
> > > public class TestCop {
> > >    private static Configuration conf =null;
> > >    private static String TEST_TABLE = "mytest";
> > >    private static String TEST_FAMILY = "myfl";
> > >    private static String TEST_QUALIFIER = "myqf";
> > >  /**
> > >   * @param args
> > >   */
> > >    static {
> > >           conf = HBaseConfiguration.create();
> > >           conf.addResource( "hbase-site.xml");
> > >    }
> > >
> > >  public static void main(String[] args) throws IOException,Throwable{
> > >   // TODO Auto-generated method stub
> > >   conf = HBaseConfiguration.create();
> > >
> > >    HTable table = new HTable(conf,TEST_TABLE);
> > >  //  HTableDescriptor htd = table.getTableDescriptor();
> > >
> > >    Scan scan = new Scan();
> > >    Map<byte[], Long> results;
> > >
> > >    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> > > "1".getBytes(),"5".getBytes(), new
> > Call<ColumnAggregationProtocol,Long>(){
> > >      public Long call(ColumnAggregationProtocol instance)throws
> > > IOException{
> > >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > > TEST_QUALIFIER.getBytes());
> > >     }});
> > >
> > >    long sumResult = 0;
> > >    long expectedResult = 0;
> > >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> > >     sumResult += e.getValue();
> > >    }
> > >    System.out.println(sumResult);
> > >  }
> > > }
> > > when i run it i get error
> > > Exception in thread "main"
> > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> > matching
> > > handler for protocol
> > > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> > > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> > >  at
> org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> > >  at
> > >
> > >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> > >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> > >  at
> > >
> > >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> > >  at
> > >
> > >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> > >  at java.lang.reflect.Method.invoke(Method.java:597)
> > >  at
> > >
> > >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> > >  at
> > >
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native
> Method)
> > >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown
> Source)
> > >  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > > Source)
> > >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> > >  at
> > >
> > >
> >

Re: problem in testing coprocessor endpoint

Posted by ch huang <ju...@gmail.com>.
What you describe is how to load an endpoint coprocessor for every region in
HBase; what I want to do is load it into just my test table, only for
the regions of that table.

On Fri, Jul 12, 2013 at 12:07 PM, Asaf Mesika <as...@gmail.com> wrote:

> The only way to register endpoint coprocessor jars is by placing them in
> lib dir if hbase and modifying hbase-site.xml to point to it under a
> property name I forgot at the moment.
> What you described is a way to register an Observer type coprocessor.
>
>
> On Friday, July 12, 2013, ch huang wrote:
>
> > i am testing coprocessor endpoint function, here is my testing process
> ,and
> > error i get ,hope any expert on coprocessor can help me out
> >
> >
> > # vi ColumnAggregationProtocol.java
> >
> > import java.io.IOException;
> > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > // A sample protocol for performing aggregation at regions.
> > public interface ColumnAggregationProtocol
> > extends CoprocessorProtocol {
> > // Perform aggregation for a given column at the region. The aggregation
> > // will include all the rows inside the region. It can be extended to
> > // allow passing start and end rows for a fine-grained aggregation.
> >    public long sum(byte[] family, byte[] qualifier) throws IOException;
> > }
> >
> >
> > # vi ColumnAggregationEndpoint.java
> >
> >
> > import java.io.FileWriter;
> > import java.io.IOException;
> > import java.util.ArrayList;
> > import java.util.List;
> > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > import org.apache.hadoop.hbase.KeyValue;
> > import org.apache.hadoop.hbase.client.Scan;
> > import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > import org.apache.hadoop.hbase.regionserver.HRegion;
> > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > import org.apache.hadoop.hbase.util.Bytes;
> >
> > //Aggregation implementation at a region.
> >
> > public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
> >   implements ColumnAggregationProtocol {
> >      @Override
> >      public long sum(byte[] family, byte[] qualifier)
> >      throws IOException {
> >        // aggregate at each region
> >          Scan scan = new Scan();
> >          scan.addColumn(family, qualifier);
> >          long sumResult = 0;
> >
> >          CoprocessorEnvironment ce = getEnvironment();
> >          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
> >          InternalScanner scanner = hr.getScanner(scan);
> >
> >          try {
> >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> >            boolean hasMore = false;
> >            do {
> >          curVals.clear();
> >          hasMore = scanner.next(curVals);
> >          KeyValue kv = curVals.get(0);
> >          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
> >
> >            } while (hasMore);
> >          } finally {
> >              scanner.close();
> >          }
> >          return sumResult;
> >       }
> >
> >       @Override
> >       public long getProtocolVersion(String protocol, long clientVersion)
> >              throws IOException {
> >          // TODO Auto-generated method stub
> >          return 0;
> >       }
> >
> >       @Override
> >
> >       public ProtocolSignature getProtocolSignature(String protocol,
> >              long clientVersion, int clientMethodsHash) throws
> IOException
> > {
> >           // TODO Auto-generated method stub
> >           return null;
> >       }
> > }
> >
> > i compile and pack the two into test.jar,and put it into my HDFS
> filesystem
> >
> > and load it into my test table
> >
> > hbase(main):006:0> alter 'mytest', METHOD =>
> > 'table_att','coprocessor'=>'hdfs:///
> > 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001>
> '
> >
> > here is my testing java code
> >
> > package com.testme.demo;
> > import java.io.IOException;
> > import java.util.Map;
> > import org.apache.hadoop.conf.Configuration;
> > import org.apache.hadoop.hbase.HBaseConfiguration;
> > import org.apache.hadoop.hbase.HTableDescriptor;
> > import org.apache.hadoop.hbase.client.*;
> > import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > import org.apache.hadoop.hbase.util.*;
> > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> >
> > public class TestCop {
> >    private static Configuration conf =null;
> >    private static String TEST_TABLE = "mytest";
> >    private static String TEST_FAMILY = "myfl";
> >    private static String TEST_QUALIFIER = "myqf";
> >  /**
> >   * @param args
> >   */
> >    static {
> >           conf = HBaseConfiguration.create();
> >           conf.addResource( "hbase-site.xml");
> >    }
> >
> >  public static void main(String[] args) throws IOException,Throwable{
> >   // TODO Auto-generated method stub
> >   conf = HBaseConfiguration.create();
> >
> >    HTable table = new HTable(conf,TEST_TABLE);
> >  //  HTableDescriptor htd = table.getTableDescriptor();
> >
> >    Scan scan = new Scan();
> >    Map<byte[], Long> results;
> >
> >    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> > "1".getBytes(),"5".getBytes(), new
> Call<ColumnAggregationProtocol,Long>(){
> >      public Long call(ColumnAggregationProtocol instance)throws
> > IOException{
> >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > TEST_QUALIFIER.getBytes());
> >     }});
> >
> >    long sumResult = 0;
> >    long expectedResult = 0;
> >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> >     sumResult += e.getValue();
> >    }
> >    System.out.println(sumResult);
> >  }
> > }
> > when i run it i get error
> > Exception in thread "main"
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> matching
> > handler for protocol
> > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> >  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> >  at
> >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >  at
> >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> >  at
> >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> >  at java.lang.reflect.Method.invoke(Method.java:597)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> >  at
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
> >  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > Source)
> >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> >  at
> >
> >
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
> >  at
> >
> >
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:228)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
> >  at com.sun.proxy.$Proxy8.sum(Unknown Source)
> >  at com.testme.demo.TestCop$1.call(TestCop.java:41)
> >  at com.testme.demo.TestCop$1.call(TestCop.java:1)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1466)
> >  at java.util.concurrent.FutureTask$Sync.innerRun(Unknown Source)
> >  at java.util.concurrent.FutureTask.run(Unknown Source)
> >  at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
> >  at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
> >  at java.lang.Thread.run(Unknown Source)
> > Caused by:
> >
> >
> org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException):
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> matching
> > handler for protocol
> > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> >  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> >  at
> >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >  at
> >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> >  at
> >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> >  at java.lang.reflect.Method.invoke(Method.java:597)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> >  at
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> >  at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:995)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
> >  at com.sun.proxy.$Proxy7.execCoprocessor(Unknown Source)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:163)
> >  ... 10 more
> >
> > hbase(main):020:0> describe
> > 'mytest'
> > DESCRIPTION                                          ENABLED
> >  {NAME => 'mytest', coprocessor$1 => 'hdfs:///192.16 true
> >  8.10.22:9000/alex/test.jar|ColumnAggregationEndpoin
> >  t|1001', FAMILIES => [{NAME => 'myfl'
> >  , DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NO
> >  NE', REPLICATION_SCOPE => '0', VERSIONS => '3', COM
> >  PRESSION => 'NONE', MIN_VERSIONS => '0', TTL => '21
> >  47483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE
> >   => '65536', IN_MEMORY => 'false', ENCODE_ON_DISK =
> >  > 'true', BLOCKCACHE => 'true'}]}
> > 1 row(s) in 0.0920 seconds
> >
>

Re: problem in testing coprocessor endpoint

Posted by Asaf Mesika <as...@gmail.com>.
The only way to register endpoint coprocessor jars is by placing them in
the lib dir of HBase and modifying hbase-site.xml to point to them under a
property name I forget at the moment.
What you described is a way to register an Observer-type coprocessor.


On Friday, July 12, 2013, ch huang wrote:

> i am testing coprocessor endpoint function, here is my testing process ,and
> error i get ,hope any expert on coprocessor can help me out
>
>
> # vi ColumnAggregationProtocol.java
>
> import java.io.IOException;
> import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> // A sample protocol for performing aggregation at regions.
> public interface ColumnAggregationProtocol
> extends CoprocessorProtocol {
> // Perform aggregation for a given column at the region. The aggregation
> // will include all the rows inside the region. It can be extended to
> // allow passing start and end rows for a fine-grained aggregation.
>    public long sum(byte[] family, byte[] qualifier) throws IOException;
> }
>
>
> # vi ColumnAggregationEndpoint.java
>
>
> import java.io.FileWriter;
> import java.io.IOException;
> import java.util.ArrayList;
> import java.util.List;
> import org.apache.hadoop.hbase.CoprocessorEnvironment;
> import org.apache.hadoop.hbase.KeyValue;
> import org.apache.hadoop.hbase.client.Scan;
> import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> import org.apache.hadoop.hbase.regionserver.HRegion;
> import org.apache.hadoop.hbase.regionserver.InternalScanner;
> import org.apache.hadoop.hbase.util.Bytes;
>
> //Aggregation implementation at a region.
>
> public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
>   implements ColumnAggregationProtocol {
>      @Override
>      public long sum(byte[] family, byte[] qualifier)
>      throws IOException {
>        // aggregate at each region
>          Scan scan = new Scan();
>          scan.addColumn(family, qualifier);
>          long sumResult = 0;
>
>          CoprocessorEnvironment ce = getEnvironment();
>          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
>          InternalScanner scanner = hr.getScanner(scan);
>
>          try {
>            List<KeyValue> curVals = new ArrayList<KeyValue>();
>            boolean hasMore = false;
>            do {
>          curVals.clear();
>          hasMore = scanner.next(curVals);
>          KeyValue kv = curVals.get(0);
>          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
>
>            } while (hasMore);
>          } finally {
>              scanner.close();
>          }
>          return sumResult;
>       }
>
>       @Override
>       public long getProtocolVersion(String protocol, long clientVersion)
>              throws IOException {
>          // TODO Auto-generated method stub
>          return 0;
>       }
>
>       @Override
>
>       public ProtocolSignature getProtocolSignature(String protocol,
>              long clientVersion, int clientMethodsHash) throws IOException
> {
>           // TODO Auto-generated method stub
>           return null;
>       }
> }
>
> i compile and pack the two into test.jar,and put it into my HDFS filesystem
>
> and load it into my test table
>
> hbase(main):006:0> alter 'mytest', METHOD =>
> 'table_att','coprocessor'=>'hdfs:///
> 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001'
>
> here is my testing java code
>
> package com.testme.demo;
> import java.io.IOException;
> import java.util.Map;
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.hbase.HBaseConfiguration;
> import org.apache.hadoop.hbase.HTableDescriptor;
> import org.apache.hadoop.hbase.client.*;
> import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> import org.apache.hadoop.hbase.util.*;
> import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
>
> public class TestCop {
>    private static Configuration conf =null;
>    private static String TEST_TABLE = "mytest";
>    private static String TEST_FAMILY = "myfl";
>    private static String TEST_QUALIFIER = "myqf";
>  /**
>   * @param args
>   */
>    static {
>           conf = HBaseConfiguration.create();
>           conf.addResource( "hbase-site.xml");
>    }
>
>  public static void main(String[] args) throws IOException,Throwable{
>   // TODO Auto-generated method stub
>   conf = HBaseConfiguration.create();
>
>    HTable table = new HTable(conf,TEST_TABLE);
>  //  HTableDescriptor htd = table.getTableDescriptor();
>
>    Scan scan = new Scan();
>    Map<byte[], Long> results;
>
>    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> "1".getBytes(),"5".getBytes(), new Call<ColumnAggregationProtocol,Long>(){
>      public Long call(ColumnAggregationProtocol instance)throws
> IOException{
>        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> TEST_QUALIFIER.getBytes());
>     }});
>
>    long sumResult = 0;
>    long expectedResult = 0;
>    for (Map.Entry<byte[], Long> e:results.entrySet()){
>     sumResult += e.getValue();
>    }
>    System.out.println(sumResult);
>  }
> }
> when i run it i get error
> Exception in thread "main"
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
> handler for protocol
> org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
>  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
>  at
>
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at
>
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>  at
>
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>  at java.lang.reflect.Method.invoke(Method.java:597)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
>  at
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
>  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
>  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> Source)
>  at java.lang.reflect.Constructor.newInstance(Unknown Source)
>  at
>
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
>  at
>
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:228)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
>  at com.sun.proxy.$Proxy8.sum(Unknown Source)
>  at com.testme.demo.TestCop$1.call(TestCop.java:41)
>  at com.testme.demo.TestCop$1.call(TestCop.java:1)
>  at
>
> org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1466)
>  at java.util.concurrent.FutureTask$Sync.innerRun(Unknown Source)
>  at java.util.concurrent.FutureTask.run(Unknown Source)
>  at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
>  at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
>  at java.lang.Thread.run(Unknown Source)
> Caused by:
>
> org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException):
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
> handler for protocol
> org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
>  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
>  at
>
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at
>
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>  at
>
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>  at java.lang.reflect.Method.invoke(Method.java:597)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
>  at
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
>  at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:995)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
>  at com.sun.proxy.$Proxy7.execCoprocessor(Unknown Source)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:163)
>  ... 10 more
>
> hbase(main):020:0> describe
> 'mytest'
> DESCRIPTION                                          ENABLED
>  {NAME => 'mytest', coprocessor$1 => 'hdfs:///192.16 true
>  8.10.22:9000/alex/test.jar|ColumnAggregationEndpoin
>  t|1001', FAMILIES => [{NAME => 'myfl'
>  , DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NO
>  NE', REPLICATION_SCOPE => '0', VERSIONS => '3', COM
>  PRESSION => 'NONE', MIN_VERSIONS => '0', TTL => '21
>  47483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE
>   => '65536', IN_MEMORY => 'false', ENCODE_ON_DISK =
>  > 'true', BLOCKCACHE => 'true'}]}
> 1 row(s) in 0.0920 seconds
>

Re: problem in testing coprocessor endpoint

Posted by ch huang <ju...@gmail.com>.
Thanks, but why doesn't the test code run properly?

On Fri, Jul 12, 2013 at 11:56 AM, Ted Yu <yu...@gmail.com> wrote:

> In 0.94, we already have:
>
> public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
> implements ColumnAggregationProtocol {
>
>   @Override
>   public long sum(byte[] family, byte[] qualifier)
>
> What additional functionality do you need ?
>
> On Thu, Jul 11, 2013 at 8:26 PM, ch huang <ju...@gmail.com> wrote:
>
> > i am testing coprocessor endpoint function, here is my testing process
> ,and
> > error i get ,hope any expert on coprocessor can help me out
> >
> >
> > # vi ColumnAggregationProtocol.java
> >
> > import java.io.IOException;
> > import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> > // A sample protocol for performing aggregation at regions.
> > public interface ColumnAggregationProtocol
> > extends CoprocessorProtocol {
> > // Perform aggregation for a given column at the region. The aggregation
> > // will include all the rows inside the region. It can be extended to
> > // allow passing start and end rows for a fine-grained aggregation.
> >    public long sum(byte[] family, byte[] qualifier) throws IOException;
> > }
> >
> >
> > # vi ColumnAggregationEndpoint.java
> >
> >
> > import java.io.FileWriter;
> > import java.io.IOException;
> > import java.util.ArrayList;
> > import java.util.List;
> > import org.apache.hadoop.hbase.CoprocessorEnvironment;
> > import org.apache.hadoop.hbase.KeyValue;
> > import org.apache.hadoop.hbase.client.Scan;
> > import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> > import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> > import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> > import org.apache.hadoop.hbase.regionserver.HRegion;
> > import org.apache.hadoop.hbase.regionserver.InternalScanner;
> > import org.apache.hadoop.hbase.util.Bytes;
> >
> > //Aggregation implementation at a region.
> >
> > public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
> >   implements ColumnAggregationProtocol {
> >      @Override
> >      public long sum(byte[] family, byte[] qualifier)
> >      throws IOException {
> >        // aggregate at each region
> >          Scan scan = new Scan();
> >          scan.addColumn(family, qualifier);
> >          long sumResult = 0;
> >
> >          CoprocessorEnvironment ce = getEnvironment();
> >          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
> >          InternalScanner scanner = hr.getScanner(scan);
> >
> >          try {
> >            List<KeyValue> curVals = new ArrayList<KeyValue>();
> >            boolean hasMore = false;
> >            do {
> >          curVals.clear();
> >          hasMore = scanner.next(curVals);
> >          KeyValue kv = curVals.get(0);
> >          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
> >
> >            } while (hasMore);
> >          } finally {
> >              scanner.close();
> >          }
> >          return sumResult;
> >       }
> >
> >       @Override
> >       public long getProtocolVersion(String protocol, long clientVersion)
> >              throws IOException {
> >          // TODO Auto-generated method stub
> >          return 0;
> >       }
> >
> >       @Override
> >
> >       public ProtocolSignature getProtocolSignature(String protocol,
> >              long clientVersion, int clientMethodsHash) throws
> IOException
> > {
> >           // TODO Auto-generated method stub
> >           return null;
> >       }
> > }
> >
> > i compile and pack the two into test.jar,and put it into my HDFS
> filesystem
> >
> > and load it into my test table
> >
> > hbase(main):006:0> alter 'mytest', METHOD =>
> > 'table_att','coprocessor'=>'hdfs:///
> > 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001<http://192.168.10.22:9000/alex/test.jar%7CColumnAggregationEndpoint%7C1001>
> '
> >
> > here is my testing java code
> >
> > package com.testme.demo;
> > import java.io.IOException;
> > import java.util.Map;
> > import org.apache.hadoop.conf.Configuration;
> > import org.apache.hadoop.hbase.HBaseConfiguration;
> > import org.apache.hadoop.hbase.HTableDescriptor;
> > import org.apache.hadoop.hbase.client.*;
> > import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> > import org.apache.hadoop.hbase.util.*;
> > import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
> >
> > public class TestCop {
> >    private static Configuration conf =null;
> >    private static String TEST_TABLE = "mytest";
> >    private static String TEST_FAMILY = "myfl";
> >    private static String TEST_QUALIFIER = "myqf";
> >  /**
> >   * @param args
> >   */
> >    static {
> >           conf = HBaseConfiguration.create();
> >           conf.addResource( "hbase-site.xml");
> >    }
> >
> >  public static void main(String[] args) throws IOException,Throwable{
> >   // TODO Auto-generated method stub
> >   conf = HBaseConfiguration.create();
> >
> >    HTable table = new HTable(conf,TEST_TABLE);
> >  //  HTableDescriptor htd = table.getTableDescriptor();
> >
> >    Scan scan = new Scan();
> >    Map<byte[], Long> results;
> >
> >    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> > "1".getBytes(),"5".getBytes(), new
> Call<ColumnAggregationProtocol,Long>(){
> >      public Long call(ColumnAggregationProtocol instance)throws
> > IOException{
> >        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> > TEST_QUALIFIER.getBytes());
> >     }});
> >
> >    long sumResult = 0;
> >    long expectedResult = 0;
> >    for (Map.Entry<byte[], Long> e:results.entrySet()){
> >     sumResult += e.getValue();
> >    }
> >    System.out.println(sumResult);
> >  }
> > }
> > when i run it i get error
> > Exception in thread "main"
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> matching
> > handler for protocol
> > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> >  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> >  at
> >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >  at
> >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> >  at
> >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> >  at java.lang.reflect.Method.invoke(Method.java:597)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> >  at
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> >  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> >  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
> >  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> > Source)
> >  at java.lang.reflect.Constructor.newInstance(Unknown Source)
> >  at
> >
> >
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
> >  at
> >
> >
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:228)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
> >  at com.sun.proxy.$Proxy8.sum(Unknown Source)
> >  at com.testme.demo.TestCop$1.call(TestCop.java:41)
> >  at com.testme.demo.TestCop$1.call(TestCop.java:1)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1466)
> >  at java.util.concurrent.FutureTask$Sync.innerRun(Unknown Source)
> >  at java.util.concurrent.FutureTask.run(Unknown Source)
> >  at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
> >  at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
> >  at java.lang.Thread.run(Unknown Source)
> > Caused by:
> >
> >
> org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException):
> > org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No
> matching
> > handler for protocol
> > org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> > mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
> >  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
> >  at
> >
> >
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
> >  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> >  at
> >
> >
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
> >  at
> >
> >
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
> >  at java.lang.reflect.Method.invoke(Method.java:597)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
> >  at
> >
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
> >  at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:995)
> >  at
> >
> >
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
> >  at com.sun.proxy.$Proxy7.execCoprocessor(Unknown Source)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
> >  at
> > org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
> >  at
> >
> >
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:163)
> >  ... 10 more
> >
> > hbase(main):020:0> describe
> > 'mytest'
> > DESCRIPTION                                          ENABLED
> >  {NAME => 'mytest', coprocessor$1 => 'hdfs:///192.16 true
> >  8.10.22:9000/alex/test.jar|ColumnAggregationEndpoin
> >  t|1001', FAMILIES => [{NAME => 'myfl'
> >  , DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NO
> >  NE', REPLICATION_SCOPE => '0', VERSIONS => '3', COM
> >  PRESSION => 'NONE', MIN_VERSIONS => '0', TTL => '21
> >  47483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE
> >   => '65536', IN_MEMORY => 'false', ENCODE_ON_DISK =
> >  > 'true', BLOCKCACHE => 'true'}]}
> > 1 row(s) in 0.0920 seconds
> >
>

Re: problem in testing coprocessor endpoint

Posted by Ted Yu <yu...@gmail.com>.
In 0.94, we already have:

public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
implements ColumnAggregationProtocol {

  @Override
  public long sum(byte[] family, byte[] qualifier)

What additional functionality do you need?

On Thu, Jul 11, 2013 at 8:26 PM, ch huang <ju...@gmail.com> wrote:

> i am testing coprocessor endpoint function, here is my testing process ,and
> error i get ,hope any expert on coprocessor can help me out
>
>
> # vi ColumnAggregationProtocol.java
>
> import java.io.IOException;
> import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
> // A sample protocol for performing aggregation at regions.
> public interface ColumnAggregationProtocol
> extends CoprocessorProtocol {
> // Perform aggregation for a given column at the region. The aggregation
> // will include all the rows inside the region. It can be extended to
> // allow passing start and end rows for a fine-grained aggregation.
>    public long sum(byte[] family, byte[] qualifier) throws IOException;
> }
>
>
> # vi ColumnAggregationEndpoint.java
>
>
> import java.io.FileWriter;
> import java.io.IOException;
> import java.util.ArrayList;
> import java.util.List;
> import org.apache.hadoop.hbase.CoprocessorEnvironment;
> import org.apache.hadoop.hbase.KeyValue;
> import org.apache.hadoop.hbase.client.Scan;
> import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
> import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
> import org.apache.hadoop.hbase.ipc.ProtocolSignature;
> import org.apache.hadoop.hbase.regionserver.HRegion;
> import org.apache.hadoop.hbase.regionserver.InternalScanner;
> import org.apache.hadoop.hbase.util.Bytes;
>
> //Aggregation implementation at a region.
>
> public class ColumnAggregationEndpoint extends BaseEndpointCoprocessor
>   implements ColumnAggregationProtocol {
>      @Override
>      public long sum(byte[] family, byte[] qualifier)
>      throws IOException {
>        // aggregate at each region
>          Scan scan = new Scan();
>          scan.addColumn(family, qualifier);
>          long sumResult = 0;
>
>          CoprocessorEnvironment ce = getEnvironment();
>          HRegion hr = ((RegionCoprocessorEnvironment)ce).getRegion();
>          InternalScanner scanner = hr.getScanner(scan);
>
>          try {
>            List<KeyValue> curVals = new ArrayList<KeyValue>();
>            boolean hasMore = false;
>            do {
>          curVals.clear();
>          hasMore = scanner.next(curVals);
>          KeyValue kv = curVals.get(0);
>          sumResult += Long.parseLong(Bytes.toString(kv.getValue()));
>
>            } while (hasMore);
>          } finally {
>              scanner.close();
>          }
>          return sumResult;
>       }
>
>       @Override
>       public long getProtocolVersion(String protocol, long clientVersion)
>              throws IOException {
>          // TODO Auto-generated method stub
>          return 0;
>       }
>
>       @Override
>
>       public ProtocolSignature getProtocolSignature(String protocol,
>              long clientVersion, int clientMethodsHash) throws IOException
> {
>           // TODO Auto-generated method stub
>           return null;
>       }
> }
>
> i compile and pack the two into test.jar,and put it into my HDFS filesystem
>
> and load it into my test table
>
> hbase(main):006:0> alter 'mytest', METHOD =>
> 'table_att','coprocessor'=>'hdfs:///
> 192.168.10.22:9000/alex/test.jar|ColumnAggregationEndpoint|1001'
>
> here is my testing java code
>
> package com.testme.demo;
> import java.io.IOException;
> import java.util.Map;
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.hbase.HBaseConfiguration;
> import org.apache.hadoop.hbase.HTableDescriptor;
> import org.apache.hadoop.hbase.client.*;
> import org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol;
> import org.apache.hadoop.hbase.util.*;
> import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;;
>
> public class TestCop {
>    private static Configuration conf =null;
>    private static String TEST_TABLE = "mytest";
>    private static String TEST_FAMILY = "myfl";
>    private static String TEST_QUALIFIER = "myqf";
>  /**
>   * @param args
>   */
>    static {
>           conf = HBaseConfiguration.create();
>           conf.addResource( "hbase-site.xml");
>    }
>
>  public static void main(String[] args) throws IOException,Throwable{
>   // TODO Auto-generated method stub
>   conf = HBaseConfiguration.create();
>
>    HTable table = new HTable(conf,TEST_TABLE);
>  //  HTableDescriptor htd = table.getTableDescriptor();
>
>    Scan scan = new Scan();
>    Map<byte[], Long> results;
>
>    results = table.coprocessorExec(ColumnAggregationProtocol.class,
> "1".getBytes(),"5".getBytes(), new Call<ColumnAggregationProtocol,Long>(){
>      public Long call(ColumnAggregationProtocol instance)throws
> IOException{
>        return (Long) instance.sum(TEST_FAMILY.getBytes(),
> TEST_QUALIFIER.getBytes());
>     }});
>
>    long sumResult = 0;
>    long expectedResult = 0;
>    for (Map.Entry<byte[], Long> e:results.entrySet()){
>     sumResult += e.getValue();
>    }
>    System.out.println(sumResult);
>  }
> }
> when i run it i get error
> Exception in thread "main"
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException:
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
> handler for protocol
> org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
>  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
>  at
>
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at
>
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>  at
>
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>  at java.lang.reflect.Method.invoke(Method.java:597)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
>  at
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
>  at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>  at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
>  at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown
> Source)
>  at java.lang.reflect.Constructor.newInstance(Unknown Source)
>  at
>
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:90)
>  at
>
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:79)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.translateException(ServerCallable.java:228)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:166)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker.invoke(ExecRPCInvoker.java:79)
>  at com.sun.proxy.$Proxy8.sum(Unknown Source)
>  at com.testme.demo.TestCop$1.call(TestCop.java:41)
>  at com.testme.demo.TestCop$1.call(TestCop.java:1)
>  at
>
> org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation$4.call(HConnectionManager.java:1466)
>  at java.util.concurrent.FutureTask$Sync.innerRun(Unknown Source)
>  at java.util.concurrent.FutureTask.run(Unknown Source)
>  at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source)
>  at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
>  at java.lang.Thread.run(Unknown Source)
> Caused by:
>
> org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException):
> org.apache.hadoop.hbase.ipc.HBaseRPC$UnknownProtocolException: No matching
> handler for protocol
> org.apache.hadoop.hbase.coprocessor.ColumnAggregationProtocol in region
> mytest,,1373597714844.e11ad2263faf89b5865ae98f524e3fb9.
>  at org.apache.hadoop.hbase.regionserver.HRegion.exec(HRegion.java:5463)
>  at
>
> org.apache.hadoop.hbase.regionserver.HRegionServer.execCoprocessor(HRegionServer.java:3720)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at
>
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>  at
>
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>  at java.lang.reflect.Method.invoke(Method.java:597)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Server.call(WritableRpcEngine.java:320)
>  at
> org.apache.hadoop.hbase.ipc.HBaseServer$Handler.run(HBaseServer.java:1426)
>  at org.apache.hadoop.hbase.ipc.HBaseClient.call(HBaseClient.java:995)
>  at
>
> org.apache.hadoop.hbase.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:86)
>  at com.sun.proxy.$Proxy7.execCoprocessor(Unknown Source)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:75)
>  at
> org.apache.hadoop.hbase.ipc.ExecRPCInvoker$1.call(ExecRPCInvoker.java:73)
>  at
>
> org.apache.hadoop.hbase.client.ServerCallable.withRetries(ServerCallable.java:163)
>  ... 10 more
>
> hbase(main):020:0> describe
> 'mytest'
> DESCRIPTION                                          ENABLED
>  {NAME => 'mytest', coprocessor$1 => 'hdfs:///192.16 true
>  8.10.22:9000/alex/test.jar|ColumnAggregationEndpoin
>  t|1001', FAMILIES => [{NAME => 'myfl'
>  , DATA_BLOCK_ENCODING => 'NONE', BLOOMFILTER => 'NO
>  NE', REPLICATION_SCOPE => '0', VERSIONS => '3', COM
>  PRESSION => 'NONE', MIN_VERSIONS => '0', TTL => '21
>  47483647', KEEP_DELETED_CELLS => 'false', BLOCKSIZE
>   => '65536', IN_MEMORY => 'false', ENCODE_ON_DISK =
>  > 'true', BLOCKCACHE => 'true'}]}
> 1 row(s) in 0.0920 seconds
>