Posted to commits@hbase.apache.org by ec...@apache.org on 2014/10/10 18:53:00 UTC
[19/38] HBASE-12197 Move rest to its own module
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
deleted file mode 100644
index 2ce8ede..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RESTServletContainer.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-import com.sun.jersey.spi.container.servlet.ServletContainer;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * REST servlet container. It is used to get the remote request user
- * without going through @HttpContext, so that we can minimize code changes.
- */
-@InterfaceAudience.Private
-public class RESTServletContainer extends ServletContainer {
- private static final long serialVersionUID = -2474255003443394314L;
-
- /**
- * This container is used only if authentication and
- * impersonation is enabled. The remote request user is used
- * as a proxy user for impersonation in invoking any REST service.
- */
- @Override
- public void service(final HttpServletRequest request,
- final HttpServletResponse response) throws ServletException, IOException {
- final String doAsUserFromQuery = request.getParameter("doAs");
- RESTServlet servlet = RESTServlet.getInstance();
- if (doAsUserFromQuery != null) {
- Configuration conf = servlet.getConfiguration();
- if (!servlet.supportsProxyuser()) {
- throw new ServletException("Support for proxyuser is not configured");
- }
- UserGroupInformation ugi = servlet.getRealUser();
- // create and attempt to authorize a proxy user (the client is attempting
- // to do proxy user)
- ugi = UserGroupInformation.createProxyUser(doAsUserFromQuery, ugi);
- // validate the proxy user authorization
- try {
- ProxyUsers.authorize(ugi, request.getRemoteAddr(), conf);
- } catch(AuthorizationException e) {
- throw new ServletException(e.getMessage());
- }
- servlet.setEffectiveUser(doAsUserFromQuery);
- } else {
- String effectiveUser = request.getRemoteUser();
- servlet.setEffectiveUser(effectiveUser);
- }
- super.service(request, response);
- }
-}
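
The deleted container above threads the "doAs" query parameter through Hadoop's ProxyUsers check before any REST resource runs. A minimal client-side sketch of exercising that path follows; the host, port, user, and endpoint choice are assumptions for illustration and not part of this change.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical client: assumes a REST gateway on localhost:8080 with proxy-user
// support configured (RESTServlet.supportsProxyuser()); values are illustrative.
public class DoAsRequestSketch {
  public static void main(String[] args) throws IOException {
    // RESTServletContainer.service() reads the "doAs" parameter and asks
    // ProxyUsers.authorize() whether the authenticated caller may impersonate it.
    URL url = new URL("http://localhost:8080/version?doAs=alice");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}
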
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
deleted file mode 100644
index ddc2f56..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RegionsResource.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.util.Map;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.core.Response.ResponseBuilder;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.MetaScanner;
-import org.apache.hadoop.hbase.rest.model.TableInfoModel;
-import org.apache.hadoop.hbase.rest.model.TableRegionModel;
-
-@InterfaceAudience.Private
-public class RegionsResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RegionsResource.class);
-
- static CacheControl cacheControl;
- static {
- cacheControl = new CacheControl();
- cacheControl.setNoCache(true);
- cacheControl.setNoTransform(false);
- }
-
- TableResource tableResource;
-
- /**
- * Constructor
- * @param tableResource
- * @throws IOException
- */
- public RegionsResource(TableResource tableResource) throws IOException {
- super();
- this.tableResource = tableResource;
- }
-
- @GET
- @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- try {
- TableName tableName = TableName.valueOf(tableResource.getName());
- TableInfoModel model = new TableInfoModel(tableName.getNameAsString());
- Map<HRegionInfo,ServerName> regions = MetaScanner.allTableRegions(
- servlet.getConfiguration(), null, tableName, false);
- for (Map.Entry<HRegionInfo,ServerName> e: regions.entrySet()) {
- HRegionInfo hri = e.getKey();
- ServerName addr = e.getValue();
- model.add(
- new TableRegionModel(tableName.getNameAsString(), hri.getRegionId(),
- hri.getStartKey(), hri.getEndKey(), addr.getHostAndPort()));
- }
- ResponseBuilder response = Response.ok(model);
- response.cacheControl(cacheControl);
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return response.build();
- } catch (TableNotFoundException e) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
- .build();
- } catch (IOException e) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.SERVICE_UNAVAILABLE)
- .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
- .build();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
deleted file mode 100644
index f71d848..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceBase.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
-import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
-import org.apache.hadoop.util.StringUtils;
-
-@InterfaceAudience.Private
-public class ResourceBase implements Constants {
-
- RESTServlet servlet;
- Class<?> accessDeniedClazz;
-
- public ResourceBase() throws IOException {
- servlet = RESTServlet.getInstance();
- try {
- accessDeniedClazz = Class.forName("org.apache.hadoop.hbase.security.AccessDeniedException");
- } catch (ClassNotFoundException e) {
- }
- }
-
- protected Response processException(Throwable exp) {
- Throwable curr = exp;
- if(accessDeniedClazz != null) {
- //some access denied exceptions are buried
- while (curr != null) {
- if(accessDeniedClazz.isAssignableFrom(curr.getClass())) {
- throw new WebApplicationException(
- Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF +
- StringUtils.stringifyException(exp) + CRLF)
- .build());
- }
- curr = curr.getCause();
- }
- }
- //TableNotFound may also be buried one level deep
- if (exp instanceof TableNotFoundException ||
- exp.getCause() instanceof TableNotFoundException) {
- throw new WebApplicationException(
- Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF +
- StringUtils.stringifyException(exp) + CRLF)
- .build());
- }
- if (exp instanceof NoSuchColumnFamilyException){
- throw new WebApplicationException(
- Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF +
- StringUtils.stringifyException(exp) + CRLF)
- .build());
- }
- if (exp instanceof RuntimeException) {
- throw new WebApplicationException(
- Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF +
- StringUtils.stringifyException(exp) + CRLF)
- .build());
- }
- if (exp instanceof RetriesExhaustedWithDetailsException) {
- RetriesExhaustedWithDetailsException retryException =
- (RetriesExhaustedWithDetailsException) exp;
- processException(retryException.getCause(0));
- }
- throw new WebApplicationException(
- Response.status(Response.Status.SERVICE_UNAVAILABLE)
- .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF +
- StringUtils.stringifyException(exp) + CRLF)
- .build());
- }
-}
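
processException() above walks the cause chain and maps HBase failures onto 403, 404, 400, or 503 responses. The following hypothetical resource is a sketch of the pattern the other deleted classes follow on top of ResourceBase; it is illustrative only and not part of this change.

package org.apache.hadoop.hbase.rest;

import java.io.IOException;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;

// Hypothetical resource: failures funnel through ResourceBase.processException(),
// which picks the status code from the exception's cause chain.
@Path("/example")
public class ExampleResource extends ResourceBase {

  public ExampleResource() throws IOException {
    super();
  }

  @GET
  @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON})
  public Response get() {
    servlet.getMetrics().incrementRequests(1);
    try {
      // ... perform the HBase operation here ...
      servlet.getMetrics().incrementSucessfulGetRequests(1);
      return Response.ok("ok" + CRLF).build();
    } catch (Exception e) {
      servlet.getMetrics().incrementFailedGetRequests(1);
      return processException(e); // 403/404/400/503 depending on the cause
    }
  }
}
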
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
deleted file mode 100644
index d397399..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResourceConfig.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-
-import com.sun.jersey.api.core.PackagesResourceConfig;
-
-@InterfaceAudience.Private
-public class ResourceConfig extends PackagesResourceConfig {
- public ResourceConfig() {
- super("org.apache.hadoop.hbase.rest");
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
deleted file mode 100644
index 989c59e..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.rest.model.ScannerModel;
-
-@InterfaceAudience.Private
-public abstract class ResultGenerator implements Iterator<Cell> {
-
- public static ResultGenerator fromRowSpec(final String table,
- final RowSpec rowspec, final Filter filter, final boolean cacheBlocks)
- throws IOException {
- if (rowspec.isSingleRow()) {
- return new RowResultGenerator(table, rowspec, filter, cacheBlocks);
- } else {
- return new ScannerResultGenerator(table, rowspec, filter, cacheBlocks);
- }
- }
-
- public static Filter buildFilter(final String filter) throws Exception {
- return ScannerModel.buildFilter(filter);
- }
-
- public abstract void putBack(Cell kv);
-
- public abstract void close();
-}
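
fromRowSpec() above returns a RowResultGenerator for a single-row spec and a ScannerResultGenerator otherwise, exposing cells through the Iterator interface. A hedged usage sketch, assuming an already-initialized RESTServlet and an illustrative table "t1":

package org.apache.hadoop.hbase.rest;

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical usage, not part of this change: assumes RESTServlet has been
// initialized against a running cluster; table name and row spec are illustrative.
public class ResultGeneratorSketch {
  public static void main(String[] args) throws IOException {
    // A spec with no end row is a single-row spec, so fromRowSpec() returns a
    // RowResultGenerator; a start,end range would yield a ScannerResultGenerator.
    RowSpec spec = new RowSpec("/row1/cf:a");
    ResultGenerator gen = ResultGenerator.fromRowSpec("t1", spec, null, true);
    try {
      while (gen.hasNext()) {
        Cell cell = gen.next();
        System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + "/"
            + Bytes.toString(CellUtil.cloneQualifier(cell)));
      }
    } finally {
      gen.close();
    }
  }
}
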
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
deleted file mode 100644
index c425e84..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RootResource.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriInfo;
-import javax.ws.rs.core.Response.ResponseBuilder;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.rest.model.TableListModel;
-import org.apache.hadoop.hbase.rest.model.TableModel;
-
-@Path("/")
-@InterfaceAudience.Private
-public class RootResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RootResource.class);
-
- static CacheControl cacheControl;
- static {
- cacheControl = new CacheControl();
- cacheControl.setNoCache(true);
- cacheControl.setNoTransform(false);
- }
-
- /**
- * Constructor
- * @throws IOException
- */
- public RootResource() throws IOException {
- super();
- }
-
- private final TableListModel getTableList() throws IOException {
- TableListModel tableList = new TableListModel();
- TableName[] tableNames = servlet.getAdmin().listTableNames();
- for (TableName name: tableNames) {
- tableList.add(new TableModel(name.getNameAsString()));
- }
- return tableList;
- }
-
- @GET
- @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- try {
- ResponseBuilder response = Response.ok(getTableList());
- response.cacheControl(cacheControl);
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return response.build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return processException(e);
- }
- }
-
- @Path("status/cluster")
- public StorageClusterStatusResource getClusterStatusResource()
- throws IOException {
- return new StorageClusterStatusResource();
- }
-
- @Path("version")
- public VersionResource getVersionResource() throws IOException {
- return new VersionResource();
- }
-
- @Path("{table}")
- public TableResource getTableResource(
- final @PathParam("table") String table) throws IOException {
- return new TableResource(table);
- }
-}
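
RootResource above serves the table list at "/" and routes to the status/cluster, version, and per-table sub-resources. A client-side sketch of fetching the table list as JSON; the gateway address is an assumption:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical client: assumes a REST gateway at localhost:8080.
public class ListTablesSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/");            // handled by RootResource
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");  // MIMETYPE_JSON
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);                           // TableListModel as JSON
      }
    }
    conn.disconnect();
  }
}
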
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
deleted file mode 100644
index 7db5328..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResource.java
+++ /dev/null
@@ -1,598 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.rest.model.CellModel;
-import org.apache.hadoop.hbase.rest.model.CellSetModel;
-import org.apache.hadoop.hbase.rest.model.RowModel;
-import org.apache.hadoop.hbase.util.Bytes;
-
-@InterfaceAudience.Private
-public class RowResource extends ResourceBase {
- private static final Log LOG = LogFactory.getLog(RowResource.class);
-
- static final String CHECK_PUT = "put";
- static final String CHECK_DELETE = "delete";
-
- TableResource tableResource;
- RowSpec rowspec;
- private String check = null;
-
- /**
- * Constructor
- * @param tableResource
- * @param rowspec
- * @param versions
- * @throws IOException
- */
- public RowResource(TableResource tableResource, String rowspec,
- String versions, String check) throws IOException {
- super();
- this.tableResource = tableResource;
- this.rowspec = new RowSpec(rowspec);
- if (versions != null) {
- this.rowspec.setMaxVersions(Integer.valueOf(versions));
- }
- this.check = check;
- }
-
- @GET
- @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
- try {
- ResultGenerator generator =
- ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null,
- !params.containsKey(NOCACHE_PARAM_NAME));
- if (!generator.hasNext()) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
- .build();
- }
- int count = 0;
- CellSetModel model = new CellSetModel();
- Cell value = generator.next();
- byte[] rowKey = CellUtil.cloneRow(value);
- RowModel rowModel = new RowModel(rowKey);
- do {
- if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
- model.addRow(rowModel);
- rowKey = CellUtil.cloneRow(value);
- rowModel = new RowModel(rowKey);
- }
- rowModel.addCell(new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
- value.getTimestamp(), CellUtil.cloneValue(value)));
- if (++count > rowspec.getMaxValues()) {
- break;
- }
- value = generator.next();
- } while (value != null);
- model.addRow(rowModel);
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return Response.ok(model).build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return processException(e);
- }
- }
-
- @GET
- @Produces(MIMETYPE_BINARY)
- public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
- }
- servlet.getMetrics().incrementRequests(1);
- // doesn't make sense to use a non specific coordinate as this can only
- // return a single cell
- if (!rowspec.hasColumns() || rowspec.getColumns().length > 1) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
- .entity("Bad request: Either 0 or more than 1 columns specified." + CRLF).build();
- }
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
- try {
- ResultGenerator generator =
- ResultGenerator.fromRowSpec(tableResource.getName(), rowspec, null,
- !params.containsKey(NOCACHE_PARAM_NAME));
- if (!generator.hasNext()) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
- .build();
- }
- Cell value = generator.next();
- ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
- response.header("X-Timestamp", value.getTimestamp());
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return response.build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return processException(e);
- }
- }
-
- Response update(final CellSetModel model, final boolean replace) {
- servlet.getMetrics().incrementRequests(1);
- if (servlet.isReadOnly()) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
- .build();
- }
-
- if (CHECK_PUT.equalsIgnoreCase(check)) {
- return checkAndPut(model);
- } else if (CHECK_DELETE.equalsIgnoreCase(check)) {
- return checkAndDelete(model);
- } else if (check != null && check.length() > 0) {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Invalid check value '" + check + "'" + CRLF)
- .build();
- }
-
- HTableInterface table = null;
- try {
- List<RowModel> rows = model.getRows();
- List<Put> puts = new ArrayList<Put>();
- for (RowModel row: rows) {
- byte[] key = row.getKey();
- if (key == null) {
- key = rowspec.getRow();
- }
- if (key == null) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Row key not specified." + CRLF)
- .build();
- }
- Put put = new Put(key);
- int i = 0;
- for (CellModel cell: row.getCells()) {
- byte[] col = cell.getColumn();
- if (col == null) try {
- col = rowspec.getColumns()[i++];
- } catch (ArrayIndexOutOfBoundsException e) {
- col = null;
- }
- if (col == null) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
- .build();
- }
- byte [][] parts = KeyValue.parseColumn(col);
- if (parts.length != 2) {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- put.addImmutable(parts[0], parts[1], cell.getTimestamp(), cell.getValue());
- }
- puts.add(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
- }
- }
- table = servlet.getTable(tableResource.getName());
- table.put(puts);
- table.flushCommits();
- ResponseBuilder response = Response.ok();
- servlet.getMetrics().incrementSucessfulPutRequests(1);
- return response.build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return processException(e);
- } finally {
- if (table != null) try {
- table.close();
- } catch (IOException ioe) {
- LOG.debug("Exception received while closing the table", ioe);
- }
- }
- }
-
- // This currently supports only update of one row at a time.
- Response updateBinary(final byte[] message, final HttpHeaders headers,
- final boolean replace) {
- servlet.getMetrics().incrementRequests(1);
- if (servlet.isReadOnly()) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
- .build();
- }
- HTableInterface table = null;
- try {
- byte[] row = rowspec.getRow();
- byte[][] columns = rowspec.getColumns();
- byte[] column = null;
- if (columns != null) {
- column = columns[0];
- }
- long timestamp = HConstants.LATEST_TIMESTAMP;
- List<String> vals = headers.getRequestHeader("X-Row");
- if (vals != null && !vals.isEmpty()) {
- row = Bytes.toBytes(vals.get(0));
- }
- vals = headers.getRequestHeader("X-Column");
- if (vals != null && !vals.isEmpty()) {
- column = Bytes.toBytes(vals.get(0));
- }
- vals = headers.getRequestHeader("X-Timestamp");
- if (vals != null && !vals.isEmpty()) {
- timestamp = Long.valueOf(vals.get(0));
- }
- if (column == null) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Column found to be null." + CRLF)
- .build();
- }
- Put put = new Put(row);
- byte parts[][] = KeyValue.parseColumn(column);
- if (parts.length != 2) {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- put.addImmutable(parts[0], parts[1], timestamp, message);
- table = servlet.getTable(tableResource.getName());
- table.put(put);
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + put.toString());
- }
- servlet.getMetrics().incrementSucessfulPutRequests(1);
- return Response.ok().build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return processException(e);
- } finally {
- if (table != null) try {
- table.close();
- } catch (IOException ioe) {
- LOG.debug(ioe);
- }
- }
- }
-
- @PUT
- @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response put(final CellSetModel model,
- final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath()
- + " " + uriInfo.getQueryParameters());
- }
- return update(model, true);
- }
-
- @PUT
- @Consumes(MIMETYPE_BINARY)
- public Response putBinary(final byte[] message,
- final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath() + " as "+ MIMETYPE_BINARY);
- }
- return updateBinary(message, headers, true);
- }
-
- @POST
- @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response post(final CellSetModel model,
- final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath()
- + " " + uriInfo.getQueryParameters());
- }
- return update(model, false);
- }
-
- @POST
- @Consumes(MIMETYPE_BINARY)
- public Response postBinary(final byte[] message,
- final @Context UriInfo uriInfo, final @Context HttpHeaders headers) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath() + " as "+MIMETYPE_BINARY);
- }
- return updateBinary(message, headers, false);
- }
-
- @DELETE
- public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- if (servlet.isReadOnly()) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
- .build();
- }
- Delete delete = null;
- if (rowspec.hasTimestamp())
- delete = new Delete(rowspec.getRow(), rowspec.getTimestamp());
- else
- delete = new Delete(rowspec.getRow());
-
- for (byte[] column: rowspec.getColumns()) {
- byte[][] split = KeyValue.parseColumn(column);
- if (rowspec.hasTimestamp()) {
- if (split.length == 1) {
- delete.deleteFamily(split[0], rowspec.getTimestamp());
- } else if (split.length == 2) {
- delete.deleteColumns(split[0], split[1], rowspec.getTimestamp());
- } else {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- } else {
- if (split.length == 1) {
- delete.deleteFamily(split[0]);
- } else if (split.length == 2) {
- delete.deleteColumns(split[0], split[1]);
- } else {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- }
- }
- HTableInterface table = null;
- try {
- table = servlet.getTable(tableResource.getName());
- table.delete(delete);
- servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + delete.toString());
- }
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return processException(e);
- } finally {
- if (table != null) try {
- table.close();
- } catch (IOException ioe) {
- LOG.debug(ioe);
- }
- }
- return Response.ok().build();
- }
-
- /**
- * Validates the input request parameters, parses columns from CellSetModel,
- * and invokes checkAndPut on HTable.
- *
- * @param model instance of CellSetModel
- * @return Response 200 OK, 304 Not modified, 400 Bad request
- */
- Response checkAndPut(final CellSetModel model) {
- HTableInterface table = null;
- try {
- table = servlet.getTable(tableResource.getName());
- if (model.getRows().size() != 1) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
- .entity("Bad request: Number of rows specified is not 1." + CRLF).build();
- }
-
- RowModel rowModel = model.getRows().get(0);
- byte[] key = rowModel.getKey();
- if (key == null) {
- key = rowspec.getRow();
- }
-
- List<CellModel> cellModels = rowModel.getCells();
- int cellModelCount = cellModels.size();
- if (key == null || cellModelCount <= 1) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response
- .status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT)
- .entity(
- "Bad request: Either row key is null or no data found for columns specified." + CRLF)
- .build();
- }
-
- Put put = new Put(key);
- boolean retValue;
- CellModel valueToCheckCell = cellModels.get(cellModelCount - 1);
- byte[] valueToCheckColumn = valueToCheckCell.getColumn();
- byte[][] valueToPutParts = KeyValue.parseColumn(valueToCheckColumn);
- if (valueToPutParts.length == 2 && valueToPutParts[1].length > 0) {
- CellModel valueToPutCell = null;
- for (int i = 0, n = cellModelCount - 1; i < n ; i++) {
- if(Bytes.equals(cellModels.get(i).getColumn(),
- valueToCheckCell.getColumn())) {
- valueToPutCell = cellModels.get(i);
- break;
- }
- }
- if (valueToPutCell == null) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST).type(MIMETYPE_TEXT)
- .entity("Bad request: The column to put and check do not match." + CRLF).build();
- } else {
- put.addImmutable(valueToPutParts[0], valueToPutParts[1], valueToPutCell.getTimestamp(),
- valueToPutCell.getValue());
- retValue = table.checkAndPut(key, valueToPutParts[0], valueToPutParts[1],
- valueToCheckCell.getValue(), put);
- }
- } else {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Column incorrectly specified." + CRLF)
- .build();
- }
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-PUT " + put.toString() + ", returns " + retValue);
- }
- if (!retValue) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return Response.status(Response.Status.NOT_MODIFIED)
- .type(MIMETYPE_TEXT).entity("Value not Modified" + CRLF)
- .build();
- }
- table.flushCommits();
- ResponseBuilder response = Response.ok();
- servlet.getMetrics().incrementSucessfulPutRequests(1);
- return response.build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- return processException(e);
- } finally {
- if (table != null) try {
- table.close();
- } catch (IOException ioe) {
- LOG.debug("Exception received while closing the table", ioe);
- }
- }
- }
-
- /**
- * Validates the input request parameters, parses columns from CellSetModel,
- * and invokes checkAndDelete on HTable.
- *
- * @param model instance of CellSetModel
- * @return Response 200 OK, 304 Not modified, 400 Bad request
- */
- Response checkAndDelete(final CellSetModel model) {
- HTableInterface table = null;
- Delete delete = null;
- try {
- table = servlet.getTable(tableResource.getName());
- if (model.getRows().size() != 1) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- RowModel rowModel = model.getRows().get(0);
- byte[] key = rowModel.getKey();
- if (key == null) {
- key = rowspec.getRow();
- }
- if (key == null) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Row key found to be null." + CRLF)
- .build();
- }
-
- delete = new Delete(key);
- boolean retValue;
- CellModel valueToDeleteCell = rowModel.getCells().get(0);
- byte[] valueToDeleteColumn = valueToDeleteCell.getColumn();
- if (valueToDeleteColumn == null) {
- try {
- valueToDeleteColumn = rowspec.getColumns()[0];
- } catch (final ArrayIndexOutOfBoundsException e) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Column not specified for check." + CRLF)
- .build();
- }
- }
- byte[][] parts = KeyValue.parseColumn(valueToDeleteColumn);
- if (parts.length == 2) {
- if (parts[1].length != 0) {
- delete.deleteColumns(parts[0], parts[1]);
- retValue = table.checkAndDelete(key, parts[0], parts[1],
- valueToDeleteCell.getValue(), delete);
- } else {
- // The case of empty qualifier.
- delete.deleteColumns(parts[0], Bytes.toBytes(StringUtils.EMPTY));
- retValue = table.checkAndDelete(key, parts[0], Bytes.toBytes(StringUtils.EMPTY),
- valueToDeleteCell.getValue(), delete);
- }
- } else {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request: Column incorrectly specified." + CRLF)
- .build();
- }
- delete.deleteColumns(parts[0], parts[1]);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("CHECK-AND-DELETE " + delete.toString() + ", returns "
- + retValue);
- }
-
- if (!retValue) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return Response.status(Response.Status.NOT_MODIFIED)
- .type(MIMETYPE_TEXT).entity(" Delete check failed." + CRLF)
- .build();
- }
- table.flushCommits();
- ResponseBuilder response = Response.ok();
- servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- return response.build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- return processException(e);
- } finally {
- if (table != null) try {
- table.close();
- } catch (IOException ioe) {
- LOG.debug("Exception received while closing the table", ioe);
- }
- }
- }
-}
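
updateBinary() above accepts a raw cell value as the request body and lets the X-Row, X-Column, and X-Timestamp headers override the path-derived coordinates. A hedged client sketch of a binary PUT; the gateway address, table, and cell coordinates are assumptions:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical client: assumes a REST gateway at localhost:8080, a table "t1",
// and a column family "cf"; all coordinates and values are illustrative.
public class BinaryPutSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/t1/row1/cf:a");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setDoOutput(true);
    // updateBinary() stores the raw body as the cell value; X-Row, X-Column and
    // X-Timestamp headers, when present, override the coordinates from the path.
    conn.setRequestProperty("Content-Type", "application/octet-stream");
    conn.setRequestProperty("X-Timestamp", "1412900000000");
    try (OutputStream out = conn.getOutputStream()) {
      out.write("value1".getBytes("UTF-8"));
    }
    System.out.println("HTTP " + conn.getResponseCode());
    conn.disconnect();
  }
}
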
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
deleted file mode 100644
index b9492dd..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.NoSuchElementException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.util.StringUtils;
-
-@InterfaceAudience.Private
-public class RowResultGenerator extends ResultGenerator {
- private static final Log LOG = LogFactory.getLog(RowResultGenerator.class);
-
- private Iterator<Cell> valuesI;
- private Cell cache;
-
- public RowResultGenerator(final String tableName, final RowSpec rowspec,
- final Filter filter, final boolean cacheBlocks)
- throws IllegalArgumentException, IOException {
- HTableInterface table = RESTServlet.getInstance().getTable(tableName);
- try {
- Get get = new Get(rowspec.getRow());
- if (rowspec.hasColumns()) {
- for (byte[] col: rowspec.getColumns()) {
- byte[][] split = KeyValue.parseColumn(col);
- if (split.length == 1) {
- get.addFamily(split[0]);
- } else if (split.length == 2) {
- get.addColumn(split[0], split[1]);
- } else {
- throw new IllegalArgumentException("Invalid column specifier.");
- }
- }
- }
- get.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
- get.setMaxVersions(rowspec.getMaxVersions());
- if (filter != null) {
- get.setFilter(filter);
- }
- get.setCacheBlocks(cacheBlocks);
- Result result = table.get(get);
- if (result != null && !result.isEmpty()) {
- valuesI = result.listCells().iterator();
- }
- } catch (DoNotRetryIOException e) {
- // Warn here because Stargate will return 404 in the case if multiple
- // column families were specified but one did not exist -- currently
- // HBase will fail the whole Get.
- // Specifying multiple columns in a URI should be uncommon usage but
- // help to avoid confusion by leaving a record of what happened here in
- // the log.
- LOG.warn(StringUtils.stringifyException(e));
- } finally {
- table.close();
- }
- }
-
- public void close() {
- }
-
- public boolean hasNext() {
- if (cache != null) {
- return true;
- }
- if (valuesI == null) {
- return false;
- }
- return valuesI.hasNext();
- }
-
- public Cell next() {
- if (cache != null) {
- Cell kv = cache;
- cache = null;
- return kv;
- }
- if (valuesI == null) {
- return null;
- }
- try {
- return valuesI.next();
- } catch (NoSuchElementException e) {
- return null;
- }
- }
-
- public void putBack(Cell kv) {
- this.cache = kv;
- }
-
- public void remove() {
- throw new UnsupportedOperationException("remove not supported");
- }
-}
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
deleted file mode 100644
index b6c1ca8..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/RowSpec.java
+++ /dev/null
@@ -1,407 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.UnsupportedEncodingException;
-import java.net.URLDecoder;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.TreeSet;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Parses a path based row/column/timestamp specification into its component
- * elements.
- * <p>
- *
- */
-@InterfaceAudience.Private
-public class RowSpec {
- public static final long DEFAULT_START_TIMESTAMP = 0;
- public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE;
-
- private byte[] row = HConstants.EMPTY_START_ROW;
- private byte[] endRow = null;
- private TreeSet<byte[]> columns =
- new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
- private List<String> labels = new ArrayList<String>();
- private long startTime = DEFAULT_START_TIMESTAMP;
- private long endTime = DEFAULT_END_TIMESTAMP;
- private int maxVersions = 1;
- private int maxValues = Integer.MAX_VALUE;
-
- public RowSpec(String path) throws IllegalArgumentException {
- int i = 0;
- while (path.charAt(i) == '/') {
- i++;
- }
- i = parseRowKeys(path, i);
- i = parseColumns(path, i);
- i = parseTimestamp(path, i);
- i = parseQueryParams(path, i);
- }
-
- private int parseRowKeys(final String path, int i)
- throws IllegalArgumentException {
- String startRow = null, endRow = null;
- try {
- StringBuilder sb = new StringBuilder();
- char c;
- while (i < path.length() && (c = path.charAt(i)) != '/') {
- sb.append(c);
- i++;
- }
- i++;
- String row = startRow = sb.toString();
- int idx = startRow.indexOf(',');
- if (idx != -1) {
- startRow = URLDecoder.decode(row.substring(0, idx),
- HConstants.UTF8_ENCODING);
- endRow = URLDecoder.decode(row.substring(idx + 1),
- HConstants.UTF8_ENCODING);
- } else {
- startRow = URLDecoder.decode(row, HConstants.UTF8_ENCODING);
- }
- } catch (IndexOutOfBoundsException e) {
- throw new IllegalArgumentException(e);
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- // HBase does not support wildcards on row keys so we will emulate a
- // suffix glob by synthesizing appropriate start and end row keys for
- // table scanning
- if (startRow.charAt(startRow.length() - 1) == '*') {
- if (endRow != null)
- throw new IllegalArgumentException("invalid path: start row "+
- "specified with wildcard");
- this.row = Bytes.toBytes(startRow.substring(0,
- startRow.lastIndexOf("*")));
- this.endRow = new byte[this.row.length + 1];
- System.arraycopy(this.row, 0, this.endRow, 0, this.row.length);
- this.endRow[this.row.length] = (byte)255;
- } else {
- this.row = Bytes.toBytes(startRow.toString());
- if (endRow != null) {
- this.endRow = Bytes.toBytes(endRow.toString());
- }
- }
- return i;
- }
-
- private int parseColumns(final String path, int i) throws IllegalArgumentException {
- if (i >= path.length()) {
- return i;
- }
- try {
- char c;
- StringBuilder column = new StringBuilder();
- while (i < path.length() && (c = path.charAt(i)) != '/') {
- if (c == ',') {
- if (column.length() < 1) {
- throw new IllegalArgumentException("invalid path");
- }
- String s = URLDecoder.decode(column.toString(), HConstants.UTF8_ENCODING);
- this.columns.add(Bytes.toBytes(s));
- column.setLength(0);
- i++;
- continue;
- }
- column.append(c);
- i++;
- }
- i++;
- // trailing list entry
- if (column.length() > 0) {
- String s = URLDecoder.decode(column.toString(), HConstants.UTF8_ENCODING);
- this.columns.add(Bytes.toBytes(s));
- }
- } catch (IndexOutOfBoundsException e) {
- throw new IllegalArgumentException(e);
- } catch (UnsupportedEncodingException e) {
- // shouldn't happen
- throw new RuntimeException(e);
- }
- return i;
- }
-
- private int parseTimestamp(final String path, int i)
- throws IllegalArgumentException {
- if (i >= path.length()) {
- return i;
- }
- long time0 = 0, time1 = 0;
- try {
- char c = 0;
- StringBuilder stamp = new StringBuilder();
- while (i < path.length()) {
- c = path.charAt(i);
- if (c == '/' || c == ',') {
- break;
- }
- stamp.append(c);
- i++;
- }
- try {
- time0 = Long.valueOf(URLDecoder.decode(stamp.toString(),
- HConstants.UTF8_ENCODING));
- } catch (NumberFormatException e) {
- throw new IllegalArgumentException(e);
- }
- if (c == ',') {
- stamp = new StringBuilder();
- i++;
- while (i < path.length() && ((c = path.charAt(i)) != '/')) {
- stamp.append(c);
- i++;
- }
- try {
- time1 = Long.valueOf(URLDecoder.decode(stamp.toString(),
- HConstants.UTF8_ENCODING));
- } catch (NumberFormatException e) {
- throw new IllegalArgumentException(e);
- }
- }
- if (c == '/') {
- i++;
- }
- } catch (IndexOutOfBoundsException e) {
- throw new IllegalArgumentException(e);
- } catch (UnsupportedEncodingException e) {
- // shouldn't happen
- throw new RuntimeException(e);
- }
- if (time1 != 0) {
- startTime = time0;
- endTime = time1;
- } else {
- endTime = time0;
- }
- return i;
- }
-
- private int parseQueryParams(final String path, int i) {
- if (i >= path.length()) {
- return i;
- }
- StringBuilder query = new StringBuilder();
- try {
- query.append(URLDecoder.decode(path.substring(i),
- HConstants.UTF8_ENCODING));
- } catch (UnsupportedEncodingException e) {
- // should not happen
- throw new RuntimeException(e);
- }
- i += query.length();
- int j = 0;
- while (j < query.length()) {
- char c = query.charAt(j);
- if (c != '?' && c != '&') {
- break;
- }
- if (++j > query.length()) {
- throw new IllegalArgumentException("malformed query parameter");
- }
- char what = query.charAt(j);
- if (++j > query.length()) {
- break;
- }
- c = query.charAt(j);
- if (c != '=') {
- throw new IllegalArgumentException("malformed query parameter");
- }
- if (++j > query.length()) {
- break;
- }
- switch (what) {
- case 'm': {
- StringBuilder sb = new StringBuilder();
- while (j <= query.length()) {
- c = query.charAt(j);
- if (c < '0' || c > '9') {
- j--;
- break;
- }
- sb.append(c);
- }
- maxVersions = Integer.valueOf(sb.toString());
- } break;
- case 'n': {
- StringBuilder sb = new StringBuilder();
- while (j <= query.length()) {
- c = query.charAt(j);
- if (c < '0' || c > '9') {
- j--;
- break;
- }
- sb.append(c);
- }
- maxValues = Integer.valueOf(sb.toString());
- } break;
- default:
- throw new IllegalArgumentException("unknown parameter '" + c + "'");
- }
- }
- return i;
- }
-
- public RowSpec(byte[] startRow, byte[] endRow, byte[][] columns,
- long startTime, long endTime, int maxVersions) {
- this.row = startRow;
- this.endRow = endRow;
- if (columns != null) {
- Collections.addAll(this.columns, columns);
- }
- this.startTime = startTime;
- this.endTime = endTime;
- this.maxVersions = maxVersions;
- }
-
- public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> columns,
- long startTime, long endTime, int maxVersions, Collection<String> labels) {
- this(startRow, endRow, columns, startTime, endTime, maxVersions);
- if(labels != null) {
- this.labels.addAll(labels);
- }
- }
- public RowSpec(byte[] startRow, byte[] endRow, Collection<byte[]> columns,
- long startTime, long endTime, int maxVersions) {
- this.row = startRow;
- this.endRow = endRow;
- if (columns != null) {
- this.columns.addAll(columns);
- }
- this.startTime = startTime;
- this.endTime = endTime;
- this.maxVersions = maxVersions;
- }
-
- public boolean isSingleRow() {
- return endRow == null;
- }
-
- public int getMaxVersions() {
- return maxVersions;
- }
-
- public void setMaxVersions(final int maxVersions) {
- this.maxVersions = maxVersions;
- }
-
- public int getMaxValues() {
- return maxValues;
- }
-
- public void setMaxValues(final int maxValues) {
- this.maxValues = maxValues;
- }
-
- public boolean hasColumns() {
- return !columns.isEmpty();
- }
-
- public boolean hasLabels() {
- return !labels.isEmpty();
- }
-
- public byte[] getRow() {
- return row;
- }
-
- public byte[] getStartRow() {
- return row;
- }
-
- public boolean hasEndRow() {
- return endRow != null;
- }
-
- public byte[] getEndRow() {
- return endRow;
- }
-
- public void addColumn(final byte[] column) {
- columns.add(column);
- }
-
- public byte[][] getColumns() {
- return columns.toArray(new byte[columns.size()][]);
- }
-
- public List<String> getLabels() {
- return labels;
- }
-
- public boolean hasTimestamp() {
- return (startTime == 0) && (endTime != Long.MAX_VALUE);
- }
-
- public long getTimestamp() {
- return endTime;
- }
-
- public long getStartTime() {
- return startTime;
- }
-
- public void setStartTime(final long startTime) {
- this.startTime = startTime;
- }
-
- public long getEndTime() {
- return endTime;
- }
-
- public void setEndTime(long endTime) {
- this.endTime = endTime;
- }
-
- public String toString() {
- StringBuilder result = new StringBuilder();
- result.append("{startRow => '");
- if (row != null) {
- result.append(Bytes.toString(row));
- }
- result.append("', endRow => '");
- if (endRow != null) {
- result.append(Bytes.toString(endRow));
- }
- result.append("', columns => [");
- for (byte[] col: columns) {
- result.append(" '");
- result.append(Bytes.toString(col));
- result.append("'");
- }
- result.append(" ], startTime => ");
- result.append(Long.toString(startTime));
- result.append(", endTime => ");
- result.append(Long.toString(endTime));
- result.append(", maxVersions => ");
- result.append(Integer.toString(maxVersions));
- result.append(", maxValues => ");
- result.append(Integer.toString(maxValues));
- result.append("}");
- return result.toString();
- }
-}
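
RowSpec above parses a path of the form <row or startRow,endRow>/<columns>/<timestamp or start,end>, optionally followed by query parameters. A small sketch, using only the getters defined above, with illustrative values:

package org.apache.hadoop.hbase.rest;

import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical sketch of the path grammar parsed above; values are illustrative.
public class RowSpecParseSketch {
  public static void main(String[] args) {
    RowSpec spec = new RowSpec("/row1,row9/cf:a,cf:b/100,200");
    System.out.println(Bytes.toString(spec.getStartRow()));   // row1
    System.out.println(Bytes.toString(spec.getEndRow()));     // row9
    System.out.println(spec.getColumns().length);             // 2 (cf:a, cf:b)
    System.out.println(spec.getStartTime());                  // 100
    System.out.println(spec.getEndTime());                    // 200
    System.out.println(spec.isSingleRow());                   // false: end row given
  }
}
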
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
deleted file mode 100644
index ffb2fae..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerInstanceResource.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-
-import javax.ws.rs.DELETE;
-import javax.ws.rs.GET;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.CacheControl;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.UriInfo;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.rest.model.CellModel;
-import org.apache.hadoop.hbase.rest.model.CellSetModel;
-import org.apache.hadoop.hbase.rest.model.RowModel;
-import org.apache.hadoop.hbase.util.Base64;
-import org.apache.hadoop.hbase.util.Bytes;
-
-@InterfaceAudience.Private
-public class ScannerInstanceResource extends ResourceBase {
- private static final Log LOG =
- LogFactory.getLog(ScannerInstanceResource.class);
-
- static CacheControl cacheControl;
- static {
- cacheControl = new CacheControl();
- cacheControl.setNoCache(true);
- cacheControl.setNoTransform(false);
- }
-
- ResultGenerator generator = null;
- String id = null;
- int batch = 1;
-
- public ScannerInstanceResource() throws IOException { }
-
- public ScannerInstanceResource(String table, String id,
- ResultGenerator generator, int batch) throws IOException {
- this.id = id;
- this.generator = generator;
- this.batch = batch;
- }
-
- @GET
- @Produces({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response get(final @Context UriInfo uriInfo,
- @QueryParam("n") int maxRows, final @QueryParam("c") int maxValues) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- if (generator == null) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
- .build();
- }
- CellSetModel model = new CellSetModel();
- RowModel rowModel = null;
- byte[] rowKey = null;
- int limit = batch;
- if (maxValues > 0) {
- limit = maxValues;
- }
- int count = limit;
- do {
- Cell value = null;
- try {
- value = generator.next();
- } catch (IllegalStateException e) {
- if (ScannerResource.delete(id)) {
- servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- } else {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- }
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.GONE)
- .type(MIMETYPE_TEXT).entity("Gone" + CRLF)
- .build();
- }
- if (value == null) {
- LOG.info("generator exhausted");
- // respond with 204 (No Content) if an empty cell set would be
- // returned
- if (count == limit) {
- return Response.noContent().build();
- }
- break;
- }
- if (rowKey == null) {
- rowKey = CellUtil.cloneRow(value);
- rowModel = new RowModel(rowKey);
- }
- if (!Bytes.equals(CellUtil.cloneRow(value), rowKey)) {
- // if maxRows was given as a query param, stop if we would exceed the
- // specified number of rows
- if (maxRows > 0) {
- if (--maxRows == 0) {
- generator.putBack(value);
- break;
- }
- }
- model.addRow(rowModel);
- rowKey = CellUtil.cloneRow(value);
- rowModel = new RowModel(rowKey);
- }
- rowModel.addCell(
- new CellModel(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value),
- value.getTimestamp(), CellUtil.cloneValue(value)));
- } while (--count > 0);
- model.addRow(rowModel);
- ResponseBuilder response = Response.ok(model);
- response.cacheControl(cacheControl);
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return response.build();
- }
-
- @GET
- @Produces(MIMETYPE_BINARY)
- public Response getBinary(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("GET " + uriInfo.getAbsolutePath() + " as " +
- MIMETYPE_BINARY);
- }
- servlet.getMetrics().incrementRequests(1);
- try {
- Cell value = generator.next();
- if (value == null) {
- LOG.info("generator exhausted");
- return Response.noContent().build();
- }
- ResponseBuilder response = Response.ok(CellUtil.cloneValue(value));
- response.cacheControl(cacheControl);
- response.header("X-Row", Base64.encodeBytes(CellUtil.cloneRow(value)));
- response.header("X-Column",
- Base64.encodeBytes(
- KeyValue.makeColumn(CellUtil.cloneFamily(value), CellUtil.cloneQualifier(value))));
- response.header("X-Timestamp", value.getTimestamp());
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- return response.build();
- } catch (IllegalStateException e) {
- if (ScannerResource.delete(id)) {
- servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- } else {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- }
- servlet.getMetrics().incrementFailedGetRequests(1);
- return Response.status(Response.Status.GONE)
- .type(MIMETYPE_TEXT).entity("Gone" + CRLF)
- .build();
- }
- }
-
- @DELETE
- public Response delete(final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("DELETE " + uriInfo.getAbsolutePath());
- }
- servlet.getMetrics().incrementRequests(1);
- if (servlet.isReadOnly()) {
- return Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
- .build();
- }
- if (ScannerResource.delete(id)) {
- servlet.getMetrics().incrementSucessfulDeleteRequests(1);
- } else {
- servlet.getMetrics().incrementFailedDeleteRequests(1);
- }
- return Response.ok().build();
- }
-}
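For reference, a sketch of driving these scanner-instance endpoints over plain HTTP. The host, table name and scanner id are made-up examples; a 200 carries a cell set, a 204 signals that the scanner is exhausted, and a 410 signals that the scanner has been removed, as implemented above:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class ScannerInstanceClientSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical scanner URI, e.g. as returned when the scanner was created.
    URL scannerUrl = new URL("http://localhost:8080/mytable/scanner/140979969abc");

    // Ask for at most 10 cells ("c" maps to maxValues above) as an XML cell set.
    HttpURLConnection get = (HttpURLConnection) new URL(scannerUrl + "?c=10").openConnection();
    get.setRequestProperty("Accept", "text/xml");
    int rc = get.getResponseCode();
    if (rc == 200) {
      try (InputStream in = get.getInputStream()) {
        // parse the CellSetModel document here
      }
    } else if (rc == 204) {
      System.out.println("scanner exhausted");
    }
    get.disconnect();

    // Binary mode returns one cell value per request, with row/column/timestamp
    // carried in the X-Row, X-Column (Base64) and X-Timestamp response headers.
    HttpURLConnection bin = (HttpURLConnection) scannerUrl.openConnection();
    bin.setRequestProperty("Accept", "application/octet-stream");
    if (bin.getResponseCode() == 200) {
      System.out.println("row (base64): " + bin.getHeaderField("X-Row"));
    }
    bin.disconnect();

    // Finally, release the server-side scanner.
    HttpURLConnection del = (HttpURLConnection) scannerUrl.openConnection();
    del.setRequestMethod("DELETE");
    del.getResponseCode();
    del.disconnect();
  }
}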
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
deleted file mode 100644
index 6c424ce..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResource.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.net.URI;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.ws.rs.Consumes;
-import javax.ws.rs.POST;
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.core.UriBuilder;
-import javax.ws.rs.core.UriInfo;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.rest.model.ScannerModel;
-
-@InterfaceAudience.Private
-public class ScannerResource extends ResourceBase {
-
- private static final Log LOG = LogFactory.getLog(ScannerResource.class);
-
- static final Map<String,ScannerInstanceResource> scanners =
- Collections.synchronizedMap(new HashMap<String,ScannerInstanceResource>());
-
- TableResource tableResource;
-
- /**
-  * Constructor
-  * @param tableResource the table whose scanners are managed by this resource
-  * @throws IOException
-  */
- public ScannerResource(TableResource tableResource) throws IOException {
- super();
- this.tableResource = tableResource;
- }
-
- static boolean delete(final String id) {
- ScannerInstanceResource instance = scanners.remove(id);
- if (instance != null) {
- instance.generator.close();
- return true;
- } else {
- return false;
- }
- }
-
- Response update(final ScannerModel model, final boolean replace,
- final UriInfo uriInfo) {
- servlet.getMetrics().incrementRequests(1);
- if (servlet.isReadOnly()) {
- return Response.status(Response.Status.FORBIDDEN)
- .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
- .build();
- }
- byte[] endRow = model.hasEndRow() ? model.getEndRow() : null;
- RowSpec spec = null;
- if (model.getLabels() != null) {
- spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
- model.getEndTime(), model.getMaxVersions(), model.getLabels());
- } else {
- spec = new RowSpec(model.getStartRow(), endRow, model.getColumns(), model.getStartTime(),
- model.getEndTime(), model.getMaxVersions());
- }
- MultivaluedMap<String, String> params = uriInfo.getQueryParameters();
-
- try {
- Filter filter = ScannerResultGenerator.buildFilterFromModel(model);
- String tableName = tableResource.getName();
- ScannerResultGenerator gen =
- new ScannerResultGenerator(tableName, spec, filter, model.getCaching(),
- model.getCacheBlocks());
- String id = gen.getID();
- ScannerInstanceResource instance =
- new ScannerInstanceResource(tableName, id, gen, model.getBatch());
- scanners.put(id, instance);
- if (LOG.isDebugEnabled()) {
- LOG.debug("new scanner: " + id);
- }
- UriBuilder builder = uriInfo.getAbsolutePathBuilder();
- URI uri = builder.path(id).build();
- servlet.getMetrics().incrementSucessfulPutRequests(1);
- return Response.created(uri).build();
- } catch (Exception e) {
- servlet.getMetrics().incrementFailedPutRequests(1);
- if (e instanceof TableNotFoundException) {
- return Response.status(Response.Status.NOT_FOUND)
- .type(MIMETYPE_TEXT).entity("Not found" + CRLF)
- .build();
- } else if (e instanceof RuntimeException) {
- return Response.status(Response.Status.BAD_REQUEST)
- .type(MIMETYPE_TEXT).entity("Bad request" + CRLF)
- .build();
- }
- return Response.status(Response.Status.SERVICE_UNAVAILABLE)
- .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
- .build();
- }
- }
-
- @PUT
- @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response put(final ScannerModel model,
- final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("PUT " + uriInfo.getAbsolutePath());
- }
- return update(model, true, uriInfo);
- }
-
- @POST
- @Consumes({MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_PROTOBUF,
- MIMETYPE_PROTOBUF_IETF})
- public Response post(final ScannerModel model,
- final @Context UriInfo uriInfo) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("POST " + uriInfo.getAbsolutePath());
- }
- return update(model, false, uriInfo);
- }
-
- @Path("{scanner: .+}")
- public ScannerInstanceResource getScannerInstanceResource(
- final @PathParam("scanner") String id) throws IOException {
- ScannerInstanceResource instance = scanners.get(id);
- if (instance == null) {
- servlet.getMetrics().incrementFailedGetRequests(1);
- return new ScannerInstanceResource();
- } else {
- servlet.getMetrics().incrementSucessfulGetRequests(1);
- }
- return instance;
- }
-}
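For reference, a sketch of creating a scanner through this resource: a ScannerModel document is POSTed (or PUT) to /<table>/scanner and the new scanner's URI comes back in the Location header of the 201 Created response built by update() above. The host, table name and the minimal XML body are illustrative assumptions:

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class ScannerCreateSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:8080/mytable/scanner");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setDoOutput(true);
    conn.setRequestProperty("Content-Type", "text/xml");
    // Minimal scanner definition: batch size only; start/end row, columns and a
    // time range could be added to the same document (ScannerModel JAXB mapping).
    byte[] body = "<Scanner batch=\"10\"/>".getBytes(StandardCharsets.UTF_8);
    try (OutputStream out = conn.getOutputStream()) {
      out.write(body);
    }
    if (conn.getResponseCode() == 201) {
      System.out.println("scanner created at " + conn.getHeaderField("Location"));
    }
    conn.disconnect();
  }
}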
http://git-wip-us.apache.org/repos/asf/hbase/blob/876617bd/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
deleted file mode 100644
index 055c971..0000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/rest/ScannerResultGenerator.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.rest;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.UnknownScannerException;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.rest.model.ScannerModel;
-import org.apache.hadoop.hbase.security.visibility.Authorizations;
-import org.apache.hadoop.util.StringUtils;
-
-@InterfaceAudience.Private
-public class ScannerResultGenerator extends ResultGenerator {
-
- private static final Log LOG =
- LogFactory.getLog(ScannerResultGenerator.class);
-
- public static Filter buildFilterFromModel(final ScannerModel model)
- throws Exception {
- String filter = model.getFilter();
- if (filter == null || filter.length() == 0) {
- return null;
- }
- return buildFilter(filter);
- }
-
- private String id;
- private Iterator<Cell> rowI;
- private Cell cache;
- private ResultScanner scanner;
- private Result cached;
-
- public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
- final Filter filter, final boolean cacheBlocks)
- throws IllegalArgumentException, IOException {
- this(tableName, rowspec, filter, -1, cacheBlocks);
- }
-
- public ScannerResultGenerator(final String tableName, final RowSpec rowspec,
- final Filter filter, final int caching, final boolean cacheBlocks)
- throws IllegalArgumentException, IOException {
- HTableInterface table = RESTServlet.getInstance().getTable(tableName);
- try {
- Scan scan;
- if (rowspec.hasEndRow()) {
- scan = new Scan(rowspec.getStartRow(), rowspec.getEndRow());
- } else {
- scan = new Scan(rowspec.getStartRow());
- }
- if (rowspec.hasColumns()) {
- byte[][] columns = rowspec.getColumns();
- for (byte[] column: columns) {
- byte[][] split = KeyValue.parseColumn(column);
- if (split.length == 1) {
- scan.addFamily(split[0]);
- } else if (split.length == 2) {
- scan.addColumn(split[0], split[1]);
- } else {
- throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
- }
- }
- }
- scan.setTimeRange(rowspec.getStartTime(), rowspec.getEndTime());
- scan.setMaxVersions(rowspec.getMaxVersions());
- if (filter != null) {
- scan.setFilter(filter);
- }
-      if (caching > 0) {
- scan.setCaching(caching);
- }
- scan.setCacheBlocks(cacheBlocks);
- if (rowspec.hasLabels()) {
- scan.setAuthorizations(new Authorizations(rowspec.getLabels()));
- }
- scanner = table.getScanner(scan);
- cached = null;
- id = Long.toString(System.currentTimeMillis()) +
- Integer.toHexString(scanner.hashCode());
- } finally {
- table.close();
- }
- }
-
- public String getID() {
- return id;
- }
-
- public void close() {
- if (scanner != null) {
- scanner.close();
- scanner = null;
- }
- }
-
- public boolean hasNext() {
- if (cache != null) {
- return true;
- }
- if (rowI != null && rowI.hasNext()) {
- return true;
- }
- if (cached != null) {
- return true;
- }
- try {
- Result result = scanner.next();
- if (result != null && !result.isEmpty()) {
- cached = result;
- }
- } catch (UnknownScannerException e) {
- throw new IllegalArgumentException(e);
- } catch (IOException e) {
- LOG.error(StringUtils.stringifyException(e));
- }
- return cached != null;
- }
-
- public Cell next() {
- if (cache != null) {
- Cell kv = cache;
- cache = null;
- return kv;
- }
- boolean loop;
- do {
- loop = false;
- if (rowI != null) {
- if (rowI.hasNext()) {
- return rowI.next();
- } else {
- rowI = null;
- }
- }
- if (cached != null) {
- rowI = cached.listCells().iterator();
- loop = true;
- cached = null;
- } else {
- Result result = null;
- try {
- result = scanner.next();
- } catch (UnknownScannerException e) {
- throw new IllegalArgumentException(e);
- } catch (IOException e) {
- LOG.error(StringUtils.stringifyException(e));
- }
- if (result != null && !result.isEmpty()) {
- rowI = result.listCells().iterator();
- loop = true;
- }
- }
- } while (loop);
- return null;
- }
-
- public void putBack(Cell kv) {
- this.cache = kv;
- }
-
- public void remove() {
- throw new UnsupportedOperationException("remove not supported");
- }
-}
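For reference, a sketch of how a caller consumes the flattened cell stream produced by this generator, using putBack() for one cell of lookahead at a row boundary (the same pattern ScannerInstanceResource.get() uses). The class and method names of the sketch itself are illustrative, and "gen" is assumed to be an already-constructed generator:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.rest.ScannerResultGenerator;
import org.apache.hadoop.hbase.util.Bytes;

public class GeneratorConsumerSketch {
  // Read cells until the row key changes, then push the first cell of the
  // next row back so the following call starts exactly there.
  static void drainOneRow(ScannerResultGenerator gen) {
    byte[] rowKey = null;
    Cell cell;
    while ((cell = gen.next()) != null) {
      byte[] thisRow = CellUtil.cloneRow(cell);
      if (rowKey == null) {
        rowKey = thisRow;
      } else if (!Bytes.equals(thisRow, rowKey)) {
        gen.putBack(cell); // lookahead: first cell of the next row
        break;
      }
      // process the cell, e.g. CellUtil.cloneValue(cell)
    }
  }
}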