You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by md...@apache.org on 2018/05/23 18:26:01 UTC
[01/14] hbase git commit: HBASE-20593 HBase website landing page
should link to HBaseCon Asia 2018 [Forced Update!]
Repository: hbase
Updated Branches:
refs/heads/HBASE-20478 36a7cf443 -> 484e241bc (forced update)
HBASE-20593 HBase website landing page should link to HBaseCon Asia 2018
Signed-off-by: Michael Stack <st...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2c32272d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2c32272d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2c32272d
Branch: refs/heads/HBASE-20478
Commit: 2c32272dfa40dbf574343901c2ddea9319ca0bd5
Parents: 6d656b7
Author: Sean Busbey <bu...@apache.org>
Authored: Wed May 16 11:13:01 2018 -0500
Committer: Sean Busbey <bu...@apache.org>
Committed: Wed May 16 13:29:15 2018 -0500
----------------------------------------------------------------------
src/site/xdoc/index.xml | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/2c32272d/src/site/xdoc/index.xml
----------------------------------------------------------------------
diff --git a/src/site/xdoc/index.xml b/src/site/xdoc/index.xml
index d87ec55..0b54cc0 100644
--- a/src/site/xdoc/index.xml
+++ b/src/site/xdoc/index.xml
@@ -83,7 +83,8 @@ Apache HBase is an open-source, distributed, versioned, non-relational database
</section>
<section name="News">
- <p>June 18th, 2018 <a href="https://hbase.apache.org/hbasecon-2018">HBaseCon 2018</a> @ San Jose Convention Center, San Jose, CA, USA. CFP open, see site for details!</p>
+ <p>August 17th, 2018 <a href="https://hbase.apache.org/hbaseconasia-2018/">HBaseCon Asia 2018</a> @ Gehua New Century Hotel, Beijing, China. CFP open, see site for details!</p>
+ <p>June 18th, 2018 <a href="https://hbase.apache.org/hbasecon-2018">HBaseCon North America West 2018</a> @ San Jose Convention Center, San Jose, CA, USA. registration still open, see site for details!</p>
<p>August 4th, 2017 <a href="https://easychair.org/cfp/HBaseConAsia2017">HBaseCon Asia 2017</a> @ the Huawei Campus in Shenzhen, China</p>
<p>June 12th, 2017 <a href="https://easychair.org/cfp/hbasecon2017">HBaseCon2017</a> at the Crittenden Buildings on the Google Mountain View Campus</p>
<p>April 25th, 2017 <a href="https://www.meetup.com/hbaseusergroup/events/239291716/">Meetup</a> @ Visa in Palo Alto</p>
[13/14] hbase git commit: HBASE-20478 move hbaseanti import checks to
checkstyle
Posted by md...@apache.org.
HBASE-20478 move hbaseanti import checks to checkstyle
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fbc41c2e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fbc41c2e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fbc41c2e
Branch: refs/heads/HBASE-20478
Commit: fbc41c2e9f75eb118edae228665dd9bece925504
Parents: 6c1097e
Author: Mike Drob <md...@apache.org>
Authored: Thu Apr 26 20:12:07 2018 -0500
Committer: Mike Drob <md...@apache.org>
Committed: Tue May 22 12:34:43 2018 -0500
----------------------------------------------------------------------
dev-support/hbase-personality.sh | 18 ------------------
.../src/main/resources/hbase/checkstyle.xml | 7 ++++++-
pom.xml | 4 ++--
3 files changed, 8 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/fbc41c2e/dev-support/hbase-personality.sh
----------------------------------------------------------------------
diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh
index 2b1e2c3..b033645 100755
--- a/dev-support/hbase-personality.sh
+++ b/dev-support/hbase-personality.sh
@@ -631,24 +631,6 @@ function hbaseanti_patchfile
((result=result+1))
fi
- warnings=$(${GREP} -c 'import org.apache.hadoop.classification' "${patchfile}")
- if [[ ${warnings} -gt 0 ]]; then
- add_vote_table -1 hbaseanti "" "The patch appears use Hadoop classification instead of HBase."
- ((result=result+1))
- fi
-
- warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}")
- if [[ ${warnings} -gt 0 ]]; then
- add_vote_table -1 hbaseanti "" "The patch appears use Jackson 1 classes/annotations."
- ((result=result+1))
- fi
-
- warnings=$(${GREP} -cE 'org.apache.commons.logging.Log(Factory|;)' "${patchfile}")
- if [[ ${warnings} -gt 0 ]]; then
- add_vote_table -1 hbaseanti "" "The patch appears to use commons-logging instead of slf4j."
- ((result=result+1))
- fi
-
if [[ ${result} -gt 0 ]]; then
return 1
fi
http://git-wip-us.apache.org/repos/asf/hbase/blob/fbc41c2e/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
----------------------------------------------------------------------
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index c77d46b9..5b6f41f 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -85,7 +85,12 @@
org.apache.commons.collections4,
org.apache.commons.lang,
org.apache.curator.shaded,
- org.apache.htrace.shaded"/>
+ org.apache.hadoop.classification,
+ org.apache.htrace.shaded,
+ org.codehaus.jackson"/>
+ <property name="illegalClasses" value="
+ org.apache.commons.logging.Log,
+ org.apache.commons.logging.LogFactory"/>
</module>
<!-- Javadoc Checks
http://checkstyle.sourceforge.net/config_javadoc.html -->
http://git-wip-us.apache.org/repos/asf/hbase/blob/fbc41c2e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c5d8148..2feaf43 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1500,7 +1500,7 @@
<asciidoctorj.pdf.version>1.5.0-alpha.15</asciidoctorj.pdf.version>
<build.helper.maven.version>3.0.0</build.helper.maven.version>
<buildnumber.maven.version>1.4</buildnumber.maven.version>
- <checkstyle.version>6.18</checkstyle.version>
+ <checkstyle.version>8.9</checkstyle.version>
<exec.maven.version>1.6.0</exec.maven.version>
<error-prone.version>2.2.0</error-prone.version>
<findbugs-annotations>1.3.9-1</findbugs-annotations>
@@ -1509,7 +1509,7 @@
<lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
<maven.antrun.version>1.8</maven.antrun.version>
<maven.bundle.version>3.3.0</maven.bundle.version>
- <maven.checkstyle.version>2.17</maven.checkstyle.version>
+ <maven.checkstyle.version>3.0.0</maven.checkstyle.version>
<maven.compiler.version>3.6.1</maven.compiler.version>
<maven.dependency.version>3.0.1</maven.dependency.version>
<maven.eclipse.version>2.10</maven.eclipse.version>
[11/14] hbase git commit: HBASE-20579 Improve snapshot manifest copy
in ExportSnapshot
Posted by md...@apache.org.
HBASE-20579 Improve snapshot manifest copy in ExportSnapshot
Signed-off-by: tedyu <yu...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c9f8c343
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c9f8c343
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c9f8c343
Branch: refs/heads/HBASE-20478
Commit: c9f8c3436f6e38b5c7807677c5c3e7fc3e19e071
Parents: 0836b07
Author: jingyuntian <ti...@gmail.com>
Authored: Thu May 17 11:32:49 2018 +0800
Committer: tedyu <yu...@gmail.com>
Committed: Fri May 18 06:42:12 2018 -0700
----------------------------------------------------------------------
.../hadoop/hbase/snapshot/ExportSnapshot.java | 82 +++++++++++++-------
.../org/apache/hadoop/hbase/util/FSUtils.java | 43 ++++++++++
.../apache/hadoop/hbase/util/TestFSUtils.java | 27 +++++++
3 files changed, 125 insertions(+), 27 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f8c343/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
index ef67b7b..4af7dfb 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/snapshot/ExportSnapshot.java
@@ -29,6 +29,11 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.function.BiConsumer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
@@ -36,7 +41,6 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -109,6 +113,10 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
private static final String CONF_BANDWIDTH_MB = "snapshot.export.map.bandwidth.mb";
private static final String CONF_MR_JOB_NAME = "mapreduce.job.name";
protected static final String CONF_SKIP_TMP = "snapshot.export.skip.tmp";
+ private static final String CONF_COPY_MANIFEST_THREADS =
+ "snapshot.export.copy.references.threads";
+ private static final int DEFAULT_COPY_MANIFEST_THREADS =
+ Runtime.getRuntime().availableProcessors();
static class Testing {
static final String CONF_TEST_FAILURE = "test.snapshot.export.failure";
@@ -842,35 +850,52 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
SnapshotReferenceUtil.verifySnapshot(conf, fs, snapshotDir, snapshotDesc);
}
- /**
- * Set path ownership.
- */
- private void setOwner(final FileSystem fs, final Path path, final String user,
- final String group, final boolean recursive) throws IOException {
- if (user != null || group != null) {
- if (recursive && fs.isDirectory(path)) {
- for (FileStatus child : fs.listStatus(path)) {
- setOwner(fs, child.getPath(), user, group, recursive);
- }
+ private void setConfigParallel(FileSystem outputFs, List<Path> traversedPath,
+ BiConsumer<FileSystem, Path> task, Configuration conf) throws IOException {
+ ExecutorService pool = Executors
+ .newFixedThreadPool(conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
+ List<Future<Void>> futures = new ArrayList<>();
+ for (Path dstPath : traversedPath) {
+ Future<Void> future = (Future<Void>) pool.submit(() -> task.accept(outputFs, dstPath));
+ futures.add(future);
+ }
+ try {
+ for (Future<Void> future : futures) {
+ future.get();
}
- fs.setOwner(path, user, group);
+ } catch (InterruptedException | ExecutionException e) {
+ throw new IOException(e);
+ } finally {
+ pool.shutdownNow();
}
}
- /**
- * Set path permission.
- */
- private void setPermission(final FileSystem fs, final Path path, final short filesMode,
- final boolean recursive) throws IOException {
- if (filesMode > 0) {
- FsPermission perm = new FsPermission(filesMode);
- if (recursive && fs.isDirectory(path)) {
- for (FileStatus child : fs.listStatus(path)) {
- setPermission(fs, child.getPath(), filesMode, recursive);
- }
+ private void setOwnerParallel(FileSystem outputFs, String filesUser, String filesGroup,
+ Configuration conf, List<Path> traversedPath) throws IOException {
+ setConfigParallel(outputFs, traversedPath, (fs, path) -> {
+ try {
+ fs.setOwner(path, filesUser, filesGroup);
+ } catch (IOException e) {
+ throw new RuntimeException(
+ "set owner for file " + path + " to " + filesUser + ":" + filesGroup + " failed");
}
- fs.setPermission(path, perm);
+ }, conf);
+ }
+
+ private void setPermissionParallel(final FileSystem outputFs, final short filesMode,
+ final List<Path> traversedPath, final Configuration conf) throws IOException {
+ if (filesMode <= 0) {
+ return;
}
+ FsPermission perm = new FsPermission(filesMode);
+ setConfigParallel(outputFs, traversedPath, (fs, path) -> {
+ try {
+ fs.setPermission(path, perm);
+ } catch (IOException e) {
+ throw new RuntimeException(
+ "set permission for file " + path + " to " + filesMode + " failed");
+ }
+ }, conf);
}
private boolean verifyTarget = true;
@@ -1001,9 +1026,12 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
// Step 1 - Copy fs1:/.snapshot/<snapshot> to fs2:/.snapshot/.tmp/<snapshot>
// The snapshot references must be copied before the hfiles otherwise the cleaner
// will remove them because they are unreferenced.
+ List<Path> travesedPaths = new ArrayList<>();
try {
LOG.info("Copy Snapshot Manifest");
- FileUtil.copy(inputFs, snapshotDir, outputFs, initialOutputSnapshotDir, false, false, conf);
+ travesedPaths =
+ FSUtils.copyFilesParallel(inputFs, snapshotDir, outputFs, initialOutputSnapshotDir, conf,
+ conf.getInt(CONF_COPY_MANIFEST_THREADS, DEFAULT_COPY_MANIFEST_THREADS));
} catch (IOException e) {
throw new ExportSnapshotException("Failed to copy the snapshot directory: from=" +
snapshotDir + " to=" + initialOutputSnapshotDir, e);
@@ -1013,11 +1041,11 @@ public class ExportSnapshot extends AbstractHBaseTool implements Tool {
+ filesUser)
+ (filesGroup == null ? "" : ", Change the group of " + needSetOwnerDir + " to "
+ filesGroup));
- setOwner(outputFs, needSetOwnerDir, filesUser, filesGroup, true);
+ setOwnerParallel(outputFs, filesUser, filesGroup, conf, travesedPaths);
}
if (filesMode > 0) {
LOG.warn("Change the permission of " + needSetOwnerDir + " to " + filesMode);
- setPermission(outputFs, needSetOwnerDir, (short)filesMode, true);
+ setPermissionParallel(outputFs, (short)filesMode, travesedPaths, conf);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f8c343/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index b106a31..53db140 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -43,6 +43,7 @@ import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.ThreadPoolExecutor;
@@ -54,6 +55,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.permission.FsAction;
@@ -1741,4 +1743,45 @@ public abstract class FSUtils extends CommonFSUtils {
}
}
+ public static List<Path> copyFilesParallel(FileSystem srcFS, Path src, FileSystem dstFS, Path dst,
+ Configuration conf, int threads) throws IOException {
+ ExecutorService pool = Executors.newFixedThreadPool(threads);
+ List<Future<Void>> futures = new ArrayList<>();
+ List<Path> traversedPaths;
+ try {
+ traversedPaths = copyFiles(srcFS, src, dstFS, dst, conf, pool, futures);
+ for (Future<Void> future : futures) {
+ future.get();
+ }
+ } catch (ExecutionException | InterruptedException | IOException e) {
+ throw new IOException("copy snapshot reference files failed", e);
+ } finally {
+ pool.shutdownNow();
+ }
+ return traversedPaths;
+ }
+
+ private static List<Path> copyFiles(FileSystem srcFS, Path src, FileSystem dstFS, Path dst,
+ Configuration conf, ExecutorService pool, List<Future<Void>> futures) throws IOException {
+ List<Path> traversedPaths = new ArrayList<>();
+ traversedPaths.add(dst);
+ FileStatus currentFileStatus = srcFS.getFileStatus(src);
+ if (currentFileStatus.isDirectory()) {
+ if (!dstFS.mkdirs(dst)) {
+ throw new IOException("create dir failed: " + dst);
+ }
+ FileStatus[] subPaths = srcFS.listStatus(src);
+ for (FileStatus subPath : subPaths) {
+ traversedPaths.addAll(copyFiles(srcFS, subPath.getPath(), dstFS,
+ new Path(dst, subPath.getPath().getName()), conf, pool, futures));
+ }
+ } else {
+ Future<Void> future = pool.submit(() -> {
+ FileUtil.copy(srcFS, src, dstFS, dst, false, false, conf);
+ return null;
+ });
+ futures.add(future);
+ }
+ return traversedPaths;
+ }
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/c9f8c343/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
index 2718120..a862c8c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSUtils.java
@@ -26,6 +26,7 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
+import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
@@ -412,6 +413,32 @@ public class TestFSUtils {
}
}
+
+ @Test
+ public void testCopyFilesParallel() throws Exception {
+ MiniDFSCluster cluster = htu.startMiniDFSCluster(1);
+ cluster.waitActive();
+ FileSystem fs = cluster.getFileSystem();
+ Path src = new Path("/src");
+ fs.mkdirs(src);
+ for (int i = 0; i < 50; i++) {
+ WriteDataToHDFS(fs, new Path(src, String.valueOf(i)), 1024);
+ }
+ Path sub = new Path(src, "sub");
+ fs.mkdirs(sub);
+ for (int i = 0; i < 50; i++) {
+ WriteDataToHDFS(fs, new Path(sub, String.valueOf(i)), 1024);
+ }
+ Path dst = new Path("/dst");
+ List<Path> allFiles = FSUtils.copyFilesParallel(fs, src, fs, dst, conf, 4);
+
+ assertEquals(102, allFiles.size());
+ FileStatus[] list = fs.listStatus(dst);
+ assertEquals(51, list.length);
+ FileStatus[] sublist = fs.listStatus(new Path(dst, "sub"));
+ assertEquals(50, sublist.length);
+ }
+
// Below is taken from TestPread over in HDFS.
static final int blockSize = 4096;
static final long seed = 0xDEADBEEFL;
[10/14] hbase git commit: HBASE-20583 SplitLogWorker should handle
FileNotFoundException when split a wal
Posted by md...@apache.org.
HBASE-20583 SplitLogWorker should handle FileNotFoundException when split a wal
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0836b071
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0836b071
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0836b071
Branch: refs/heads/HBASE-20478
Commit: 0836b0719a77a5a4c705d8ecfb135ae49ed1b934
Parents: f917f4e
Author: Guanghao Zhang <zg...@apache.org>
Authored: Wed May 16 07:52:32 2018 +0800
Committer: Guanghao Zhang <zg...@apache.org>
Committed: Fri May 18 14:29:41 2018 +0800
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/regionserver/SplitLogWorker.java | 6 ++++++
1 file changed, 6 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/0836b071/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
index 0046b67..a1c2030 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/SplitLogWorker.java
@@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.ConnectException;
@@ -105,6 +106,11 @@ public class SplitLogWorker implements Runnable {
LOG.warn("log splitting of " + filename + " interrupted, resigning", iioe);
return Status.RESIGNED;
} catch (IOException e) {
+ if (e instanceof FileNotFoundException) {
+ // A wal file may not exist anymore. Nothing can be recovered so move on
+ LOG.warn("WAL {} does not exist anymore", filename, e);
+ return Status.DONE;
+ }
Throwable cause = e.getCause();
if (e instanceof RetriesExhaustedException && (cause instanceof NotServingRegionException
|| cause instanceof ConnectException
[04/14] hbase git commit: HBASE-20564 Tighter ByteBufferKeyValue Cell
Comparator; ADDENDUM2 Add a Test
Posted by md...@apache.org.
HBASE-20564 Tighter ByteBufferKeyValue Cell Comparator; ADDENDUM2 Add a Test
Run meta tests but using our new basis, the ByteBufferKeyValue
instead of the old byte array-backed KeyValue so we catch any
oddness in the Type processing or in the Comparator.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f4006b50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f4006b50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f4006b50
Branch: refs/heads/HBASE-20478
Commit: f4006b5039170ca1ac8adb63abbc111184e2e9ee
Parents: 6148b47
Author: Michael Stack <st...@apache.org>
Authored: Wed May 16 11:01:21 2018 -0700
Committer: Michael Stack <st...@apache.org>
Committed: Wed May 16 12:56:08 2018 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/TestCellComparator.java | 119 +++++++++++++++++++
1 file changed, 119 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/f4006b50/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
index 8652d82..a318515 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/TestCellComparator.java
@@ -21,6 +21,10 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.Set;
+import java.util.TreeSet;
+
import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -128,4 +132,119 @@ public class TestCellComparator {
assertEquals(0, comparator.compareRows(bbCell2, bbCell3));
assertTrue(comparator.compareRows(bbCell1, bbCell2) < 0);
}
+
+ /**
+ * Test meta comparisons using our new ByteBufferKeyValue Cell type, the type we use everywhere
+ * in 2.0.
+ */
+ @Test
+ public void testMetaComparisons() throws Exception {
+ long now = System.currentTimeMillis();
+
+ // Meta compares
+ Cell aaa = createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes("TestScanMultipleVersions,row_0500,1236020145502"), now));
+ Cell bbb = createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes("TestScanMultipleVersions,,99999999999999"), now));
+ CellComparator c = CellComparatorImpl.META_COMPARATOR;
+ assertTrue(c.compare(bbb, aaa) < 0);
+
+ Cell ccc = createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("TestScanMultipleVersions,,1236023996656"),
+ Bytes.toBytes("info"), Bytes.toBytes("regioninfo"), 1236024396271L,
+ (byte[])null));
+ assertTrue(c.compare(ccc, bbb) < 0);
+
+ Cell x = createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("TestScanMultipleVersions,row_0500,1236034574162"),
+ Bytes.toBytes("info"), Bytes.toBytes(""), 9223372036854775807L,
+ (byte[])null));
+ Cell y = createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("TestScanMultipleVersions,row_0500,1236034574162"),
+ Bytes.toBytes("info"), Bytes.toBytes("regioninfo"), 1236034574912L,
+ (byte[])null));
+ assertTrue(c.compare(x, y) < 0);
+ }
+
+ private static Cell createByteBufferKeyValueFromKeyValue(KeyValue kv) {
+ ByteBuffer bb = ByteBuffer.wrap(kv.getBuffer());
+ return new ByteBufferKeyValue(bb, 0, bb.remaining());
+ }
+
+ /**
+ * More tests using ByteBufferKeyValue copied over from TestKeyValue which uses old KVs only.
+ */
+ @Test
+ public void testMetaComparisons2() {
+ long now = System.currentTimeMillis();
+ CellComparator c = CellComparatorImpl.META_COMPARATOR;
+ assertTrue(c.compare(createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now)),
+ createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now))) == 0);
+ Cell a = createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now));
+ Cell b = createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,2"), now));
+ assertTrue(c.compare(a, b) < 0);
+ assertTrue(c.compare(createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,2"), now)),
+ createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",a,,0,1"), now))) > 0);
+ assertTrue(c.compare(createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,1"), now)),
+ createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,1"), now))) == 0);
+ assertTrue(c.compare(createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,1"), now)),
+ createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,2"), now))) < 0);
+ assertTrue(c.compare(createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,2"), now)),
+ createByteBufferKeyValueFromKeyValue(new KeyValue(
+ Bytes.toBytes(TableName.META_TABLE_NAME.getNameAsString()+",,1"), now))) > 0);
+ }
+
+ @Test
+ public void testBinaryKeys() throws Exception {
+ Set<Cell> set = new TreeSet<>(CellComparatorImpl.COMPARATOR);
+ final byte [] fam = Bytes.toBytes("col");
+ final byte [] qf = Bytes.toBytes("umn");
+ final byte [] nb = new byte[0];
+ Cell [] keys = {
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("aaaaa,\u0000\u0000,2"), fam, qf, 2, nb)),
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("aaaaa,\u0001,3"), fam, qf, 3, nb)),
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("aaaaa,,1"), fam, qf, 1, nb)),
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("aaaaa,\u1000,5"), fam, qf, 5, nb)),
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("aaaaa,a,4"), fam, qf, 4, nb)),
+ createByteBufferKeyValueFromKeyValue(
+ new KeyValue(Bytes.toBytes("a,a,0"), fam, qf, 0, nb)),
+ };
+ // Add to set with bad comparator
+ Collections.addAll(set, keys);
+ // This will output the keys incorrectly.
+ boolean assertion = false;
+ int count = 0;
+ try {
+ for (Cell k: set) {
+ assertTrue("count=" + count + ", " + k.toString(), count++ == k.getTimestamp());
+ }
+ } catch (AssertionError e) {
+ // Expected
+ assertion = true;
+ }
+ assertTrue(assertion);
+ // Make set with good comparator
+ set = new TreeSet<>(CellComparatorImpl.META_COMPARATOR);
+ Collections.addAll(set, keys);
+ count = 0;
+ for (Cell k: set) {
+ assertTrue("count=" + count + ", " + k.toString(), count++ == k.getTimestamp());
+ }
+ }
}
[14/14] hbase git commit: HBASE-20478 try disabling import separation
check
Posted by md...@apache.org.
HBASE-20478 try disabling import separation check
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/484e241b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/484e241b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/484e241b
Branch: refs/heads/HBASE-20478
Commit: 484e241bcf2997a0721a82ab607e27b147f5044b
Parents: fbc41c2
Author: Mike Drob <md...@apache.org>
Authored: Wed May 23 13:25:34 2018 -0500
Committer: Mike Drob <md...@apache.org>
Committed: Wed May 23 13:25:34 2018 -0500
----------------------------------------------------------------------
hbase-checkstyle/src/main/resources/hbase/checkstyle.xml | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/484e241b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
----------------------------------------------------------------------
diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
index 5b6f41f..fb4eb49 100644
--- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
+++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml
@@ -69,6 +69,7 @@
<module name="ImportOrder">
<property name="groups" value="*,org.apache.hbase.thirdparty,org.apache.hadoop.hbase.shaded"/>
<property name="option" value="top" />
+ <property name="separated" value="false"/>
<property name="ordered" value="true"/>
<property name="sortStaticImportsAlphabetically" value="true"/>
</module>
[08/14] hbase git commit: HBASE-20582 Upgrade JRuby to 9.1.17.0
Posted by md...@apache.org.
HBASE-20582 Upgrade JRuby to 9.1.17.0
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dab0e90c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dab0e90c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dab0e90c
Branch: refs/heads/HBASE-20478
Commit: dab0e90c54c6909338638ad8addafda2584bb5bd
Parents: cf529f1
Author: Josh Elser <el...@apache.org>
Authored: Wed May 16 13:07:21 2018 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Thu May 17 10:06:06 2018 -0400
----------------------------------------------------------------------
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/dab0e90c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7aede4a..c5d8148 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1467,7 +1467,7 @@
<glassfish.jsp.version>2.3.2</glassfish.jsp.version>
<glassfish.el.version>3.0.1-b08</glassfish.el.version>
<jetty.jspapi.version>6.1.14</jetty.jspapi.version>
- <jruby.version>9.1.10.0</jruby.version>
+ <jruby.version>9.1.17.0</jruby.version>
<junit.version>4.12</junit.version>
<hamcrest.version>1.3</hamcrest.version>
<htrace.version>4.2.0-incubating</htrace.version>
[02/14] hbase git commit: HBASE-20567 Pass both old and new
descriptors to pre/post hooks of modify operations for table and namespace.
Posted by md...@apache.org.
HBASE-20567 Pass both old and new descriptors to pre/post hooks of modify operations for table and namespace.
Signed-off-by: Mike Drob <md...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8c9825a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8c9825a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8c9825a0
Branch: refs/heads/HBASE-20478
Commit: 8c9825a030ace256343dc6669b6edec22e6f75fd
Parents: 2c32272
Author: Apekshit Sharma <ap...@apache.org>
Authored: Thu May 10 20:34:14 2018 -0700
Committer: Mike Drob <md...@apache.org>
Committed: Wed May 16 14:03:18 2018 -0500
----------------------------------------------------------------------
.../hbase/coprocessor/MasterObserver.java | 122 +++++++++++++++++--
.../org/apache/hadoop/hbase/master/HMaster.java | 29 +++--
.../hbase/master/MasterCoprocessorHost.java | 36 +++---
.../master/procedure/ModifyTableProcedure.java | 6 +-
4 files changed, 151 insertions(+), 42 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/8c9825a0/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
index a17bc9f..a37f21a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/MasterObserver.java
@@ -209,20 +209,67 @@ public interface MasterObserver {
* table RPC call.
* @param ctx the environment to interact with the framework and master
* @param tableName the name of the table
- * @param htd the TableDescriptor
+ * @param newDescriptor after modify operation, table will have this descriptor
+ * @deprecated Since 2.1. Will be removed in 3.0.
*/
+ @Deprecated
default void preModifyTable(final ObserverContext<MasterCoprocessorEnvironment> ctx,
- final TableName tableName, TableDescriptor htd) throws IOException {}
+ final TableName tableName, TableDescriptor newDescriptor) throws IOException {}
+
+ /**
+ * Called prior to modifying a table's properties. Called as part of modify
+ * table RPC call.
+ * @param ctx the environment to interact with the framework and master
+ * @param tableName the name of the table
+ * @param currentDescriptor current TableDescriptor of the table
+ * @param newDescriptor after modify operation, table will have this descriptor
+ */
+ default void preModifyTable(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final TableName tableName, TableDescriptor currentDescriptor, TableDescriptor newDescriptor)
+ throws IOException {
+ preModifyTable(ctx, tableName, newDescriptor);
+ }
/**
* Called after the modifyTable operation has been requested. Called as part
* of modify table RPC call.
* @param ctx the environment to interact with the framework and master
* @param tableName the name of the table
- * @param htd the TableDescriptor
+ * @param currentDescriptor current TableDescriptor of the table
+ * @deprecated Since 2.1. Will be removed in 3.0.
*/
+ @Deprecated
default void postModifyTable(final ObserverContext<MasterCoprocessorEnvironment> ctx,
- final TableName tableName, TableDescriptor htd) throws IOException {}
+ final TableName tableName, TableDescriptor currentDescriptor) throws IOException {}
+
+ /**
+ * Called after the modifyTable operation has been requested. Called as part
+ * of modify table RPC call.
+ * @param ctx the environment to interact with the framework and master
+ * @param tableName the name of the table
+ * @param oldDescriptor descriptor of table before modify operation happened
+ * @param currentDescriptor current TableDescriptor of the table
+ */
+ default void postModifyTable(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final TableName tableName, TableDescriptor oldDescriptor, TableDescriptor currentDescriptor)
+ throws IOException {
+ postModifyTable(ctx, tableName, currentDescriptor);
+ }
+
+ /**
+ * Called prior to modifying a table's properties. Called as part of modify
+ * table procedure and it is async to the modify table RPC call.
+ *
+ * @param ctx the environment to interact with the framework and master
+ * @param tableName the name of the table
+ * @param newDescriptor after modify operation, table will have this descriptor
+ * @deprecated Since 2.1. Will be removed in 3.0.
+ */
+ @Deprecated
+ default void preModifyTableAction(
+ final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final TableName tableName,
+ final TableDescriptor newDescriptor) throws IOException {}
/**
* Called prior to modifying a table's properties. Called as part of modify
@@ -230,12 +277,16 @@ public interface MasterObserver {
*
* @param ctx the environment to interact with the framework and master
* @param tableName the name of the table
- * @param htd the TableDescriptor
+ * @param currentDescriptor current TableDescriptor of the table
+ * @param newDescriptor after modify operation, table will have this descriptor
*/
default void preModifyTableAction(
final ObserverContext<MasterCoprocessorEnvironment> ctx,
final TableName tableName,
- final TableDescriptor htd) throws IOException {}
+ final TableDescriptor currentDescriptor,
+ final TableDescriptor newDescriptor) throws IOException {
+ preModifyTableAction(ctx, tableName, newDescriptor);
+ }
/**
* Called after modifying a table's properties. Called as part of modify
@@ -243,12 +294,31 @@ public interface MasterObserver {
*
* @param ctx the environment to interact with the framework and master
* @param tableName the name of the table
- * @param htd the TableDescriptor
+ * @param currentDescriptor current TableDescriptor of the table
+ * @deprecated Since 2.1. Will be removed in 3.0.
+ */
+ @Deprecated
+ default void postCompletedModifyTableAction(
+ final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ final TableName tableName,
+ final TableDescriptor currentDescriptor) throws IOException {}
+
+ /**
+ * Called after modifying a table's properties. Called as part of modify
+ * table procedure and it is async to the modify table RPC call.
+ *
+ * @param ctx the environment to interact with the framework and master
+ * @param tableName the name of the table
+ * @param oldDescriptor descriptor of table before modify operation happened
+ * @param currentDescriptor current TableDescriptor of the table
*/
default void postCompletedModifyTableAction(
final ObserverContext<MasterCoprocessorEnvironment> ctx,
final TableName tableName,
- final TableDescriptor htd) throws IOException {}
+ final TableDescriptor oldDescriptor,
+ final TableDescriptor currentDescriptor) throws IOException {
+ postCompletedModifyTableAction(ctx, tableName, currentDescriptor);
+ }
/**
* Called prior to enabling a table. Called as part of enable table RPC call.
@@ -817,18 +887,46 @@ public interface MasterObserver {
/**
* Called prior to modifying a namespace's properties.
* @param ctx the environment to interact with the framework and master
- * @param ns the NamespaceDescriptor
+ * @param newNsDescriptor after modify operation, namespace will have this descriptor
+ * @deprecated Since 2.1. Will be removed in 3.0.
*/
+ @Deprecated
default void preModifyNamespace(final ObserverContext<MasterCoprocessorEnvironment> ctx,
- NamespaceDescriptor ns) throws IOException {}
+ NamespaceDescriptor newNsDescriptor) throws IOException {}
+
+ /**
+ * Called prior to modifying a namespace's properties.
+ * @param ctx the environment to interact with the framework and master
+ * @param currentNsDescriptor current NamespaceDescriptor of the namespace
+ * @param newNsDescriptor after modify operation, namespace will have this descriptor
+ */
+ default void preModifyNamespace(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ NamespaceDescriptor currentNsDescriptor, NamespaceDescriptor newNsDescriptor)
+ throws IOException {
+ preModifyNamespace(ctx, newNsDescriptor);
+ }
/**
* Called after the modifyNamespace operation has been requested.
* @param ctx the environment to interact with the framework and master
- * @param ns the NamespaceDescriptor
+ * @param currentNsDescriptor current NamespaceDescriptor of the namespace
+ * @deprecated Since 2.1. Will be removed in 3.0.
*/
+ @Deprecated
default void postModifyNamespace(final ObserverContext<MasterCoprocessorEnvironment> ctx,
- NamespaceDescriptor ns) throws IOException {}
+ NamespaceDescriptor currentNsDescriptor) throws IOException {}
+
+ /**
+ * Called after the modifyNamespace operation has been requested.
+ * @param ctx the environment to interact with the framework and master
+ * @param oldNsDescriptor descriptor of namespace before modify operation happened
+ * @param currentNsDescriptor current NamespaceDescriptor of the namespace
+ */
+ default void postModifyNamespace(final ObserverContext<MasterCoprocessorEnvironment> ctx,
+ NamespaceDescriptor oldNsDescriptor, NamespaceDescriptor currentNsDescriptor)
+ throws IOException {
+ postModifyNamespace(ctx, currentNsDescriptor);
+ }
/**
* Called before a getNamespaceDescriptor request has been processed.
http://git-wip-us.apache.org/repos/asf/hbase/blob/8c9825a0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 9dd685d..6c41b8e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -2362,16 +2362,18 @@ public class HMaster extends HRegionServer implements MasterServices {
}
@Override
- public long modifyTable(final TableName tableName, final TableDescriptor descriptor,
+ public long modifyTable(final TableName tableName, final TableDescriptor newDescriptor,
final long nonceGroup, final long nonce) throws IOException {
checkInitialized();
- sanityCheckTableDescriptor(descriptor);
+ sanityCheckTableDescriptor(newDescriptor);
return MasterProcedureUtil.submitProcedure(
new MasterProcedureUtil.NonceProcedureRunnable(this, nonceGroup, nonce) {
@Override
protected void run() throws IOException {
- getMaster().getMasterCoprocessorHost().preModifyTable(tableName, descriptor);
+ TableDescriptor oldDescriptor = getMaster().getTableDescriptors().get(tableName);
+ getMaster().getMasterCoprocessorHost()
+ .preModifyTable(tableName, oldDescriptor, newDescriptor);
LOG.info(getClientIdAuditPrefix() + " modify " + tableName);
@@ -2380,11 +2382,12 @@ public class HMaster extends HRegionServer implements MasterServices {
// We need to wait for the procedure to potentially fail due to "prepare" sanity
// checks. This will block only the beginning of the procedure. See HBASE-19953.
ProcedurePrepareLatch latch = ProcedurePrepareLatch.createBlockingLatch();
- submitProcedure(new ModifyTableProcedure(procedureExecutor.getEnvironment(),
- descriptor, latch));
+ submitProcedure(
+ new ModifyTableProcedure(procedureExecutor.getEnvironment(), newDescriptor, latch));
latch.await();
- getMaster().getMasterCoprocessorHost().postModifyTable(tableName, descriptor);
+ getMaster().getMasterCoprocessorHost()
+ .postModifyTable(tableName, oldDescriptor, newDescriptor);
}
@Override
@@ -2997,26 +3000,28 @@ public class HMaster extends HRegionServer implements MasterServices {
* <code>nonceGroup</code> (the source must ensure each operation gets a unique id).
* @return procedure id
*/
- long modifyNamespace(final NamespaceDescriptor namespaceDescriptor, final long nonceGroup,
+ long modifyNamespace(final NamespaceDescriptor newNsDescriptor, final long nonceGroup,
final long nonce) throws IOException {
checkInitialized();
- TableName.isLegalNamespaceName(Bytes.toBytes(namespaceDescriptor.getName()));
+ TableName.isLegalNamespaceName(Bytes.toBytes(newNsDescriptor.getName()));
return MasterProcedureUtil.submitProcedure(new MasterProcedureUtil.NonceProcedureRunnable(this,
nonceGroup, nonce) {
@Override
protected void run() throws IOException {
- getMaster().getMasterCoprocessorHost().preModifyNamespace(namespaceDescriptor);
+ NamespaceDescriptor oldNsDescriptor = getNamespace(newNsDescriptor.getName());
+ getMaster().getMasterCoprocessorHost().preModifyNamespace(oldNsDescriptor, newNsDescriptor);
// We need to wait for the procedure to potentially fail due to "prepare" sanity
// checks. This will block only the beginning of the procedure. See HBASE-19953.
ProcedurePrepareLatch latch = ProcedurePrepareLatch.createBlockingLatch();
- LOG.info(getClientIdAuditPrefix() + " modify " + namespaceDescriptor);
+ LOG.info(getClientIdAuditPrefix() + " modify " + newNsDescriptor);
// Execute the operation synchronously - wait for the operation to complete before
// continuing.
- setProcId(getClusterSchema().modifyNamespace(namespaceDescriptor, getNonceKey(), latch));
+ setProcId(getClusterSchema().modifyNamespace(newNsDescriptor, getNonceKey(), latch));
latch.await();
- getMaster().getMasterCoprocessorHost().postModifyNamespace(namespaceDescriptor);
+ getMaster().getMasterCoprocessorHost().postModifyNamespace(oldNsDescriptor,
+ newNsDescriptor);
}
@Override
http://git-wip-us.apache.org/repos/asf/hbase/blob/8c9825a0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
index 8c8c02c..072ae8a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterCoprocessorHost.java
@@ -254,20 +254,22 @@ public class MasterCoprocessorHost
});
}
- public void preModifyNamespace(final NamespaceDescriptor ns) throws IOException {
+ public void preModifyNamespace(final NamespaceDescriptor currentNsDescriptor,
+ final NamespaceDescriptor newNsDescriptor) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.preModifyNamespace(this, ns);
+ observer.preModifyNamespace(this, currentNsDescriptor, newNsDescriptor);
}
});
}
- public void postModifyNamespace(final NamespaceDescriptor ns) throws IOException {
+ public void postModifyNamespace(final NamespaceDescriptor oldNsDescriptor,
+ final NamespaceDescriptor currentNsDescriptor) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.postModifyNamespace(this, ns);
+ observer.postModifyNamespace(this, oldNsDescriptor, currentNsDescriptor);
}
});
}
@@ -429,42 +431,44 @@ public class MasterCoprocessorHost
});
}
- public void preModifyTable(final TableName tableName, final TableDescriptor htd)
- throws IOException {
+ public void preModifyTable(final TableName tableName, final TableDescriptor currentDescriptor,
+ final TableDescriptor newDescriptor) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.preModifyTable(this, tableName, htd);
+ observer.preModifyTable(this, tableName, currentDescriptor, newDescriptor);
}
});
}
- public void postModifyTable(final TableName tableName, final TableDescriptor htd)
- throws IOException {
+ public void postModifyTable(final TableName tableName, final TableDescriptor oldDescriptor,
+ final TableDescriptor currentDescriptor) throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation() {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.postModifyTable(this, tableName, htd);
+ observer.postModifyTable(this, tableName, oldDescriptor, currentDescriptor);
}
});
}
- public void preModifyTableAction(final TableName tableName, final TableDescriptor htd,
- final User user) throws IOException {
+ public void preModifyTableAction(final TableName tableName,
+ final TableDescriptor currentDescriptor, final TableDescriptor newDescriptor, final User user)
+ throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.preModifyTableAction(this, tableName, htd);
+ observer.preModifyTableAction(this, tableName, currentDescriptor, newDescriptor);
}
});
}
- public void postCompletedModifyTableAction(final TableName tableName, final TableDescriptor htd,
- final User user) throws IOException {
+ public void postCompletedModifyTableAction(final TableName tableName,
+ final TableDescriptor oldDescriptor, final TableDescriptor currentDescriptor, final User user)
+ throws IOException {
execOperation(coprocEnvironments.isEmpty() ? null : new MasterObserverOperation(user) {
@Override
public void call(MasterObserver observer) throws IOException {
- observer.postCompletedModifyTableAction(this, tableName, htd);
+ observer.postCompletedModifyTableAction(this, tableName, oldDescriptor, currentDescriptor);
}
});
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/8c9825a0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
index 1f1ba3c..6fb9caa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyTableProcedure.java
@@ -420,10 +420,12 @@ public class ModifyTableProcedure
if (cpHost != null) {
switch (state) {
case MODIFY_TABLE_PRE_OPERATION:
- cpHost.preModifyTableAction(getTableName(), modifiedTableDescriptor, getUser());
+ cpHost.preModifyTableAction(getTableName(), unmodifiedTableDescriptor,
+ modifiedTableDescriptor, getUser());
break;
case MODIFY_TABLE_POST_OPERATION:
- cpHost.postCompletedModifyTableAction(getTableName(), modifiedTableDescriptor,getUser());
+ cpHost.postCompletedModifyTableAction(getTableName(), unmodifiedTableDescriptor,
+ modifiedTableDescriptor,getUser());
break;
default:
throw new UnsupportedOperationException(this + " unhandled state=" + state);
[03/14] hbase git commit: HBASE-20571 JMXJsonServlet generates
invalid JSON if it has NaN in metrics
Posted by md...@apache.org.
HBASE-20571 JMXJsonServlet generates invalid JSON if it has NaN in metrics
- CacheStats won't generate NaN metrics.
- JSONBean class will serialize special floating point values as
"NaN", "Infinity" or "-Infinity"
Signed-off-by: Andrew Purtell <ap...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6148b478
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6148b478
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6148b478
Branch: refs/heads/HBASE-20478
Commit: 6148b4785d5fb9b1f8fbe40e5c4293950ec03012
Parents: 8c9825a
Author: Balazs Meszaros <ba...@cloudera.com>
Authored: Fri May 11 16:30:38 2018 +0200
Committer: Andrew Purtell <ap...@apache.org>
Committed: Wed May 16 12:20:39 2018 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/util/JSONBean.java | 6 ++-
.../hadoop/hbase/io/hfile/CacheStats.java | 40 +++++++++++++++++---
2 files changed, 40 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6148b478/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
index 80ffa27..da89a41 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JSONBean.java
@@ -310,7 +310,11 @@ public class JSONBean {
jg.writeEndArray();
} else if(value instanceof Number) {
Number n = (Number)value;
- jg.writeNumber(n.toString());
+ if (Double.isFinite(n.doubleValue())) {
+ jg.writeNumber(n.toString());
+ } else {
+ jg.writeString(n.toString());
+ }
} else if(value instanceof Boolean) {
Boolean b = (Boolean)value;
jg.writeBoolean(b);
http://git-wip-us.apache.org/repos/asf/hbase/blob/6148b478/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
index 5edd259..c1c92e1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheStats.java
@@ -388,23 +388,53 @@ public class CacheStats {
}
public double getHitRatio() {
- return ((double) getHitCount() / (double) getRequestCount());
+ double requestCount = getRequestCount();
+
+ if (requestCount == 0) {
+ return 0;
+ }
+
+ return getHitCount() / requestCount;
}
public double getHitCachingRatio() {
- return ((double) getHitCachingCount() / (double) getRequestCachingCount());
+ double requestCachingCount = getRequestCachingCount();
+
+ if (requestCachingCount == 0) {
+ return 0;
+ }
+
+ return getHitCachingCount() / requestCachingCount;
}
public double getMissRatio() {
- return ((double) getMissCount() / (double) getRequestCount());
+ double requestCount = getRequestCount();
+
+ if (requestCount == 0) {
+ return 0;
+ }
+
+ return getMissCount() / requestCount;
}
public double getMissCachingRatio() {
- return ((double) getMissCachingCount() / (double) getRequestCachingCount());
+ double requestCachingCount = getRequestCachingCount();
+
+ if (requestCachingCount == 0) {
+ return 0;
+ }
+
+ return getMissCachingCount() / requestCachingCount;
}
public double evictedPerEviction() {
- return ((double) getEvictedCount() / (double) getEvictionCount());
+ double evictionCount = getEvictionCount();
+
+ if (evictionCount == 0) {
+ return 0;
+ }
+
+ return getEvictedCount() / evictionCount;
}
public long getFailedInserts() {
[12/14] hbase git commit: HBASE-20609 SnapshotHFileCleaner#init
should check that params is not null
Posted by md...@apache.org.
HBASE-20609 SnapshotHFileCleaner#init should check that params is not null
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6c1097e9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6c1097e9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6c1097e9
Branch: refs/heads/HBASE-20478
Commit: 6c1097e92fecbf8ef81f6e5d3217967abe94fb45
Parents: c9f8c34
Author: tedyu <yu...@gmail.com>
Authored: Mon May 21 18:36:38 2018 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Mon May 21 18:36:38 2018 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java | 2 +-
.../java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/6c1097e9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
index 559863e..7c866c2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/snapshot/SnapshotHFileCleaner.java
@@ -76,7 +76,7 @@ public class SnapshotHFileCleaner extends BaseHFileCleanerDelegate {
@Override
public void init(Map<String, Object> params) {
- if (params.containsKey(HMaster.MASTER)) {
+ if (params != null && params.containsKey(HMaster.MASTER)) {
this.master = (MasterServices) params.get(HMaster.MASTER);
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/6c1097e9/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
index 578724f..8666126 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java
@@ -375,6 +375,7 @@ public class TestHFileArchiving {
// The cleaner should be looping without long pauses to reproduce the race condition.
HFileCleaner cleaner = new HFileCleaner(1, stoppable, conf, fs, archiveDir);
+ assertFalse("cleaner should not be null", cleaner == null);
try {
choreService.scheduleChore(cleaner);
[05/14] hbase git commit: HBASE-20530 Composition of backup directory
containing namespace when restoring is different from the actual hfile
location
Posted by md...@apache.org.
HBASE-20530 Composition of backup directory containing namespace when restoring is different from the actual hfile location
Signed-off-by: tedyu <yu...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/acbc3a22
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/acbc3a22
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/acbc3a22
Branch: refs/heads/HBASE-20478
Commit: acbc3a225338fd1ff82226ebbd937f7b15ef5b60
Parents: f4006b5
Author: Vladimir Rodionov <vr...@hortonworks.com>
Authored: Thu May 10 13:50:31 2018 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Wed May 16 14:21:20 2018 -0700
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/backup/TestBackupBase.java | 6 +++---
.../hadoop/hbase/backup/TestIncrementalBackup.java | 4 ++--
.../hadoop/hbase/mapreduce/HFileOutputFormat2.java | 9 ++++++++-
.../hadoop/hbase/mapreduce/TestHFileOutputFormat2.java | 11 ++++++++---
4 files changed, 21 insertions(+), 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/acbc3a22/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
index 4243f5b..08ecd63 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupBase.java
@@ -88,8 +88,8 @@ public class TestBackupBase {
protected static TableName table3 = TableName.valueOf("table3");
protected static TableName table4 = TableName.valueOf("table4");
- protected static TableName table1_restore = TableName.valueOf("ns1:table1_restore");
- protected static TableName table2_restore = TableName.valueOf("ns2:table2_restore");
+ protected static TableName table1_restore = TableName.valueOf("default:table1");
+ protected static TableName table2_restore = TableName.valueOf("ns2:table2");
protected static TableName table3_restore = TableName.valueOf("ns3:table3_restore");
protected static TableName table4_restore = TableName.valueOf("ns4:table4_restore");
@@ -404,7 +404,7 @@ public class TestBackupBase {
protected static void createTables() throws Exception {
long tid = System.currentTimeMillis();
- table1 = TableName.valueOf("ns1:test-" + tid);
+ table1 = TableName.valueOf("test-" + tid);
HBaseAdmin ha = TEST_UTIL.getHBaseAdmin();
// Create namespaces
http://git-wip-us.apache.org/repos/asf/hbase/blob/acbc3a22/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
index 0bce769..b74f42f 100644
--- a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackup.java
@@ -163,14 +163,14 @@ public class TestIncrementalBackup extends TestBackupBase {
String backupIdIncMultiple2 = client.backupTables(request);
assertTrue(checkSucceeded(backupIdIncMultiple2));
- // #4 - restore full backup for all tables, without overwrite
+ // #4 - restore full backup for all tables
TableName[] tablesRestoreFull = new TableName[] { table1, table2 };
TableName[] tablesMapFull = new TableName[] { table1_restore, table2_restore };
LOG.debug("Restoring full " + backupIdFull);
client.restore(BackupUtils.createRestoreRequest(BACKUP_ROOT_DIR, backupIdFull, false,
- tablesRestoreFull, tablesMapFull, false));
+ tablesRestoreFull, tablesMapFull, true));
// #5.1 - check tables for full restore
HBaseAdmin hAdmin = TEST_UTIL.getHBaseAdmin();
http://git-wip-us.apache.org/repos/asf/hbase/blob/acbc3a22/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 3b04c0b..a403455 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -27,6 +27,7 @@ import java.io.UnsupportedEncodingException;
import java.net.InetSocketAddress;
import java.net.URLDecoder;
import java.net.URLEncoder;
+import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -251,6 +252,9 @@ public class HFileOutputFormat2
byte[] tableNameBytes = null;
if (writeMultipleTables) {
tableNameBytes = MultiTableHFileOutputFormat.getTableName(row.get());
+ tableNameBytes =
+ TableName.valueOf(tableNameBytes).getNameWithNamespaceInclAsString()
+ .getBytes(Charset.defaultCharset());
if (!allTableNames.contains(Bytes.toString(tableNameBytes))) {
throw new IllegalArgumentException("TableName '" + Bytes.toString(tableNameBytes) +
"' not" + " expected");
@@ -639,7 +643,10 @@ public class HFileOutputFormat2
for( TableInfo tableInfo : multiTableInfo )
{
regionLocators.add(tableInfo.getRegionLocator());
- allTableNames.add(tableInfo.getRegionLocator().getName().getNameAsString());
+ String tn = writeMultipleTables?
+ tableInfo.getRegionLocator().getName().getNameWithNamespaceInclAsString():
+ tableInfo.getRegionLocator().getName().getNameAsString();
+ allTableNames.add(tn);
tableDescriptors.add(tableInfo.getTableDescriptor());
}
// Record tablenames for creating writer by favored nodes, and decoding compression, block size and other attributes of columnfamily per table
http://git-wip-us.apache.org/repos/asf/hbase/blob/acbc3a22/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 710a94c..09444ac 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -627,15 +627,19 @@ public class TestHFileOutputFormat2 {
Path testDir = util.getDataTestDirOnTestFS("testLocalMRIncrementalLoad");
// Generate the bulk load files
runIncrementalPELoad(conf, tableInfo, testDir, putSortReducer);
+ if (writeMultipleTables) {
+ testDir = new Path(testDir, "default");
+ }
for (Table tableSingle : allTables.values()) {
// This doesn't write into the table, just makes files
assertEquals("HFOF should not touch actual table", 0, util.countRows(tableSingle));
}
int numTableDirs = 0;
- for (FileStatus tf : testDir.getFileSystem(conf).listStatus(testDir)) {
+ FileStatus[] fss =
+ testDir.getFileSystem(conf).listStatus(testDir);
+ for (FileStatus tf: fss) {
Path tablePath = testDir;
-
if (writeMultipleTables) {
if (allTables.containsKey(tf.getPath().getName())) {
++numTableDirs;
@@ -648,7 +652,8 @@ public class TestHFileOutputFormat2 {
// Make sure that a directory was created for every CF
int dir = 0;
- for (FileStatus f : tablePath.getFileSystem(conf).listStatus(tablePath)) {
+ fss = tablePath.getFileSystem(conf).listStatus(tablePath);
+ for (FileStatus f: fss) {
for (byte[] family : FAMILIES) {
if (Bytes.toString(family).equals(f.getPath().getName())) {
++dir;
[09/14] hbase git commit: HBASE-20581 Fix documentation about schema
updates
Posted by md...@apache.org.
HBASE-20581 Fix documentation about schema updates
Signed-off-by: Balazs Meszaros <ba...@cloudera.com>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f917f4e7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f917f4e7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f917f4e7
Branch: refs/heads/HBASE-20478
Commit: f917f4e7c8de2987bde42eccd14a901ad3ec9b27
Parents: dab0e90
Author: Josh Elser <el...@apache.org>
Authored: Mon May 14 13:44:40 2018 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Thu May 17 14:29:48 2018 -0400
----------------------------------------------------------------------
src/main/asciidoc/_chapters/external_apis.adoc | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/f917f4e7/src/main/asciidoc/_chapters/external_apis.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/external_apis.adoc b/src/main/asciidoc/_chapters/external_apis.adoc
index ffb6ee6..c1299a6 100644
--- a/src/main/asciidoc/_chapters/external_apis.adoc
+++ b/src/main/asciidoc/_chapters/external_apis.adoc
@@ -186,20 +186,20 @@ creation or mutation, and `DELETE` for deletion.
|/_table_/schema
|POST
-|Create a new table, or replace an existing table's schema
+|Update an existing table with the provided schema fragment
|curl -vi -X POST \
-H "Accept: text/xml" \
-H "Content-Type: text/xml" \
- -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" /></TableSchema>' \
+ -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" KEEP_DELETED_CELLS="true" /></TableSchema>' \
"http://example.com:8000/users/schema"
|/_table_/schema
|PUT
-|Update an existing table with the provided schema fragment
+|Create a new table, or replace an existing table's schema
|curl -vi -X PUT \
-H "Accept: text/xml" \
-H "Content-Type: text/xml" \
- -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" KEEP_DELETED_CELLS="true" /></TableSchema>' \
+ -d '<?xml version="1.0" encoding="UTF-8"?><TableSchema name="users"><ColumnSchema name="cf" /></TableSchema>' \
"http://example.com:8000/users/schema"
|/_table_/schema
[07/14] hbase git commit: HBASE-20488 Fix PE command help message
Posted by md...@apache.org.
HBASE-20488 Fix PE command help message
checkstyle fix
Signed-off-by: Peter Somogyi <ps...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf529f18
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf529f18
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf529f18
Branch: refs/heads/HBASE-20478
Commit: cf529f18a9959589fa635f78df4840472526ea2c
Parents: 60bdaf7
Author: Xu Cang <xc...@salesforce.com>
Authored: Wed May 16 10:30:21 2018 -0700
Committer: Peter Somogyi <ps...@apache.org>
Committed: Thu May 17 11:56:04 2018 +0200
----------------------------------------------------------------------
.../org/apache/hadoop/hbase/PerformanceEvaluation.java | 13 +++++++------
1 file changed, 7 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/cf529f18/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 33267e0..42acb5c 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -132,6 +132,7 @@ import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFacto
public class PerformanceEvaluation extends Configured implements Tool {
static final String RANDOM_SEEK_SCAN = "randomSeekScan";
static final String RANDOM_READ = "randomRead";
+ static final String PE_COMMAND_SHORTNAME = "pe";
private static final Logger LOG = LoggerFactory.getLogger(PerformanceEvaluation.class.getName());
private static final ObjectMapper MAPPER = new ObjectMapper();
static {
@@ -2356,11 +2357,11 @@ public class PerformanceEvaluation extends Configured implements Tool {
}
protected void printUsage() {
- printUsage(this.getClass().getName(), null);
+ printUsage(PE_COMMAND_SHORTNAME, null);
}
protected static void printUsage(final String message) {
- printUsage(PerformanceEvaluation.class.getName(), message);
+ printUsage(PE_COMMAND_SHORTNAME, message);
}
protected static void printUsageAndExit(final String message, final int exitCode) {
@@ -2368,11 +2369,11 @@ public class PerformanceEvaluation extends Configured implements Tool {
System.exit(exitCode);
}
- protected static void printUsage(final String className, final String message) {
+ protected static void printUsage(final String shortName, final String message) {
if (message != null && message.length() > 0) {
System.err.println(message);
}
- System.err.println("Usage: java " + className + " \\");
+ System.err.print("Usage: hbase " + shortName);
System.err.println(" <OPTIONS> [-D<property=value>]* <command> <nclients>");
System.err.println();
System.err.println("General Options:");
@@ -2464,9 +2465,9 @@ public class PerformanceEvaluation extends Configured implements Tool {
+ "(and HRegionServers) running. 1 <= value <= 500");
System.err.println("Examples:");
System.err.println(" To run a single client doing the default 1M sequentialWrites:");
- System.err.println(" $ hbase " + className + " sequentialWrite 1");
+ System.err.println(" $ hbase " + shortName + " sequentialWrite 1");
System.err.println(" To run 10 clients doing increments over ten rows:");
- System.err.println(" $ hbase " + className + " --rows=10 --nomapred increment 10");
+ System.err.println(" $ hbase " + shortName + " --rows=10 --nomapred increment 10");
}
/**
[06/14] hbase git commit: HBASE-20577 Make Log Level page design
consistent with the design of other pages in UI
Posted by md...@apache.org.
HBASE-20577 Make Log Level page design consistent with the design of other pages in UI
Signed-off-by: tedyu <yu...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/60bdaf78
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/60bdaf78
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/60bdaf78
Branch: refs/heads/HBASE-20478
Commit: 60bdaf7846446acb0c4b8208d02687452fdbd2b2
Parents: acbc3a2
Author: Nihal Jain <ni...@gmail.com>
Authored: Thu May 17 02:24:09 2018 +0530
Committer: tedyu <yu...@gmail.com>
Committed: Wed May 16 21:21:36 2018 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/http/log/LogLevel.java | 48 ++++++++++++++------
1 file changed, 33 insertions(+), 15 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/60bdaf78/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
----------------------------------------------------------------------
diff --git a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
index 2f62313..aa223f3 100644
--- a/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
+++ b/hbase-http/src/main/java/org/apache/hadoop/hbase/http/log/LogLevel.java
@@ -105,13 +105,17 @@ public final class LogLevel {
response)) {
return;
}
+ response.setContentType("text/html");
+ String requestedURL = "header.jsp?pageTitle=Log Level";
+ request.getRequestDispatcher(requestedURL).include(request, response);
+ PrintWriter out = response.getWriter();
+ out.println(FORMS);
- PrintWriter out = ServletUtil.initHTML(response, "Log Level");
String logName = ServletUtil.getParameter(request, "log");
String level = ServletUtil.getParameter(request, "level");
if (logName != null) {
- out.println("<br /><hr /><h3>Results</h3>");
+ out.println("<p>Results:</p>");
out.println(MARKER
+ "Submitted Log Name: <b>" + logName + "</b><br />");
@@ -132,28 +136,42 @@ public final class LogLevel {
out.println("Sorry, " + log.getClass() + " not supported.<br />");
}
}
-
- out.println(FORMS);
- out.println(ServletUtil.HTML_TAIL);
+ out.println("</div>");
+ request.getRequestDispatcher("footer.jsp").include(request, response);
+ out.close();
}
- static final String FORMS = "\n<br /><hr /><h3>Get / Set</h3>"
- + "\n<form>Log: <input type='text' size='50' name='log' /> "
- + "<input type='submit' value='Get Log Level' />"
- + "</form>"
- + "\n<form>Log: <input type='text' size='50' name='log' /> "
- + "Level: <input type='text' name='level' /> "
- + "<input type='submit' value='Set Log Level' />"
- + "</form>";
+ static final String FORMS = "<div class='container-fluid content'>\n"
+ + "<div class='row inner_header'>\n" + "<div class='page-header'>\n"
+ + "<h1>Get/Set Log Level</h1>\n" + "</div>\n" + "</div>\n" + "Actions:" + "<p>"
+ + "<center>\n" + "<table class='table' style='border: 0;' width='95%' >\n" + "<tr>\n"
+ + "<form>\n" + "<td class='centered'>\n"
+ + "<input style='font-size: 12pt; width: 10em' type='submit' value='Get Log Level'"
+ + " class='btn' />\n" + "</td>\n" + "<td style='text-align: center;'>\n"
+ + "<input type='text' name='log' size='50' required='required'"
+ + " placeholder='Log Name (required)' />\n" + "</td>\n" + "<td width=\"40%\">"
+ + "Get the current log level for the specified log name." + "</td>\n" + "</form>\n"
+ + "</tr>\n" + "<tr>\n" + "<form>\n" + "<td class='centered'>\n"
+ + "<input style='font-size: 12pt; width: 10em' type='submit'"
+ + " value='Set Log Level' class='btn' />\n" + "</td>\n"
+ + "<td style='text-align: center;'>\n"
+ + "<input type='text' name='log' size='50' required='required'"
+ + " placeholder='Log Name (required)' />\n"
+ + "<input type='text' name='level' size='50' required='required'"
+ + " placeholder='Log Level (required)' />\n" + "</td>\n" + "<td width=\"40%\" style=\"\">"
+ + "Set the specified log level for the specified log name." + "</td>\n" + "</form>\n"
+ + "</tr>\n" + "</table>\n" + "</center>\n" + "</p>\n" + "<hr/>\n";
private static void process(org.apache.log4j.Logger log, String level,
PrintWriter out) throws IOException {
if (level != null) {
if (!level.equals(org.apache.log4j.Level.toLevel(level).toString())) {
- out.println(MARKER + "Bad level : <b>" + level + "</b><br />");
+ out.println(MARKER + "<div class='text-danger'>" + "Bad level : <strong>" + level
+ + "</strong><br />" + "</div>");
} else {
log.setLevel(org.apache.log4j.Level.toLevel(level));
- out.println(MARKER + "Setting Level to " + level + " ...<br />");
+ out.println(MARKER + "<div class='text-success'>" + "Setting Level to <strong>" + level
+ + "</strong> ...<br />" + "</div>");
}
}
out.println(MARKER