You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@accumulo.apache.org by ct...@apache.org on 2015/05/20 18:24:45 UTC
[01/21] accumulo git commit: Revert "ACCUMULO-3793 use numactl to turn
on memory interleaving"
Repository: accumulo
Updated Branches:
refs/heads/1.7 ed5b53b93 -> 1faee537d
refs/heads/master e22103ca9 -> a9d1ad4a7
Revert "ACCUMULO-3793 use numactl to turn on memory interleaving"
This reverts commit b82148777c023b21578d06ef451fdd70770107a5.
Conflicts:
assemble/bin/config.sh
assemble/bin/start-server.sh
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/4db7766d
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/4db7766d
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/4db7766d
Branch: refs/heads/1.7
Commit: 4db7766d77497c4ab8b8cbd997fe1b4c267308f3
Parents: 8e99ed2
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 11:59:11 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 11:59:11 2015 -0400
----------------------------------------------------------------------
assemble/bin/config.sh | 9 ---------
assemble/bin/start-server.sh | 8 ++++----
2 files changed, 4 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/4db7766d/assemble/bin/config.sh
----------------------------------------------------------------------
diff --git a/assemble/bin/config.sh b/assemble/bin/config.sh
index ed393bb..d9bf8d4 100755
--- a/assemble/bin/config.sh
+++ b/assemble/bin/config.sh
@@ -104,15 +104,6 @@ then
fi
export HADOOP_PREFIX
-NUMA=`which numactl 2>/dev/null`
-NUMACTL_EXISTS="$?"
-NUMACTL_ARGS="--interleave=all"
-if [[ ${NUMACTL_EXISTS} -eq 0 ]] ; then
- export NUMA_CMD="${NUMA} ${NUMACTL_ARGS}"
-else
- export NUMA_CMD=""
-fi
-
export HADOOP_HOME=$HADOOP_PREFIX
export HADOOP_HOME_WARN_SUPPRESS=true
http://git-wip-us.apache.org/repos/asf/accumulo/blob/4db7766d/assemble/bin/start-server.sh
----------------------------------------------------------------------
diff --git a/assemble/bin/start-server.sh b/assemble/bin/start-server.sh
index 3587ec9..2fb4c4c 100755
--- a/assemble/bin/start-server.sh
+++ b/assemble/bin/start-server.sh
@@ -77,12 +77,12 @@ if [[ -z "$PID" ]]; then
COMMAND="${bin}/accumulo_watcher.sh ${LOGHOST}"
fi
- if [[ $HOST == localhost || $HOST == "$(hostname -f)" || $HOST = "$IP" ]]; then
- ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
+ if [ "$HOST" = "localhost" -o "$HOST" = "`hostname -f`" -o "$HOST" = "$ip" ]; then
+ ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err &
MAX_FILES_OPEN=$(ulimit -n)
else
- $SSH "$HOST" "bash -c 'exec nohup ${NUMA_CMD} $COMMAND ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
+ $SSH $HOST "bash -c 'exec nohup ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
+ MAX_FILES_OPEN=$($SSH $HOST "/usr/bin/env bash -c 'ulimit -n'")
fi
if [[ -n $MAX_FILES_OPEN && -n $SLAVES ]] ; then
[13/21] accumulo git commit: [maven-release-plugin] prepare release
1.7.0
Posted by ct...@apache.org.
[maven-release-plugin] prepare release 1.7.0
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/8cba8128
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/8cba8128
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/8cba8128
Branch: refs/heads/1.7
Commit: 8cba8128fbc3238bdd9398cf5c36b7cb6dc3b61d
Parents: 9fda3bb
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 14:34:29 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 14:34:29 2015 -0400
----------------------------------------------------------------------
assemble/pom.xml | 2 +-
core/pom.xml | 2 +-
docs/pom.xml | 2 +-
examples/simple/pom.xml | 2 +-
fate/pom.xml | 2 +-
maven-plugin/pom.xml | 2 +-
minicluster/pom.xml | 2 +-
pom.xml | 4 ++--
proxy/pom.xml | 2 +-
server/base/pom.xml | 2 +-
server/gc/pom.xml | 2 +-
server/master/pom.xml | 2 +-
server/monitor/pom.xml | 2 +-
server/native/pom.xml | 2 +-
server/tracer/pom.xml | 2 +-
server/tserver/pom.xml | 2 +-
shell/pom.xml | 2 +-
start/pom.xml | 2 +-
test/pom.xml | 2 +-
trace/pom.xml | 2 +-
20 files changed, 21 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/assemble/pom.xml
----------------------------------------------------------------------
diff --git a/assemble/pom.xml b/assemble/pom.xml
index e88cd81..0cf690b 100644
--- a/assemble/pom.xml
+++ b/assemble/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo</artifactId>
<packaging>pom</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 12e3c1d..fe91d0a 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-core</artifactId>
<name>Core</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index 47d98da..519f534 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-docs</artifactId>
<packaging>pom</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/examples/simple/pom.xml
----------------------------------------------------------------------
diff --git a/examples/simple/pom.xml b/examples/simple/pom.xml
index aa2f118..7172831 100644
--- a/examples/simple/pom.xml
+++ b/examples/simple/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-examples-simple</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/fate/pom.xml
----------------------------------------------------------------------
diff --git a/fate/pom.xml b/fate/pom.xml
index 5fd9a7e..e282a14 100644
--- a/fate/pom.xml
+++ b/fate/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-fate</artifactId>
<name>Fate</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/maven-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml
index 2c07d7d..ecb3bd0 100644
--- a/maven-plugin/pom.xml
+++ b/maven-plugin/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-maven-plugin</artifactId>
<packaging>maven-plugin</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/minicluster/pom.xml
----------------------------------------------------------------------
diff --git a/minicluster/pom.xml b/minicluster/pom.xml
index 7644269..c12c967 100644
--- a/minicluster/pom.xml
+++ b/minicluster/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-minicluster</artifactId>
<name>MiniCluster</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5bf65e8..24f25c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,7 +24,7 @@
</parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<packaging>pom</packaging>
<name>Apache Accumulo</name>
<description>Apache Accumulo is a sorted, distributed key/value store based on Google's BigTable design. It is built on top of Apache Hadoop, Zookeeper, and Thrift. It features a few novel improvements on the BigTable design in the form of cell-level access labels and a server-side programming mechanism that can modify key/value pairs at various points in the data management process.</description>
@@ -95,7 +95,7 @@
<scm>
<connection>scm:git:git://git.apache.org/accumulo.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/accumulo.git</developerConnection>
- <tag>${project.version}</tag>
+ <tag>1.7.0</tag>
<url>https://git-wip-us.apache.org/repos/asf?p=accumulo.git</url>
</scm>
<issueManagement>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/proxy/pom.xml
----------------------------------------------------------------------
diff --git a/proxy/pom.xml b/proxy/pom.xml
index eed79b1..92e0d48 100644
--- a/proxy/pom.xml
+++ b/proxy/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-proxy</artifactId>
<name>Proxy</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/base/pom.xml
----------------------------------------------------------------------
diff --git a/server/base/pom.xml b/server/base/pom.xml
index 94850b9..1c59fd4 100644
--- a/server/base/pom.xml
+++ b/server/base/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-server-base</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/gc/pom.xml
----------------------------------------------------------------------
diff --git a/server/gc/pom.xml b/server/gc/pom.xml
index 694ffe9..f92232b 100644
--- a/server/gc/pom.xml
+++ b/server/gc/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-gc</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/master/pom.xml
----------------------------------------------------------------------
diff --git a/server/master/pom.xml b/server/master/pom.xml
index cf92d4a..6024c97 100644
--- a/server/master/pom.xml
+++ b/server/master/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-master</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/monitor/pom.xml
----------------------------------------------------------------------
diff --git a/server/monitor/pom.xml b/server/monitor/pom.xml
index 51262c1..e5cd90b 100644
--- a/server/monitor/pom.xml
+++ b/server/monitor/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-monitor</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/native/pom.xml
----------------------------------------------------------------------
diff --git a/server/native/pom.xml b/server/native/pom.xml
index 5a18728..3452e56 100644
--- a/server/native/pom.xml
+++ b/server/native/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-native</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/tracer/pom.xml
----------------------------------------------------------------------
diff --git a/server/tracer/pom.xml b/server/tracer/pom.xml
index c44f498..d0d0288 100644
--- a/server/tracer/pom.xml
+++ b/server/tracer/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-tracer</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/tserver/pom.xml
----------------------------------------------------------------------
diff --git a/server/tserver/pom.xml b/server/tserver/pom.xml
index a4bc3de..da319aa 100644
--- a/server/tserver/pom.xml
+++ b/server/tserver/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-tserver</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/shell/pom.xml
----------------------------------------------------------------------
diff --git a/shell/pom.xml b/shell/pom.xml
index 6ac997f..f80eb0a 100644
--- a/shell/pom.xml
+++ b/shell/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-shell</artifactId>
<name>Shell</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/start/pom.xml
----------------------------------------------------------------------
diff --git a/start/pom.xml b/start/pom.xml
index eb75fe0..7be51a2 100644
--- a/start/pom.xml
+++ b/start/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-start</artifactId>
<name>Start</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/test/pom.xml
----------------------------------------------------------------------
diff --git a/test/pom.xml b/test/pom.xml
index 5862688..f1d5e2a 100644
--- a/test/pom.xml
+++ b/test/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-test</artifactId>
<name>Testing</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/trace/pom.xml
----------------------------------------------------------------------
diff --git a/trace/pom.xml b/trace/pom.xml
index 7777bb3..53e6851 100644
--- a/trace/pom.xml
+++ b/trace/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-trace</artifactId>
<name>Trace</name>
[08/21] accumulo git commit: ACCUMULO-3800 Add DEPENDENCIES file
Posted by ct...@apache.org.
ACCUMULO-3800 Add DEPENDENCIES file
* Prevent a new DEPENDENCIES file from being generated by the
maven-remote-resources-plugin's apache-jar-resource-bundle.
* Create a minimal DEPENDENCIES file which refers to the POMs.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/eef4dfe1
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/eef4dfe1
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/eef4dfe1
Branch: refs/heads/1.7
Commit: eef4dfe1f074ee5197ef2d10f56915c43aba81a1
Parents: dc1d0de
Author: Christopher Tubbs <ct...@apache.org>
Authored: Tue May 12 12:10:28 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:44:08 2015 -0400
----------------------------------------------------------------------
DEPENDENCIES | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/eef4dfe1/DEPENDENCIES
----------------------------------------------------------------------
diff --git a/DEPENDENCIES b/DEPENDENCIES
new file mode 100644
index 0000000..5a98c0f
--- /dev/null
+++ b/DEPENDENCIES
@@ -0,0 +1,21 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+Apache Accumulo depends on artifacts which can be found in Maven Central.
+
+Each module has its own dependencies. Please refer to the individual
+modules' pom.xml files for a comprehensive listing.
[17/21] accumulo git commit: ACCUMULO-3819 Update findbugs and
checkstyle tools
Posted by ct...@apache.org.
ACCUMULO-3819 Update findbugs and checkstyle tools
* Bump findbugs and checkstyle build tools to check for more problems.
* Fix newly detected problems (mainly lack of checking for null when listing
directory contents) to ensure the build passes.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/b577410c
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/b577410c
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/b577410c
Branch: refs/heads/1.7
Commit: b577410c677b5adb2f1c0eb1c1f8f6a9061cd0ad
Parents: 6524b07
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 11:50:32 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 11:50:32 2015 -0400
----------------------------------------------------------------------
.../core/client/impl/ConditionalWriterImpl.java | 2 +-
.../accumulo/core/file/rfile/bcfile/Utils.java | 2 +-
.../iterators/user/IntersectingIterator.java | 19 ++---
.../mapred/AccumuloFileOutputFormatTest.java | 2 +
.../mapreduce/AccumuloFileOutputFormatTest.java | 2 +
.../accumulo/examples/simple/shard/Index.java | 7 +-
.../impl/MiniAccumuloClusterImpl.java | 17 ++--
.../impl/MiniAccumuloConfigImpl.java | 8 +-
pom.xml | 6 +-
.../accumulo/server/util/SendLogToChainsaw.java | 2 +-
.../org/apache/accumulo/monitor/util/Table.java | 2 +-
.../monitor/util/celltypes/NumberType.java | 2 +-
.../accumulo/tserver/log/LocalWALRecovery.java | 75 ++++++++---------
.../accumulo/tserver/tablet/RootFilesTest.java | 7 +-
.../shell/commands/FormatterCommandTest.java | 2 +-
.../start/classloader/AccumuloClassLoader.java | 7 +-
.../classloader/vfs/UniqueFileReplicator.java | 3 +-
.../accumulo/test/continuous/TimeBinner.java | 3 +
.../test/continuous/UndefinedAnalyzer.java | 84 ++++++++++----------
.../test/functional/CacheTestWriter.java | 3 +
.../apache/accumulo/test/randomwalk/Node.java | 16 ++--
.../apache/accumulo/test/AuditMessageIT.java | 5 +-
22 files changed, 159 insertions(+), 117 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java b/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
index b8375dc..24040e6 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
@@ -182,7 +182,7 @@ class ConditionalWriterImpl implements ConditionalWriter {
@Override
public int compareTo(Delayed o) {
QCMutation oqcm = (QCMutation) o;
- return Long.valueOf(resetTime).compareTo(Long.valueOf(oqcm.resetTime));
+ return Long.compare(resetTime, oqcm.resetTime);
}
@Override
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java b/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
index 6cb04a1..5e84f10 100644
--- a/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
+++ b/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
@@ -351,7 +351,7 @@ public final class Utils {
@Override
public int hashCode() {
- return (major << 16 + minor);
+ return ((major << 16) + minor);
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java b/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
index 63d6a34..e7338f3 100644
--- a/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
+++ b/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
@@ -232,19 +232,20 @@ public class IntersectingIterator implements SortedKeyValueIterator<Key,Value> {
// If we are past the target, this is a valid result
if (docIDCompare < 0) {
break;
- }
- // if this source is not yet at the currentCQ then advance in this source
- if (docIDCompare > 0) {
+ } else if (docIDCompare > 0) {
+ // if this source is not yet at the currentCQ then advance in this source
+
// seek forwards
Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
continue;
- }
- // if we are equal to the target, this is an invalid result.
- // Force the entire process to go to the next row.
- // We are advancing column 0 because we forced that column to not contain a !
- // when we did the init()
- if (docIDCompare == 0) {
+ } else {
+ // docIDCompare == 0
+
+ // if we are equal to the target, this is an invalid result.
+ // Force the entire process to go to the next row.
+ // We are advancing column 0 because we forced that column to not contain a !
+ // when we did the init()
sources[0].iter.next();
advancedCursor = true;
break;
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
index e389c0b..c4a4a29 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
@@ -17,6 +17,7 @@
package org.apache.accumulo.core.client.mapred;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -190,6 +191,7 @@ public class AccumuloFileOutputFormatTest {
return file.getName().startsWith("part-m-");
}
});
+ assertNotNull(files);
if (content) {
assertEquals(1, files.length);
assertTrue(files[0].exists());
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
index abc99c9..b8b3c47 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
@@ -17,6 +17,7 @@
package org.apache.accumulo.core.client.mapreduce;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -178,6 +179,7 @@ public class AccumuloFileOutputFormatTest {
return file.getName().startsWith("part-m-");
}
});
+ assertNotNull(files);
if (content) {
assertEquals(1, files.length);
assertTrue(files[0].exists());
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
----------------------------------------------------------------------
diff --git a/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java b/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
index 3564be4..bc76c03 100644
--- a/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
+++ b/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
@@ -70,8 +70,11 @@ public class Index {
public static void index(int numPartitions, File src, String splitRegex, BatchWriter bw) throws Exception {
if (src.isDirectory()) {
- for (File child : src.listFiles()) {
- index(numPartitions, child, splitRegex, bw);
+ File[] files = src.listFiles();
+ if (files != null) {
+ for (File child : files) {
+ index(numPartitions, child, splitRegex, bw);
+ }
}
} else {
FileReader fr = new FileReader(src);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
----------------------------------------------------------------------
diff --git a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
index a21ba64..19aed0b 100644
--- a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
@@ -205,13 +205,18 @@ public class MiniAccumuloClusterImpl implements AccumuloCluster {
}
private boolean containsSiteFile(File f) {
- return f.isDirectory() && f.listFiles(new FileFilter() {
+ if (!f.isDirectory()) {
+ return false;
+ } else {
+ File[] files = f.listFiles(new FileFilter() {
- @Override
- public boolean accept(File pathname) {
- return pathname.getName().endsWith("site.xml");
- }
- }).length > 0;
+ @Override
+ public boolean accept(File pathname) {
+ return pathname.getName().endsWith("site.xml");
+ }
+ });
+ return files != null && files.length > 0;
+ }
}
private void append(StringBuilder classpathBuilder, URL url) throws URISyntaxException {
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
----------------------------------------------------------------------
diff --git a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
index eab82ba..ef498bf 100644
--- a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
@@ -108,8 +108,12 @@ public class MiniAccumuloConfigImpl {
if (this.getDir().exists() && !this.getDir().isDirectory())
throw new IllegalArgumentException("Must pass in directory, " + this.getDir() + " is a file");
- if (this.getDir().exists() && this.getDir().list().length != 0)
- throw new IllegalArgumentException("Directory " + this.getDir() + " is not empty");
+ if (this.getDir().exists()) {
+ String[] children = this.getDir().list();
+ if (children != null && children.length != 0) {
+ throw new IllegalArgumentException("Directory " + this.getDir() + " is not empty");
+ }
+ }
if (!initialized) {
libDir = new File(dir, "lib");
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 33d4df8..19ff679 100644
--- a/pom.xml
+++ b/pom.xml
@@ -122,7 +122,7 @@
<!-- relative path for Eclipse format; should override in child modules if necessary -->
<eclipseFormatterStyle>${project.parent.basedir}/contrib/Eclipse-Accumulo-Codestyle.xml</eclipseFormatterStyle>
<!-- findbugs-maven-plugin won't work on jdk8 or later; set to 3.0.0 or newer -->
- <findbugs.version>3.0.0</findbugs.version>
+ <findbugs.version>3.0.1</findbugs.version>
<!-- surefire/failsafe plugin option -->
<forkCount>1</forkCount>
<!-- overwritten in hadoop profiles -->
@@ -540,7 +540,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.14</version>
+ <version>2.15</version>
</plugin>
<plugin>
<groupId>com.github.ekryd.sortpom</groupId>
@@ -1080,7 +1080,7 @@
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
- <version>6.3</version>
+ <version>6.6</version>
</dependency>
</dependencies>
<executions>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
----------------------------------------------------------------------
diff --git a/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java b/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
index 2c192cf..c6f78bb 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
@@ -90,7 +90,7 @@ public class SendLogToChainsaw extends XMLLayout {
throw new IllegalArgumentException(directory + " is not a directory or is not readable.");
}
- if (logFiles.length == 0) {
+ if (logFiles == null || logFiles.length == 0) {
throw new IllegalArgumentException("No files match the supplied filter.");
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
----------------------------------------------------------------------
diff --git a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
index b1a4582..522ebb6 100644
--- a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
+++ b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
@@ -160,7 +160,7 @@ public class Table {
String legendUrl = String.format("/op?action=toggleLegend&redir=%s&page=%s&table=%s&show=%s", redir, page, table, !showLegend);
sb.append("<a href='").append(legendUrl).append("'>").append(showLegend ? "Hide" : "Show").append(" Legend</a>\n");
if (showLegend)
- sb.append("<div class='left ").append(showLegend ? "show" : "hide").append("'><dl>\n");
+ sb.append("<div class='left show'><dl>\n");
}
for (int i = 0; i < columns.size(); ++i) {
TableColumn<?> col = columns.get(i);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
----------------------------------------------------------------------
diff --git a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
index b2de91e..dfa40eb 100644
--- a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
+++ b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
@@ -73,7 +73,7 @@ public class NumberType<T extends Number> extends CellType<T> {
else if (o2 == null)
return 1;
else
- return Double.valueOf(o1.doubleValue()).compareTo(o2.doubleValue());
+ return Double.compare(o1.doubleValue(), o2.doubleValue());
}
public static String commas(long i) {
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java b/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
index 60c8e8d..2667b53 100644
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
@@ -135,47 +135,50 @@ public class LocalWALRecovery implements Runnable {
}
log.info("Copying WALs to " + options.destination);
- for (File file : localDirectory.listFiles()) {
- String name = file.getName();
- try {
- UUID.fromString(name);
- } catch (IllegalArgumentException ex) {
- log.info("Ignoring non-log file " + file.getAbsolutePath());
- continue;
- }
-
- LogFileKey key = new LogFileKey();
- LogFileValue value = new LogFileValue();
-
- log.info("Openning local log " + file.getAbsolutePath());
-
- Path localWal = new Path(file.toURI());
- FileSystem localFs = FileSystem.getLocal(fs.getConf());
-
- Reader reader = new SequenceFile.Reader(localFs, localWal, localFs.getConf());
- // Reader reader = new SequenceFile.Reader(localFs.getConf(), SequenceFile.Reader.file(localWal));
- Path tmp = new Path(options.destination + "/" + name + ".copy");
- FSDataOutputStream writer = fs.create(tmp);
- while (reader.next(key, value)) {
+ File[] files = localDirectory.listFiles();
+ if (files != null) {
+ for (File file : files) {
+ String name = file.getName();
try {
- key.write(writer);
- value.write(writer);
- } catch (EOFException ex) {
- break;
+ UUID.fromString(name);
+ } catch (IllegalArgumentException ex) {
+ log.info("Ignoring non-log file " + file.getAbsolutePath());
+ continue;
}
- }
- writer.close();
- reader.close();
- fs.rename(tmp, new Path(tmp.getParent(), name));
- if (options.deleteLocal) {
- if (file.delete()) {
- log.info("Copied and deleted: " + name);
+ LogFileKey key = new LogFileKey();
+ LogFileValue value = new LogFileValue();
+
+ log.info("Openning local log " + file.getAbsolutePath());
+
+ Path localWal = new Path(file.toURI());
+ FileSystem localFs = FileSystem.getLocal(fs.getConf());
+
+ Reader reader = new SequenceFile.Reader(localFs, localWal, localFs.getConf());
+ // Reader reader = new SequenceFile.Reader(localFs.getConf(), SequenceFile.Reader.file(localWal));
+ Path tmp = new Path(options.destination + "/" + name + ".copy");
+ FSDataOutputStream writer = fs.create(tmp);
+ while (reader.next(key, value)) {
+ try {
+ key.write(writer);
+ value.write(writer);
+ } catch (EOFException ex) {
+ break;
+ }
+ }
+ writer.close();
+ reader.close();
+ fs.rename(tmp, new Path(tmp.getParent(), name));
+
+ if (options.deleteLocal) {
+ if (file.delete()) {
+ log.info("Copied and deleted: " + name);
+ } else {
+ log.info("Failed to delete: " + name + " (but it is safe for you to delete it manually).");
+ }
} else {
- log.info("Failed to delete: " + name + " (but it is safe for you to delete it manually).");
+ log.info("Safe to delete: " + name);
}
- } else {
- log.info("Safe to delete: " + name);
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java b/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
index ea8874a..e5d893a 100644
--- a/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
+++ b/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
@@ -102,8 +102,11 @@ public class RootFilesTest {
public void assertFiles(String... files) {
HashSet<String> actual = new HashSet<String>();
- for (File file : rootTabletDir.listFiles()) {
- actual.add(file.getName());
+ File[] children = rootTabletDir.listFiles();
+ if (children != null) {
+ for (File file : children) {
+ actual.add(file.getName());
+ }
}
HashSet<String> expected = new HashSet<String>();
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
----------------------------------------------------------------------
diff --git a/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java b/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
index 866e716..704d0c3 100644
--- a/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
+++ b/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
@@ -167,7 +167,7 @@ public class FormatterCommandTest {
sb.append(key).append(tab);
for (byte b : v.get()) {
- if ((b >= 48 && b <= 57) || (b >= 97 || b <= 102)) {
+ if ((b >= 48 && b <= 57) || (b >= 97 && b <= 102)) {
sb.append(String.format("0x%x ", Integer.valueOf(b)));
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
----------------------------------------------------------------------
diff --git a/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java b/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
index 53b36b4..9ebbae0 100644
--- a/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
+++ b/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
@@ -251,8 +251,11 @@ public class AccumuloClassLoader {
return;
if (file.isDirectory()) {
File[] children = file.listFiles();
- for (File child : children)
- findMavenTargetClasses(paths, child, depth + 1);
+ if (children != null) {
+ for (File child : children) {
+ findMavenTargetClasses(paths, child, depth + 1);
+ }
+ }
} else if ("pom.xml".equals(file.getName())) {
paths.add(file.getParentFile().getAbsolutePath() + File.separator + "target" + File.separator + "classes");
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
----------------------------------------------------------------------
diff --git a/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java b/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
index 641da8a..85b47df 100644
--- a/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
+++ b/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
@@ -95,7 +95,8 @@ public class UniqueFileReplicator implements VfsComponent, FileReplicator {
}
if (tempDir.exists()) {
- int numChildren = tempDir.list().length;
+ String[] list = tempDir.list();
+ int numChildren = list == null ? 0 : list.length;
if (0 == numChildren && !tempDir.delete())
log.warn("Cannot delete empty directory: " + tempDir);
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java b/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
index cfe8551..e40bc8e 100644
--- a/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
+++ b/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
@@ -94,6 +94,9 @@ public class TimeBinner {
switch (operation) {
case AMM_HACK1: {
+ if (opts.dataColumn < 2) {
+ throw new IllegalArgumentException("--dataColumn must be at least 2");
+ }
double data_min = Double.parseDouble(tokens[opts.dataColumn - 2]);
double data_max = Double.parseDouble(tokens[opts.dataColumn - 1]);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java b/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
index 7d2c65b..00c7eb0 100644
--- a/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
+++ b/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
@@ -79,8 +79,10 @@ public class UndefinedAnalyzer {
}
});
- for (File log : ingestLogs) {
- parseLog(log);
+ if (ingestLogs != null) {
+ for (File log : ingestLogs) {
+ parseLog(log);
+ }
}
}
@@ -175,53 +177,55 @@ public class UndefinedAnalyzer {
String currentYear = (Calendar.getInstance().get(Calendar.YEAR)) + "";
String currentMonth = (Calendar.getInstance().get(Calendar.MONTH) + 1) + "";
- for (File masterLog : masterLogs) {
-
- BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(masterLog), UTF_8));
- String line;
- try {
- while ((line = reader.readLine()) != null) {
- if (line.contains("TABLET_LOADED")) {
- String[] tokens = line.split("\\s+");
- String tablet = tokens[8];
- String server = tokens[10];
-
- int pos1 = -1;
- int pos2 = -1;
- int pos3 = -1;
-
- for (int i = 0; i < tablet.length(); i++) {
- if (tablet.charAt(i) == '<' || tablet.charAt(i) == ';') {
- if (pos1 == -1) {
- pos1 = i;
- } else if (pos2 == -1) {
- pos2 = i;
- } else {
- pos3 = i;
+ if (masterLogs != null) {
+ for (File masterLog : masterLogs) {
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(masterLog), UTF_8));
+ String line;
+ try {
+ while ((line = reader.readLine()) != null) {
+ if (line.contains("TABLET_LOADED")) {
+ String[] tokens = line.split("\\s+");
+ String tablet = tokens[8];
+ String server = tokens[10];
+
+ int pos1 = -1;
+ int pos2 = -1;
+ int pos3 = -1;
+
+ for (int i = 0; i < tablet.length(); i++) {
+ if (tablet.charAt(i) == '<' || tablet.charAt(i) == ';') {
+ if (pos1 == -1) {
+ pos1 = i;
+ } else if (pos2 == -1) {
+ pos2 = i;
+ } else {
+ pos3 = i;
+ }
}
}
- }
- if (pos1 > 0 && pos2 > 0 && pos3 == -1) {
- String tid = tablet.substring(0, pos1);
- String endRow = tablet.charAt(pos1) == '<' ? "8000000000000000" : tablet.substring(pos1 + 1, pos2);
- String prevEndRow = tablet.charAt(pos2) == '<' ? "" : tablet.substring(pos2 + 1);
- if (tid.equals(tableId)) {
- // System.out.println(" "+server+" "+tid+" "+endRow+" "+prevEndRow);
- Date date = sdf.parse(tokens[0] + " " + tokens[1] + " " + currentYear + " " + currentMonth);
- // System.out.println(" "+date);
+ if (pos1 > 0 && pos2 > 0 && pos3 == -1) {
+ String tid = tablet.substring(0, pos1);
+ String endRow = tablet.charAt(pos1) == '<' ? "8000000000000000" : tablet.substring(pos1 + 1, pos2);
+ String prevEndRow = tablet.charAt(pos2) == '<' ? "" : tablet.substring(pos2 + 1);
+ if (tid.equals(tableId)) {
+ // System.out.println(" "+server+" "+tid+" "+endRow+" "+prevEndRow);
+ Date date = sdf.parse(tokens[0] + " " + tokens[1] + " " + currentYear + " " + currentMonth);
+ // System.out.println(" "+date);
- assignments.add(new TabletAssignment(tablet, endRow, prevEndRow, server, date.getTime()));
+ assignments.add(new TabletAssignment(tablet, endRow, prevEndRow, server, date.getTime()));
+ }
+ } else if (!tablet.startsWith("!0")) {
+ System.err.println("Cannot parse tablet " + tablet);
}
- } else if (!tablet.startsWith("!0")) {
- System.err.println("Cannot parse tablet " + tablet);
- }
+ }
}
+ } finally {
+ reader.close();
}
- } finally {
- reader.close();
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java b/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
index 3a3baf0..76e8168 100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
@@ -120,6 +120,9 @@ public class CacheTestWriter {
while (true) {
File[] files = reportDir.listFiles();
+ if (files == null) {
+ throw new IllegalStateException("report directory is inaccessible");
+ }
System.out.println("files.length " + files.length);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
index fecced9..6df5aed 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
@@ -77,13 +77,15 @@ public abstract class Node {
File zkLib = new File(zkHome);
String[] files = zkLib.list();
- for (int i = 0; i < files.length; i++) {
- String f = files[i];
- if (f.matches("^zookeeper-.+jar$")) {
- if (retval == null) {
- retval = String.format("%s/%s", zkLib.getAbsolutePath(), f);
- } else {
- retval += String.format(",%s/%s", zkLib.getAbsolutePath(), f);
+ if (files != null) {
+ for (int i = 0; i < files.length; i++) {
+ String f = files[i];
+ if (f.matches("^zookeeper-.+jar$")) {
+ if (retval == null) {
+ retval = String.format("%s/%s", zkLib.getAbsolutePath(), f);
+ } else {
+ retval += String.format(",%s/%s", zkLib.getAbsolutePath(), f);
+ }
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java b/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
index 00a5749..14361a6 100644
--- a/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
@@ -18,6 +18,7 @@ package org.apache.accumulo.test;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
@@ -124,7 +125,9 @@ public class AuditMessageIT extends ConfigurableMacIT {
System.out.println("Start of captured audit messages for step " + stepName);
ArrayList<String> result = new ArrayList<String>();
- for (File file : getCluster().getConfig().getLogDir().listFiles()) {
+ File[] files = getCluster().getConfig().getLogDir().listFiles();
+ assertNotNull(files);
+ for (File file : files) {
// We want to grab the files called .out
if (file.getName().contains(".out") && file.isFile() && file.canRead()) {
LineIterator it = FileUtils.lineIterator(file, UTF_8.name());
[09/21] accumulo git commit: ACCUMULO-3803 Resurrected
mapreduce.lib.util
Posted by ct...@apache.org.
ACCUMULO-3803 Resurrected mapreduce.lib.util
Ran the following command in the 1.7 branch to do this. The commit below is what was at the head of the 1.6 branch at the time I ran this command.
git checkout c4eff0c2eb1320e411ac3e41b6f2db89c2d3ba33 -- core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util
I checked in the 1.6 branch and found that this code has no references (such as tests).
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/0d9c05e7
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/0d9c05e7
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/0d9c05e7
Branch: refs/heads/1.7
Commit: 0d9c05e716774f7904c7da2664063a778ab4640b
Parents: eef4dfe
Author: Keith Turner <kt...@apache.org>
Authored: Tue May 12 12:51:18 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 13:40:37 2015 -0400
----------------------------------------------------------------------
.../mapreduce/lib/util/ConfiguratorBase.java | 277 +++++++++++
.../lib/util/FileOutputConfigurator.java | 170 +++++++
.../mapreduce/lib/util/InputConfigurator.java | 462 +++++++++++++++++++
.../mapreduce/lib/util/OutputConfigurator.java | 196 ++++++++
.../client/mapreduce/lib/util/package-info.java | 22 +
5 files changed, 1127 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
new file mode 100644
index 0000000..20fbbea
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Level;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link Instance#getConnector(String, AuthenticationToken)}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum ConnectorInfo {
+ IS_CONFIGURED, PRINCIPAL, TOKEN, TOKEN_CLASS
+ }
+
+ /**
+ * Configuration keys for {@link Instance}, {@link ZooKeeperInstance}, and {@link MockInstance}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static enum InstanceOpts {
+ TYPE, NAME, ZOO_KEEPERS;
+ }
+
+ /**
+ * Configuration keys for general configuration options.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static enum GeneralOpts {
+ LOG_LEVEL
+ }
+
+ /**
+ * Provides a configuration key for a given feature enum, prefixed by the implementingClass
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param e
+ * the enum used to provide the unique part of the configuration key
+ * @return the configuration key
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static String enumToConfKey(Class<?> implementingClass, Enum<?> e) {
+ return implementingClass.getSimpleName() + "." + e.getDeclaringClass().getSimpleName() + "." + StringUtils.camelize(e.name().toLowerCase());
+ }
+
+ /**
+ * Sets the connector information needed to communicate with Accumulo in this job.
+ *
+ * <p>
+ * <b>WARNING:</b> The serialized token is stored in the configuration and shared with all MapReduce tasks. It is BASE64 encoded to provide a charset safe
+ * conversion to a string, and is not intended to be secure.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param principal
+ * a valid Accumulo user name
+ * @param token
+ * the user's password
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setConnectorInfo(Class<?> implementingClass, Configuration conf, String principal, AuthenticationToken token)
+ throws AccumuloSecurityException {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setConnectorInfo(implementingClass, conf, principal, token);
+ }
+
+ /**
+ * Determines if the connector info has already been set for this instance.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the connector info has already been set, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static Boolean isConnectorInfoSet(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.isConnectorInfoSet(implementingClass, conf);
+ }
+
+ /**
+ * Gets the user name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the principal
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static String getPrincipal(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getPrincipal(implementingClass, conf);
+ }
+
+ /**
+ * DON'T USE THIS. No, really, don't use this. You already have an {@link AuthenticationToken} with
+ * {@link org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase#getAuthenticationToken(Class, Configuration)}. You don't need to construct it
+ * yourself.
+ * <p>
+ * Gets the serialized token class from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the principal
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static String getTokenClass(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getAuthenticationToken(implementingClass, conf).getClass().getName();
+ }
+
+ /**
+ * DON'T USE THIS. No, really, don't use this. You already have an {@link AuthenticationToken} with
+ * {@link org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase#getAuthenticationToken(Class, Configuration)}. You don't need to construct it
+ * yourself.
+ * <p>
+ * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
+ * provide a charset safe conversion to a string, and is not intended to be secure.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the decoded principal's authentication token
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static byte[] getToken(Class<?> implementingClass, Configuration conf) {
+ return AuthenticationTokenSerializer.serialize(org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getAuthenticationToken(
+ implementingClass, conf));
+ }
+
+ /**
+ * Configures a {@link ZooKeeperInstance} for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param instanceName
+ * the Accumulo instance name
+ * @param zooKeepers
+ * a comma-separated list of zookeeper servers
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setZooKeeperInstance(Class<?> implementingClass, Configuration conf, String instanceName, String zooKeepers) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setZooKeeperInstance(implementingClass, conf,
+ new ClientConfiguration().withInstance(instanceName).withZkHosts(zooKeepers));
+ }
+
+ /**
+ * Configures a {@link MockInstance} for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param instanceName
+ * the Accumulo instance name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setMockInstance(Class<?> implementingClass, Configuration conf, String instanceName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setMockInstance(implementingClass, conf, instanceName);
+ }
+
+ /**
+ * Initializes an Accumulo {@link Instance} based on the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return an Accumulo instance
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setZooKeeperInstance(Class, Configuration, String, String)
+ * @see #setMockInstance(Class, Configuration, String)
+ */
+ @Deprecated
+ public static Instance getInstance(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getInstance(implementingClass, conf);
+ }
+
+ /**
+ * Sets the log level for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param level
+ * the logging level
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setLogLevel(Class<?> implementingClass, Configuration conf, Level level) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setLogLevel(implementingClass, conf, level);
+ }
+
+ /**
+ * Gets the log level from this configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the log level
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setLogLevel(Class, Configuration, Level)
+ */
+ @Deprecated
+ public static Level getLogLevel(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getLogLevel(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
new file mode 100644
index 0000000..d43ecda
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class FileOutputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link AccumuloConfiguration}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Opts {
+ ACCUMULO_PROPERTIES;
+ }
+
+ /**
+ * The supported Accumulo properties we set in this OutputFormat, that change the behavior of the RecordWriter.<br />
+ * These properties correspond to the supported public static setter methods available to this class.
+ *
+ * @param property
+ * the Accumulo property to check
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static Boolean isSupportedAccumuloProperty(Property property) {
+ switch (property) {
+ case TABLE_FILE_COMPRESSION_TYPE:
+ case TABLE_FILE_COMPRESSED_BLOCK_SIZE:
+ case TABLE_FILE_BLOCK_SIZE:
+ case TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX:
+ case TABLE_FILE_REPLICATION:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * This helper method provides an AccumuloConfiguration object constructed from the Accumulo defaults, and overridden with Accumulo properties that have been
+ * stored in the Job's configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.getAccumuloConfiguration(implementingClass, conf);
+ }
+
+ /**
+ * Sets the compression type to use for data blocks. Specifying a compression may require additional libraries to be available to your Job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param compressionType
+ * one of "none", "gz", "lzo", or "snappy"
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setCompressionType(Class<?> implementingClass, Configuration conf, String compressionType) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setCompressionType(implementingClass, conf, compressionType);
+ }
+
+ /**
+ * Sets the size for data blocks within each file.<br />
+ * Data blocks are a span of key/value pairs stored in the file that are compressed and indexed as a group.
+ *
+ * <p>
+ * Making this value smaller may increase seek performance, but at the cost of increasing the size of the indexes (which can also affect seek performance).
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param dataBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setDataBlockSize(Class<?> implementingClass, Configuration conf, long dataBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setDataBlockSize(implementingClass, conf, dataBlockSize);
+ }
+
+ /**
+ * Sets the size for file blocks in the file system; file blocks are managed, and replicated, by the underlying file system.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param fileBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setFileBlockSize(Class<?> implementingClass, Configuration conf, long fileBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setFileBlockSize(implementingClass, conf, fileBlockSize);
+ }
+
+ /**
+ * Sets the size for index blocks within each file; smaller blocks means a deeper index hierarchy within the file, while larger blocks mean a more shallow
+ * index hierarchy within the file. This can affect the performance of queries.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param indexBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setIndexBlockSize(Class<?> implementingClass, Configuration conf, long indexBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setIndexBlockSize(implementingClass, conf, indexBlockSize);
+ }
+
+ /**
+ * Sets the file system replication factor for the resulting file, overriding the file system default.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param replication
+ * the number of replicas for produced files
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setReplication(Class<?> implementingClass, Configuration conf, int replication) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setReplication(implementingClass, conf, replication);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
new file mode 100644
index 0000000..8d0c4b1
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
@@ -0,0 +1,462 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.accumulo.core.client.ClientSideIteratorScanner;
+import org.apache.accumulo.core.client.IsolatedScanner;
+import org.apache.accumulo.core.client.IteratorSetting;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.impl.Tables;
+import org.apache.accumulo.core.client.impl.TabletLocator;
+import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.Pair;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class InputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link Scanner}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum ScanOpts {
+ TABLE_NAME, AUTHORIZATIONS, RANGES, COLUMNS, ITERATORS
+ }
+
+ /**
+ * Configuration keys for various features.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Features {
+ AUTO_ADJUST_RANGES, SCAN_ISOLATION, USE_LOCAL_ITERATORS, SCAN_OFFLINE
+ }
+
+ /**
+ * Sets the name of the input table, over which this job will scan.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param tableName
+ * the table to use when the tablename is null in the write call
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setInputTableName(Class<?> implementingClass, Configuration conf, String tableName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setInputTableName(implementingClass, conf, tableName);
+ }
+
+ /**
+ * Gets the table name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the table name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setInputTableName(Class, Configuration, String)
+ */
+ @Deprecated
+ public static String getInputTableName(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getInputTableName(implementingClass, conf);
+ }
+
+ /**
+ * Sets the {@link Authorizations} used to scan. Must be a subset of the user's authorization. Defaults to the empty set.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param auths
+ * the user's authorizations
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setScanAuthorizations(Class<?> implementingClass, Configuration conf, Authorizations auths) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setScanAuthorizations(implementingClass, conf, auths);
+ }
+
+ /**
+ * Gets the authorizations to set for the scans from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the Accumulo scan authorizations
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setScanAuthorizations(Class, Configuration, Authorizations)
+ */
+ @Deprecated
+ public static Authorizations getScanAuthorizations(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getScanAuthorizations(implementingClass, conf);
+ }
+
+ /**
+ * Sets the input ranges to scan for this job. If not set, the entire table will be scanned.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param ranges
+ * the ranges that will be mapped over
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setRanges(Class<?> implementingClass, Configuration conf, Collection<Range> ranges) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setRanges(implementingClass, conf, ranges);
+ }
+
+ /**
+ * Gets the ranges to scan over from a job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the ranges
+ * @throws IOException
+ * if the ranges have been encoded improperly
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setRanges(Class, Configuration, Collection)
+ */
+ @Deprecated
+ public static List<Range> getRanges(Class<?> implementingClass, Configuration conf) throws IOException {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getRanges(implementingClass, conf);
+ }
+
+ /**
+ * Restricts the columns that will be mapped over for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param columnFamilyColumnQualifierPairs
+ * a pair of {@link Text} objects corresponding to column family and column qualifier. If the column qualifier is null, the entire column family is
+ * selected. An empty set is the default and is equivalent to scanning the all columns.
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void fetchColumns(Class<?> implementingClass, Configuration conf, Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.fetchColumns(implementingClass, conf, columnFamilyColumnQualifierPairs);
+ }
+
+ /**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ */
+ @Deprecated
+ public static String[] serializeColumns(Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.serializeColumns(columnFamilyColumnQualifierPairs);
+ }
+
+ /**
+ * Gets the columns to be mapped over from this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return a set of columns
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #fetchColumns(Class, Configuration, Collection)
+ */
+ @Deprecated
+ public static Set<Pair<Text,Text>> getFetchedColumns(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getFetchedColumns(implementingClass, conf);
+ }
+
+ /**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ */
+ @Deprecated
+ public static Set<Pair<Text,Text>> deserializeFetchedColumns(Collection<String> serialized) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.deserializeFetchedColumns(serialized);
+ }
+
+ /**
+ * Encode an iterator on the input for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param cfg
+ * the configuration of the iterator
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void addIterator(Class<?> implementingClass, Configuration conf, IteratorSetting cfg) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.addIterator(implementingClass, conf, cfg);
+ }
+
+ /**
+ * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return a list of iterators
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #addIterator(Class, Configuration, IteratorSetting)
+ */
+ @Deprecated
+ public static List<IteratorSetting> getIterators(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getIterators(implementingClass, conf);
+ }
+
+ /**
+ * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
+ * Disabling this feature will cause exactly one Map task to be created for each specified range. The default setting is enabled. *
+ *
+ * <p>
+ * By default, this feature is <b>enabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @see #setRanges(Class, Configuration, Collection)
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setAutoAdjustRanges(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setAutoAdjustRanges(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has auto-adjust ranges enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return false if the feature is disabled, true otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setAutoAdjustRanges(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean getAutoAdjustRanges(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getAutoAdjustRanges(implementingClass, conf);
+ }
+
+ /**
+ * Controls the use of the {@link IsolatedScanner} in this job.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setScanIsolation(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setScanIsolation(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has isolation enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setScanIsolation(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean isIsolated(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.isIsolated(implementingClass, conf);
+ }
+
+ /**
+ * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack to be constructed within the Map
+ * task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be available on the classpath for the task.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setLocalIterators(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setLocalIterators(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration uses local iterators.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setLocalIterators(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean usesLocalIterators(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.usesLocalIterators(implementingClass, conf);
+ }
+
+ /**
+ * <p>
+ * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
+ * table's files. If the table is not offline, then the job will fail. If the table comes online during the map reduce job, it is likely that the job will
+ * fail.
+ *
+ * <p>
+ * To use this option, the map reduce user will need access to read the Accumulo directory in HDFS.
+ *
+ * <p>
+ * Reading the offline table will create the scan time iterator stack in the map process. So any iterators that are configured for the table will need to be
+ * on the mapper's classpath. The accumulo-site.xml may need to be on the mapper's classpath if HDFS or the Accumulo directory in HDFS are non-standard.
+ *
+ * <p>
+ * One way to use this feature is to clone a table, take the clone offline, and use the clone as the input table for a map reduce job. If you plan to map
+ * reduce over the data many times, it may be better to the compact the table, clone it, take it offline, and use the clone for all map reduce jobs. The
+ * reason to do this is that compaction will reduce each tablet in the table to one file, and it is faster to read from one file.
+ *
+ * <p>
+ * There are two possible advantages to reading a tables file directly out of HDFS. First, you may see better read performance. Second, it will support
+ * speculative execution better. When reading an online table speculative execution can put more load on an already slow tablet server.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setOfflineTableScan(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setOfflineTableScan(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has the offline table scan feature enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setOfflineTableScan(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean isOfflineScan(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.isOfflineScan(implementingClass, conf);
+ }
+
+ /**
+ * Initializes an Accumulo {@link TabletLocator} based on the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return an Accumulo tablet locator
+ * @throws TableNotFoundException
+ * if the table name set on the configuration doesn't exist
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static TabletLocator getTabletLocator(Class<?> implementingClass, Configuration conf) throws TableNotFoundException {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getTabletLocator(implementingClass, conf,
+ Tables.getTableId(getInstance(implementingClass, conf), getInputTableName(implementingClass, conf)));
+ }
+
+ // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
+ /**
+ * Check whether a configuration is fully configured to be used with an Accumulo {@link org.apache.hadoop.mapreduce.InputFormat}.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @throws IOException
+ * if the context is improperly configured
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void validateOptions(Class<?> implementingClass, Configuration conf) throws IOException {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.validateOptions(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
new file mode 100644
index 0000000..39163a6
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class OutputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link BatchWriter}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum WriteOpts {
+ DEFAULT_TABLE_NAME, BATCH_WRITER_CONFIG
+ }
+
+ /**
+ * Configuration keys for various features.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Features {
+ CAN_CREATE_TABLES, SIMULATION_MODE
+ }
+
+ /**
+ * Sets the default table name to use if one emits a null in place of a table name for a given mutation. Table names can only be alpha-numeric and
+ * underscores.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param tableName
+ * the table to use when the tablename is null in the write call
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setDefaultTableName(Class<?> implementingClass, Configuration conf, String tableName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setDefaultTableName(implementingClass, conf, tableName);
+ }
+
+ /**
+ * Gets the default table name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the default table name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setDefaultTableName(Class, Configuration, String)
+ */
+ @Deprecated
+ public static String getDefaultTableName(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getDefaultTableName(implementingClass, conf);
+ }
+
+ /**
+ * Sets the configuration for the job's {@link BatchWriter} instances. If not set, a new {@link BatchWriterConfig}, with sensible built-in defaults is
+ * used. Setting the configuration multiple times overwrites any previous configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param bwConfig
+ * the configuration for the {@link BatchWriter}
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setBatchWriterOptions(Class<?> implementingClass, Configuration conf, BatchWriterConfig bwConfig) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setBatchWriterOptions(implementingClass, conf, bwConfig);
+ }
+
+ /**
+ * Gets the {@link BatchWriterConfig} settings.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the configuration object
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setBatchWriterOptions(Class, Configuration, BatchWriterConfig)
+ */
+ @Deprecated
+ public static BatchWriterConfig getBatchWriterOptions(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getBatchWriterOptions(implementingClass, conf);
+ }
+
+ /**
+ * Sets the directive to create new tables, as necessary. Table names can only be alpha-numeric and underscores.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setCreateTables(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setCreateTables(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether tables are permitted to be created as needed.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setCreateTables(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean canCreateTables(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.canCreateTables(implementingClass, conf);
+ }
+
+ /**
+ * Sets the directive to use simulation mode for this job. In simulation mode, no output is produced. This is useful for testing.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setSimulationMode(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setSimulationMode(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether this feature is enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setSimulationMode(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean getSimulationMode(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getSimulationMode(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
new file mode 100644
index 0000000..269ffea
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * @deprecated since 1.6.0; This package was moved out of the public API.
+ * @since 1.5.0
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
[10/21] accumulo git commit: ACCUMULO-3803 Resurrected
mapreduce.lib.util
Posted by ct...@apache.org.
ACCUMULO-3803 Resurrected mapreduce.lib.util
Ran the following command in the 1.7 branch to do this. The commit below is what was at head of the 1.6 branch at the time I ran this command.
git checkout c4eff0c2eb1320e411ac3e41b6f2db89c2d3ba33 -- core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util
I checked in the 1.6 branch and found that this code has no references (like test).
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/0d9c05e7
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/0d9c05e7
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/0d9c05e7
Branch: refs/heads/master
Commit: 0d9c05e716774f7904c7da2664063a778ab4640b
Parents: eef4dfe
Author: Keith Turner <kt...@apache.org>
Authored: Tue May 12 12:51:18 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 13:40:37 2015 -0400
----------------------------------------------------------------------
.../mapreduce/lib/util/ConfiguratorBase.java | 277 +++++++++++
.../lib/util/FileOutputConfigurator.java | 170 +++++++
.../mapreduce/lib/util/InputConfigurator.java | 462 +++++++++++++++++++
.../mapreduce/lib/util/OutputConfigurator.java | 196 ++++++++
.../client/mapreduce/lib/util/package-info.java | 22 +
5 files changed, 1127 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
new file mode 100644
index 0000000..20fbbea
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/ConfiguratorBase.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.Instance;
+import org.apache.accumulo.core.client.ZooKeeperInstance;
+import org.apache.accumulo.core.client.mock.MockInstance;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
+import org.apache.accumulo.core.client.security.tokens.AuthenticationToken.AuthenticationTokenSerializer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.log4j.Level;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link Instance#getConnector(String, AuthenticationToken)}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum ConnectorInfo {
+ IS_CONFIGURED, PRINCIPAL, TOKEN, TOKEN_CLASS
+ }
+
+ /**
+ * Configuration keys for {@link Instance}, {@link ZooKeeperInstance}, and {@link MockInstance}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static enum InstanceOpts {
+ TYPE, NAME, ZOO_KEEPERS;
+ }
+
+ /**
+ * Configuration keys for general configuration options.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static enum GeneralOpts {
+ LOG_LEVEL
+ }
+
+ /**
+ * Provides a configuration key for a given feature enum, prefixed by the implementingClass
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param e
+ * the enum used to provide the unique part of the configuration key
+ * @return the configuration key
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static String enumToConfKey(Class<?> implementingClass, Enum<?> e) {
+ return implementingClass.getSimpleName() + "." + e.getDeclaringClass().getSimpleName() + "." + StringUtils.camelize(e.name().toLowerCase());
+ }
+
+ /**
+ * Sets the connector information needed to communicate with Accumulo in this job.
+ *
+ * <p>
+ * <b>WARNING:</b> The serialized token is stored in the configuration and shared with all MapReduce tasks. It is BASE64 encoded to provide a charset safe
+ * conversion to a string, and is not intended to be secure.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param principal
+ * a valid Accumulo user name
+ * @param token
+ * the user's password
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setConnectorInfo(Class<?> implementingClass, Configuration conf, String principal, AuthenticationToken token)
+ throws AccumuloSecurityException {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setConnectorInfo(implementingClass, conf, principal, token);
+ }
+
+ /**
+ * Determines if the connector info has already been set for this instance.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the connector info has already been set, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static Boolean isConnectorInfoSet(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.isConnectorInfoSet(implementingClass, conf);
+ }
+
+ /**
+ * Gets the user name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the principal
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static String getPrincipal(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getPrincipal(implementingClass, conf);
+ }
+
+ /**
+ * DON'T USE THIS. No, really, don't use this. You already have an {@link AuthenticationToken} with
+ * {@link org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase#getAuthenticationToken(Class, Configuration)}. You don't need to construct it
+ * yourself.
+ * <p>
+ * Gets the serialized token class from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the fully-qualified class name of the configured authentication token
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static String getTokenClass(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getAuthenticationToken(implementingClass, conf).getClass().getName();
+ }
+
+ /**
+ * DON'T USE THIS. No, really, don't use this. You already have an {@link AuthenticationToken} with
+ * {@link org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase#getAuthenticationToken(Class, Configuration)}. You don't need to construct it
+ * yourself.
+ * <p>
+ * Gets the password from the configuration. WARNING: The password is stored in the Configuration and shared with all MapReduce tasks; It is BASE64 encoded to
+ * provide a charset safe conversion to a string, and is not intended to be secure.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the serialized bytes of the principal's authentication token
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setConnectorInfo(Class, Configuration, String, AuthenticationToken)
+ */
+ @Deprecated
+ public static byte[] getToken(Class<?> implementingClass, Configuration conf) {
+ return AuthenticationTokenSerializer.serialize(org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getAuthenticationToken(
+ implementingClass, conf));
+ }
+
+ /**
+ * Configures a {@link ZooKeeperInstance} for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param instanceName
+ * the Accumulo instance name
+ * @param zooKeepers
+ * a comma-separated list of zookeeper servers
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setZooKeeperInstance(Class<?> implementingClass, Configuration conf, String instanceName, String zooKeepers) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setZooKeeperInstance(implementingClass, conf,
+ new ClientConfiguration().withInstance(instanceName).withZkHosts(zooKeepers));
+ }
+
+ /**
+ * Configures a {@link MockInstance} for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param instanceName
+ * the Accumulo instance name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setMockInstance(Class<?> implementingClass, Configuration conf, String instanceName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setMockInstance(implementingClass, conf, instanceName);
+ }
+
+ /**
+ * Initializes an Accumulo {@link Instance} based on the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return an Accumulo instance
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setZooKeeperInstance(Class, Configuration, String, String)
+ * @see #setMockInstance(Class, Configuration, String)
+ */
+ @Deprecated
+ public static Instance getInstance(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getInstance(implementingClass, conf);
+ }
+
+ /**
+ * Sets the log level for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param level
+ * the logging level
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setLogLevel(Class<?> implementingClass, Configuration conf, Level level) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.setLogLevel(implementingClass, conf, level);
+ }
+
+ /**
+ * Gets the log level from this configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the log level
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setLogLevel(Class, Configuration, Level)
+ */
+ @Deprecated
+ public static Level getLogLevel(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase.getLogLevel(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
new file mode 100644
index 0000000..d43ecda
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/FileOutputConfigurator.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class FileOutputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link AccumuloConfiguration}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Opts {
+ ACCUMULO_PROPERTIES;
+ }
+
+ /**
+ * The supported Accumulo properties we set in this OutputFormat, that change the behavior of the RecordWriter.<br />
+ * These properties correspond to the supported public static setter methods available to this class.
+ *
+ * @param property
+ * the Accumulo property to check
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ protected static Boolean isSupportedAccumuloProperty(Property property) {
+ switch (property) {
+ case TABLE_FILE_COMPRESSION_TYPE:
+ case TABLE_FILE_COMPRESSED_BLOCK_SIZE:
+ case TABLE_FILE_BLOCK_SIZE:
+ case TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX:
+ case TABLE_FILE_REPLICATION:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * This helper method provides an AccumuloConfiguration object constructed from the Accumulo defaults, and overridden with Accumulo properties that have been
+ * stored in the Job's configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.getAccumuloConfiguration(implementingClass, conf);
+ }
+
+ /**
+ * Sets the compression type to use for data blocks. Specifying a compression may require additional libraries to be available to your Job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param compressionType
+ * one of "none", "gz", "lzo", or "snappy"
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setCompressionType(Class<?> implementingClass, Configuration conf, String compressionType) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setCompressionType(implementingClass, conf, compressionType);
+ }
+
+ /**
+ * Sets the size for data blocks within each file.<br />
+ * Data blocks are a span of key/value pairs stored in the file that are compressed and indexed as a group.
+ *
+ * <p>
+ * Making this value smaller may increase seek performance, but at the cost of increasing the size of the indexes (which can also affect seek performance).
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param dataBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setDataBlockSize(Class<?> implementingClass, Configuration conf, long dataBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setDataBlockSize(implementingClass, conf, dataBlockSize);
+ }
+
+ /**
+ * Sets the size for file blocks in the file system; file blocks are managed, and replicated, by the underlying file system.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param fileBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setFileBlockSize(Class<?> implementingClass, Configuration conf, long fileBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setFileBlockSize(implementingClass, conf, fileBlockSize);
+ }
+
+ /**
+ * Sets the size for index blocks within each file; smaller blocks means a deeper index hierarchy within the file, while larger blocks mean a more shallow
+ * index hierarchy within the file. This can affect the performance of queries.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param indexBlockSize
+ * the block size, in bytes
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setIndexBlockSize(Class<?> implementingClass, Configuration conf, long indexBlockSize) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setIndexBlockSize(implementingClass, conf, indexBlockSize);
+ }
+
+ /**
+ * Sets the file system replication factor for the resulting file, overriding the file system default.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param replication
+ * the number of replicas for produced files
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setReplication(Class<?> implementingClass, Configuration conf, int replication) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.FileOutputConfigurator.setReplication(implementingClass, conf, replication);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
new file mode 100644
index 0000000..8d0c4b1
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/InputConfigurator.java
@@ -0,0 +1,462 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.accumulo.core.client.ClientSideIteratorScanner;
+import org.apache.accumulo.core.client.IsolatedScanner;
+import org.apache.accumulo.core.client.IteratorSetting;
+import org.apache.accumulo.core.client.Scanner;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.accumulo.core.client.impl.Tables;
+import org.apache.accumulo.core.client.impl.TabletLocator;
+import org.apache.accumulo.core.data.Range;
+import org.apache.accumulo.core.security.Authorizations;
+import org.apache.accumulo.core.util.Pair;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class InputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link Scanner}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum ScanOpts {
+ TABLE_NAME, AUTHORIZATIONS, RANGES, COLUMNS, ITERATORS
+ }
+
+ /**
+ * Configuration keys for various features.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Features {
+ AUTO_ADJUST_RANGES, SCAN_ISOLATION, USE_LOCAL_ITERATORS, SCAN_OFFLINE
+ }
+
+ /**
+ * Sets the name of the input table, over which this job will scan.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param tableName
+ * the name of the table to read from during this job
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setInputTableName(Class<?> implementingClass, Configuration conf, String tableName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setInputTableName(implementingClass, conf, tableName);
+ }
+
+ /**
+ * Gets the table name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the table name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setInputTableName(Class, Configuration, String)
+ */
+ @Deprecated
+ public static String getInputTableName(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getInputTableName(implementingClass, conf);
+ }
+
+ /**
+ * Sets the {@link Authorizations} used to scan. Must be a subset of the user's authorization. Defaults to the empty set.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param auths
+ * the user's authorizations
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setScanAuthorizations(Class<?> implementingClass, Configuration conf, Authorizations auths) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setScanAuthorizations(implementingClass, conf, auths);
+ }
+
+ /**
+ * Gets the authorizations to set for the scans from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the Accumulo scan authorizations
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setScanAuthorizations(Class, Configuration, Authorizations)
+ */
+ @Deprecated
+ public static Authorizations getScanAuthorizations(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getScanAuthorizations(implementingClass, conf);
+ }
+
+ /**
+ * Sets the input ranges to scan for this job. If not set, the entire table will be scanned.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param ranges
+ * the ranges that will be mapped over
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setRanges(Class<?> implementingClass, Configuration conf, Collection<Range> ranges) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setRanges(implementingClass, conf, ranges);
+ }
+
+ /**
+ * Gets the ranges to scan over from a job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the ranges
+ * @throws IOException
+ * if the ranges have been encoded improperly
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setRanges(Class, Configuration, Collection)
+ */
+ @Deprecated
+ public static List<Range> getRanges(Class<?> implementingClass, Configuration conf) throws IOException {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getRanges(implementingClass, conf);
+ }
+
+ /**
+ * Restricts the columns that will be mapped over for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param columnFamilyColumnQualifierPairs
+ * a pair of {@link Text} objects corresponding to column family and column qualifier. If the column qualifier is null, the entire column family is
+ * selected. An empty set is the default and is equivalent to scanning all columns.
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void fetchColumns(Class<?> implementingClass, Configuration conf, Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.fetchColumns(implementingClass, conf, columnFamilyColumnQualifierPairs);
+ }
+
+ /**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ */
+ @Deprecated
+ public static String[] serializeColumns(Collection<Pair<Text,Text>> columnFamilyColumnQualifierPairs) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.serializeColumns(columnFamilyColumnQualifierPairs);
+ }
+
+ /**
+ * Gets the columns to be mapped over from this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return a set of columns
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #fetchColumns(Class, Configuration, Collection)
+ */
+ @Deprecated
+ public static Set<Pair<Text,Text>> getFetchedColumns(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getFetchedColumns(implementingClass, conf);
+ }
+
+ /**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ */
+ @Deprecated
+ public static Set<Pair<Text,Text>> deserializeFetchedColumns(Collection<String> serialized) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.deserializeFetchedColumns(serialized);
+ }
+
+ /**
+ * Encode an iterator on the input for this job.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param cfg
+ * the configuration of the iterator
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void addIterator(Class<?> implementingClass, Configuration conf, IteratorSetting cfg) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.addIterator(implementingClass, conf, cfg);
+ }
+
+ /**
+ * Gets a list of the iterator settings (for iterators to apply to a scanner) from this configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return a list of iterators
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #addIterator(Class, Configuration, IteratorSetting)
+ */
+ @Deprecated
+ public static List<IteratorSetting> getIterators(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getIterators(implementingClass, conf);
+ }
+
+ /**
+ * Controls the automatic adjustment of ranges for this job. This feature merges overlapping ranges, then splits them to align with tablet boundaries.
+ * Disabling this feature will cause exactly one Map task to be created for each specified range. The default setting is enabled.
+ *
+ * <p>
+ * By default, this feature is <b>enabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @see #setRanges(Class, Configuration, Collection)
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setAutoAdjustRanges(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setAutoAdjustRanges(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has auto-adjust ranges enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return false if the feature is disabled, true otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setAutoAdjustRanges(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean getAutoAdjustRanges(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getAutoAdjustRanges(implementingClass, conf);
+ }
+
+ /**
+ * Controls the use of the {@link IsolatedScanner} in this job.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setScanIsolation(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setScanIsolation(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has isolation enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setScanIsolation(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean isIsolated(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.isIsolated(implementingClass, conf);
+ }
+
+ /**
+ * Controls the use of the {@link ClientSideIteratorScanner} in this job. Enabling this feature will cause the iterator stack to be constructed within the Map
+ * task, rather than within the Accumulo TServer. To use this feature, all classes needed for those iterators must be available on the classpath for the task.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setLocalIterators(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setLocalIterators(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration uses local iterators.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setLocalIterators(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean usesLocalIterators(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.usesLocalIterators(implementingClass, conf);
+ }
+
+ /**
+ * <p>
+ * Enable reading offline tables. By default, this feature is disabled and only online tables are scanned. This will make the map reduce job directly read the
+ * table's files. If the table is not offline, then the job will fail. If the table comes online during the map reduce job, it is likely that the job will
+ * fail.
+ *
+ * <p>
+ * To use this option, the map reduce user will need access to read the Accumulo directory in HDFS.
+ *
+ * <p>
+ * Reading the offline table will create the scan time iterator stack in the map process. So any iterators that are configured for the table will need to be
+ * on the mapper's classpath. The accumulo-site.xml may need to be on the mapper's classpath if HDFS or the Accumulo directory in HDFS are non-standard.
+ *
+ * <p>
+ * One way to use this feature is to clone a table, take the clone offline, and use the clone as the input table for a map reduce job. If you plan to map
+ * reduce over the data many times, it may be better to compact the table, clone it, take it offline, and use the clone for all map reduce jobs. The
+ * reason to do this is that compaction will reduce each tablet in the table to one file, and it is faster to read from one file.
+ *
+ * <p>
+ * There are two possible advantages to reading a table's files directly out of HDFS. First, you may see better read performance. Second, it will support
+ * speculative execution better. When reading an online table, speculative execution can put more load on an already slow tablet server.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setOfflineTableScan(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.setOfflineTableScan(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether a configuration has the offline table scan feature enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setOfflineTableScan(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean isOfflineScan(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.isOfflineScan(implementingClass, conf);
+ }
+
+ /**
+ * Initializes an Accumulo {@link TabletLocator} based on the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return an Accumulo tablet locator
+ * @throws TableNotFoundException
+ * if the table name set on the configuration doesn't exist
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static TabletLocator getTabletLocator(Class<?> implementingClass, Configuration conf) throws TableNotFoundException {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.getTabletLocator(implementingClass, conf,
+ Tables.getTableId(getInstance(implementingClass, conf), getInputTableName(implementingClass, conf)));
+ }
+
+ // InputFormat doesn't have the equivalent of OutputFormat's checkOutputSpecs(JobContext job)
+ /**
+ * Check whether a configuration is fully configured to be used with an Accumulo {@link org.apache.hadoop.mapreduce.InputFormat}.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @throws IOException
+ * if the context is improperly configured
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void validateOptions(Class<?> implementingClass, Configuration conf) throws IOException {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.InputConfigurator.validateOptions(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
new file mode 100644
index 0000000..39163a6
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/OutputConfigurator.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
+import org.apache.accumulo.core.client.BatchWriter;
+import org.apache.accumulo.core.client.BatchWriterConfig;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+@Deprecated
+public class OutputConfigurator extends ConfiguratorBase {
+
+ /**
+ * Configuration keys for {@link BatchWriter}.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum WriteOpts {
+ DEFAULT_TABLE_NAME, BATCH_WRITER_CONFIG
+ }
+
+ /**
+ * Configuration keys for various features.
+ *
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static enum Features {
+ CAN_CREATE_TABLES, SIMULATION_MODE
+ }
+
+ /**
+ * Sets the default table name to use if one emits a null in place of a table name for a given mutation. Table names can only be alpha-numeric and
+ * underscores.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param tableName
+ * the table to use when the tablename is null in the write call
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setDefaultTableName(Class<?> implementingClass, Configuration conf, String tableName) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setDefaultTableName(implementingClass, conf, tableName);
+ }
+
+ /**
+ * Gets the default table name from the configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the default table name
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setDefaultTableName(Class, Configuration, String)
+ */
+ @Deprecated
+ public static String getDefaultTableName(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getDefaultTableName(implementingClass, conf);
+ }
+
+ /**
+ * Sets the configuration for the job's {@link BatchWriter} instances. If not set, a new {@link BatchWriterConfig} with sensible built-in defaults is
+ * used. Setting the configuration multiple times overwrites any previous configuration.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param bwConfig
+ * the configuration for the {@link BatchWriter}
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setBatchWriterOptions(Class<?> implementingClass, Configuration conf, BatchWriterConfig bwConfig) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setBatchWriterOptions(implementingClass, conf, bwConfig);
+ }
+
+ /**
+ * Gets the {@link BatchWriterConfig} settings.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return the configuration object
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setBatchWriterOptions(Class, Configuration, BatchWriterConfig)
+ */
+ @Deprecated
+ public static BatchWriterConfig getBatchWriterOptions(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getBatchWriterOptions(implementingClass, conf);
+ }
+
+ /**
+ * Sets the directive to create new tables, as necessary. Table names can only be alpha-numeric and underscores.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setCreateTables(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setCreateTables(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether tables are permitted to be created as needed.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setCreateTables(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean canCreateTables(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.canCreateTables(implementingClass, conf);
+ }
+
+ /**
+ * Sets the directive to use simulation mode for this job. In simulation mode, no output is produced. This is useful for testing.
+ *
+ * <p>
+ * By default, this feature is <b>disabled</b>.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @param enableFeature
+ * the feature is enabled if true, disabled otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ */
+ @Deprecated
+ public static void setSimulationMode(Class<?> implementingClass, Configuration conf, boolean enableFeature) {
+ org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.setSimulationMode(implementingClass, conf, enableFeature);
+ }
+
+ /**
+ * Determines whether this feature is enabled.
+ *
+ * @param implementingClass
+ * the class whose name will be used as a prefix for the property configuration key
+ * @param conf
+ * the Hadoop configuration object to configure
+ * @return true if the feature is enabled, false otherwise
+ * @deprecated since 1.6.0; Configure your job with the appropriate InputFormat or OutputFormat.
+ * @since 1.5.0
+ * @see #setSimulationMode(Class, Configuration, boolean)
+ */
+ @Deprecated
+ public static Boolean getSimulationMode(Class<?> implementingClass, Configuration conf) {
+ return org.apache.accumulo.core.client.mapreduce.lib.impl.OutputConfigurator.getSimulationMode(implementingClass, conf);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/0d9c05e7/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
new file mode 100644
index 0000000..269ffea
--- /dev/null
+++ b/core/src/main/java/org/apache/accumulo/core/client/mapreduce/lib/util/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * @deprecated since 1.6.0; This package was moved out of the public API.
+ * @since 1.5.0
+ */
+package org.apache.accumulo.core.client.mapreduce.lib.util;
+
[12/21] accumulo git commit: ACCUMULO-3743 One more CHANGES change.
Posted by ct...@apache.org.
ACCUMULO-3743 One more CHANGES change.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/9fda3bbb
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/9fda3bbb
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/9fda3bbb
Branch: refs/heads/master
Commit: 9fda3bbb2c5a06764412c2ed665e47e75a9762da
Parents: 0d9c05e
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 13:42:23 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 13:42:23 2015 -0400
----------------------------------------------------------------------
CHANGES | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/9fda3bbb/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 7750d66..d966db4 100644
--- a/CHANGES
+++ b/CHANGES
@@ -474,6 +474,7 @@ Release Notes - Accumulo - Version 1.7.0
* [ACCUMULO-3791] - Agitator start/stop scripts collide variable names
* [ACCUMULO-3792] - org.apache.accumulo.test.randomwalk.Node#getMapReduceJars needs to include htrace
* [ACCUMULO-3800] - Extra DEPENDENCIES file after new apache.pom
+ * [ACCUMULO-3803] - Resurrect mapreduce.lib.util for 1.7.0
** Improvement
* [ACCUMULO-898] - look into replacing cloudtrace
[04/21] accumulo git commit: ACCUMULO-3801 moved
GarbageCollectionLogger to proper module
Posted by ct...@apache.org.
ACCUMULO-3801 moved GarbageCollectionLogger to proper module
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/c16fd9d1
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/c16fd9d1
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/c16fd9d1
Branch: refs/heads/master
Commit: c16fd9d1552085bf135c399e1ca32fad3737edb3
Parents: 4db7766
Author: Keith Turner <kt...@apache.org>
Authored: Tue May 12 11:08:53 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:00:09 2015 -0400
----------------------------------------------------------------------
.../server/GarbageCollectionLogger.java | 116 +++++++++++++++++++
.../server/GarbageCollectionLogger.java | 116 -------------------
2 files changed, 116 insertions(+), 116 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/c16fd9d1/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
----------------------------------------------------------------------
diff --git a/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java b/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
new file mode 100644
index 0000000..389a544
--- /dev/null
+++ b/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.server;
+
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.server.util.Halt;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GarbageCollectionLogger {
+ private static final Logger log = LoggerFactory.getLogger(GarbageCollectionLogger.class);
+
+ private final HashMap<String,Long> prevGcTime = new HashMap<String,Long>();
+ private long lastMemorySize = 0;
+ private long gcTimeIncreasedCount = 0;
+ private static long lastMemoryCheckTime = 0;
+
+ public GarbageCollectionLogger() {}
+
+ public synchronized void logGCInfo(AccumuloConfiguration conf) {
+ final long now = System.currentTimeMillis();
+
+ List<GarbageCollectorMXBean> gcmBeans = ManagementFactory.getGarbageCollectorMXBeans();
+ Runtime rt = Runtime.getRuntime();
+
+ StringBuilder sb = new StringBuilder("gc");
+
+ boolean sawChange = false;
+
+ long maxIncreaseInCollectionTime = 0;
+
+ for (GarbageCollectorMXBean gcBean : gcmBeans) {
+ Long prevTime = prevGcTime.get(gcBean.getName());
+ long pt = 0;
+ if (prevTime != null) {
+ pt = prevTime;
+ }
+
+ long time = gcBean.getCollectionTime();
+
+ if (time - pt != 0) {
+ sawChange = true;
+ }
+
+ long increaseInCollectionTime = time - pt;
+ sb.append(String.format(" %s=%,.2f(+%,.2f) secs", gcBean.getName(), time / 1000.0, increaseInCollectionTime / 1000.0));
+ maxIncreaseInCollectionTime = Math.max(increaseInCollectionTime, maxIncreaseInCollectionTime);
+ prevGcTime.put(gcBean.getName(), time);
+ }
+
+ long mem = rt.freeMemory();
+ if (maxIncreaseInCollectionTime == 0) {
+ gcTimeIncreasedCount = 0;
+ } else {
+ gcTimeIncreasedCount++;
+ if (gcTimeIncreasedCount > 3 && mem < rt.maxMemory() * 0.05) {
+ log.warn("Running low on memory");
+ gcTimeIncreasedCount = 0;
+ }
+ }
+
+ if (mem > lastMemorySize) {
+ sawChange = true;
+ }
+
+ String sign = "+";
+ if (mem - lastMemorySize <= 0) {
+ sign = "";
+ }
+
+ sb.append(String.format(" freemem=%,d(%s%,d) totalmem=%,d", mem, sign, (mem - lastMemorySize), rt.totalMemory()));
+
+ if (sawChange) {
+ log.debug(sb.toString());
+ }
+
+ final long keepAliveTimeout = conf.getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT);
+ if (lastMemoryCheckTime > 0 && lastMemoryCheckTime < now) {
+ final long diff = now - lastMemoryCheckTime;
+ if (diff > keepAliveTimeout + 1000) {
+ log.warn(String.format("GC pause checker not called in a timely fashion. Expected every %.1f seconds but was %.1f seconds since last check",
+ keepAliveTimeout / 1000., diff / 1000.));
+ }
+ lastMemoryCheckTime = now;
+ return;
+ }
+
+ if (maxIncreaseInCollectionTime > keepAliveTimeout) {
+ Halt.halt("Garbage collection may be interfering with lock keep-alive. Halting.", -1);
+ }
+
+ lastMemorySize = mem;
+ lastMemoryCheckTime = now;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/c16fd9d1/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java b/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
deleted file mode 100644
index 389a544..0000000
--- a/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.server;
-
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.Property;
-import org.apache.accumulo.server.util.Halt;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GarbageCollectionLogger {
- private static final Logger log = LoggerFactory.getLogger(GarbageCollectionLogger.class);
-
- private final HashMap<String,Long> prevGcTime = new HashMap<String,Long>();
- private long lastMemorySize = 0;
- private long gcTimeIncreasedCount = 0;
- private static long lastMemoryCheckTime = 0;
-
- public GarbageCollectionLogger() {}
-
- public synchronized void logGCInfo(AccumuloConfiguration conf) {
- final long now = System.currentTimeMillis();
-
- List<GarbageCollectorMXBean> gcmBeans = ManagementFactory.getGarbageCollectorMXBeans();
- Runtime rt = Runtime.getRuntime();
-
- StringBuilder sb = new StringBuilder("gc");
-
- boolean sawChange = false;
-
- long maxIncreaseInCollectionTime = 0;
-
- for (GarbageCollectorMXBean gcBean : gcmBeans) {
- Long prevTime = prevGcTime.get(gcBean.getName());
- long pt = 0;
- if (prevTime != null) {
- pt = prevTime;
- }
-
- long time = gcBean.getCollectionTime();
-
- if (time - pt != 0) {
- sawChange = true;
- }
-
- long increaseInCollectionTime = time - pt;
- sb.append(String.format(" %s=%,.2f(+%,.2f) secs", gcBean.getName(), time / 1000.0, increaseInCollectionTime / 1000.0));
- maxIncreaseInCollectionTime = Math.max(increaseInCollectionTime, maxIncreaseInCollectionTime);
- prevGcTime.put(gcBean.getName(), time);
- }
-
- long mem = rt.freeMemory();
- if (maxIncreaseInCollectionTime == 0) {
- gcTimeIncreasedCount = 0;
- } else {
- gcTimeIncreasedCount++;
- if (gcTimeIncreasedCount > 3 && mem < rt.maxMemory() * 0.05) {
- log.warn("Running low on memory");
- gcTimeIncreasedCount = 0;
- }
- }
-
- if (mem > lastMemorySize) {
- sawChange = true;
- }
-
- String sign = "+";
- if (mem - lastMemorySize <= 0) {
- sign = "";
- }
-
- sb.append(String.format(" freemem=%,d(%s%,d) totalmem=%,d", mem, sign, (mem - lastMemorySize), rt.totalMemory()));
-
- if (sawChange) {
- log.debug(sb.toString());
- }
-
- final long keepAliveTimeout = conf.getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT);
- if (lastMemoryCheckTime > 0 && lastMemoryCheckTime < now) {
- final long diff = now - lastMemoryCheckTime;
- if (diff > keepAliveTimeout + 1000) {
- log.warn(String.format("GC pause checker not called in a timely fashion. Expected every %.1f seconds but was %.1f seconds since last check",
- keepAliveTimeout / 1000., diff / 1000.));
- }
- lastMemoryCheckTime = now;
- return;
- }
-
- if (maxIncreaseInCollectionTime > keepAliveTimeout) {
- Halt.halt("Garbage collection may be interfering with lock keep-alive. Halting.", -1);
- }
-
- lastMemorySize = mem;
- lastMemoryCheckTime = now;
- }
-
-}
[11/21] accumulo git commit: ACCUMULO-3743 One more CHANGES change.
Posted by ct...@apache.org.
ACCUMULO-3743 One more CHANGES change.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/9fda3bbb
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/9fda3bbb
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/9fda3bbb
Branch: refs/heads/1.7
Commit: 9fda3bbb2c5a06764412c2ed665e47e75a9762da
Parents: 0d9c05e
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 13:42:23 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 13:42:23 2015 -0400
----------------------------------------------------------------------
CHANGES | 1 +
1 file changed, 1 insertion(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/9fda3bbb/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index 7750d66..d966db4 100644
--- a/CHANGES
+++ b/CHANGES
@@ -474,6 +474,7 @@ Release Notes - Accumulo - Version 1.7.0
* [ACCUMULO-3791] - Agitator start/stop scripts collide variable names
* [ACCUMULO-3792] - org.apache.accumulo.test.randomwalk.Node#getMapReduceJars needs to include htrace
* [ACCUMULO-3800] - Extra DEPENDENCIES file after new apache.pom
+ * [ACCUMULO-3803] - Resurrect mapreduce.lib.util for 1.7.0
** Improvement
* [ACCUMULO-898] - look into replacing cloudtrace
[02/21] accumulo git commit: Revert "ACCUMULO-3793 use numctl to turn
on memory interleaving"
Posted by ct...@apache.org.
Revert "ACCUMULO-3793 use numctl to turn on memory interleaving"
This reverts commit b82148777c023b21578d06ef451fdd70770107a5.
Conflicts:
assemble/bin/config.sh
assemble/bin/start-server.sh
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/4db7766d
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/4db7766d
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/4db7766d
Branch: refs/heads/master
Commit: 4db7766d77497c4ab8b8cbd997fe1b4c267308f3
Parents: 8e99ed2
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 11:59:11 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 11:59:11 2015 -0400
----------------------------------------------------------------------
assemble/bin/config.sh | 9 ---------
assemble/bin/start-server.sh | 8 ++++----
2 files changed, 4 insertions(+), 13 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/4db7766d/assemble/bin/config.sh
----------------------------------------------------------------------
diff --git a/assemble/bin/config.sh b/assemble/bin/config.sh
index ed393bb..d9bf8d4 100755
--- a/assemble/bin/config.sh
+++ b/assemble/bin/config.sh
@@ -104,15 +104,6 @@ then
fi
export HADOOP_PREFIX
-NUMA=`which numactl 2>/dev/null`
-NUMACTL_EXISTS="$?"
-NUMACTL_ARGS="--interleave=all"
-if [[ ${NUMACTL_EXISTS} -eq 0 ]] ; then
- export NUMA_CMD="${NUMA} ${NUMACTL_ARGS}"
-else
- export NUMA_CMD=""
-fi
-
export HADOOP_HOME=$HADOOP_PREFIX
export HADOOP_HOME_WARN_SUPPRESS=true
http://git-wip-us.apache.org/repos/asf/accumulo/blob/4db7766d/assemble/bin/start-server.sh
----------------------------------------------------------------------
diff --git a/assemble/bin/start-server.sh b/assemble/bin/start-server.sh
index 3587ec9..2fb4c4c 100755
--- a/assemble/bin/start-server.sh
+++ b/assemble/bin/start-server.sh
@@ -77,12 +77,12 @@ if [[ -z "$PID" ]]; then
COMMAND="${bin}/accumulo_watcher.sh ${LOGHOST}"
fi
- if [[ $HOST == localhost || $HOST == "$(hostname -f)" || $HOST = "$IP" ]]; then
- ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
+ if [ "$HOST" = "localhost" -o "$HOST" = "`hostname -f`" -o "$HOST" = "$ip" ]; then
+ ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err &
MAX_FILES_OPEN=$(ulimit -n)
else
- $SSH "$HOST" "bash -c 'exec nohup ${NUMA_CMD} $COMMAND ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
+ $SSH $HOST "bash -c 'exec nohup ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
+ MAX_FILES_OPEN=$($SSH $HOST "/usr/bin/env bash -c 'ulimit -n'")
fi
if [[ -n $MAX_FILES_OPEN && -n $SLAVES ]] ; then
[05/21] accumulo git commit: Revert "ACCUMULO-3800 Add exclusion for
DEPENDENCIES"
Posted by ct...@apache.org.
Revert "ACCUMULO-3800 Add exclusion for DEPENDENCIES"
This reverts commit 8e99ed222db49e0f9a4f03ae90d1cbcf44bb1a87.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/dc1d0def
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/dc1d0def
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/dc1d0def
Branch: refs/heads/1.7
Commit: dc1d0defb79b5fde68631f1063be021fb3420f74
Parents: c16fd9d
Author: Christopher Tubbs <ct...@apache.org>
Authored: Tue May 12 11:42:12 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:44:01 2015 -0400
----------------------------------------------------------------------
pom.xml | 9 ---------
1 file changed, 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/dc1d0def/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f4c734b..5bf65e8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -955,15 +955,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>DEPENDENCIES</exclude>
- </excludes>
- </configuration>
- </plugin>
</plugins>
</pluginManagement>
<plugins>
[06/21] accumulo git commit: Revert "ACCUMULO-3800 Add exclusion for
DEPENDENCIES"
Posted by ct...@apache.org.
Revert "ACCUMULO-3800 Add exclusion for DEPENDENCIES"
This reverts commit 8e99ed222db49e0f9a4f03ae90d1cbcf44bb1a87.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/dc1d0def
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/dc1d0def
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/dc1d0def
Branch: refs/heads/master
Commit: dc1d0defb79b5fde68631f1063be021fb3420f74
Parents: c16fd9d
Author: Christopher Tubbs <ct...@apache.org>
Authored: Tue May 12 11:42:12 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:44:01 2015 -0400
----------------------------------------------------------------------
pom.xml | 9 ---------
1 file changed, 9 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/dc1d0def/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f4c734b..5bf65e8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -955,15 +955,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.rat</groupId>
- <artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>DEPENDENCIES</exclude>
- </excludes>
- </configuration>
- </plugin>
</plugins>
</pluginManagement>
<plugins>
[19/21] accumulo git commit: Merge tag '1.7.0' into 1.7
Posted by ct...@apache.org.
Merge tag '1.7.0' into 1.7
Apache Accumulo 1.7.0
Conflicts:
assemble/pom.xml
core/pom.xml
docs/pom.xml
examples/simple/pom.xml
fate/pom.xml
maven-plugin/pom.xml
minicluster/pom.xml
pom.xml
proxy/pom.xml
server/base/pom.xml
server/gc/pom.xml
server/master/pom.xml
server/monitor/pom.xml
server/native/pom.xml
server/tracer/pom.xml
server/tserver/pom.xml
shell/pom.xml
start/pom.xml
test/pom.xml
trace/pom.xml
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/1faee537
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/1faee537
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/1faee537
Branch: refs/heads/1.7
Commit: 1faee537d8d557297d20f3d084b4e16127d00600
Parents: b577410 8cba812
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 12:07:59 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 12:07:59 2015 -0400
----------------------------------------------------------------------
CHANGES | 1 +
assemble/bin/config.sh | 6 +++---
assemble/bin/start-server.sh | 4 ++--
pom.xml | 2 +-
4 files changed, 7 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/assemble/bin/config.sh
----------------------------------------------------------------------
diff --cc assemble/bin/config.sh
index ed393bb,d9bf8d4..9dfd164
--- a/assemble/bin/config.sh
+++ b/assemble/bin/config.sh
@@@ -104,15 -104,6 +104,15 @@@ the
fi
export HADOOP_PREFIX
- NUMA=`which numactl 2>/dev/null`
- NUMACTL_EXISTS="$?"
- NUMACTL_ARGS="--interleave=all"
++NUMA=$(which numactl 2>/dev/null)
++NUMACTL_EXISTS=$?
++NUMACTL_ARGS='--interleave=all'
+if [[ ${NUMACTL_EXISTS} -eq 0 ]] ; then
+ export NUMA_CMD="${NUMA} ${NUMACTL_ARGS}"
+else
+ export NUMA_CMD=""
+fi
+
export HADOOP_HOME=$HADOOP_PREFIX
export HADOOP_HOME_WARN_SUPPRESS=true
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/assemble/bin/start-server.sh
----------------------------------------------------------------------
diff --cc assemble/bin/start-server.sh
index 3587ec9,2fb4c4c..8266457
--- a/assemble/bin/start-server.sh
+++ b/assemble/bin/start-server.sh
@@@ -77,12 -77,12 +77,12 @@@ if [[ -z "$PID" ]]; the
COMMAND="${bin}/accumulo_watcher.sh ${LOGHOST}"
fi
- if [ "$HOST" = "localhost" -o "$HOST" = "`hostname -f`" -o "$HOST" = "$ip" ]; then
- ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err &
+ if [[ $HOST == localhost || $HOST == "$(hostname -f)" || $HOST = "$IP" ]]; then
- ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
++ ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
MAX_FILES_OPEN=$(ulimit -n)
else
- $SSH $HOST "bash -c 'exec nohup ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH $HOST "/usr/bin/env bash -c 'ulimit -n'")
+ $SSH "$HOST" "bash -c 'exec nohup ${NUMA_CMD} $COMMAND ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
++ MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
fi
if [[ -n $MAX_FILES_OPEN && -n $SLAVES ]] ; then
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 19ff679,24f25c9..3815ad7
--- a/pom.xml
+++ b/pom.xml
@@@ -95,7 -95,7 +95,7 @@@
<scm>
<connection>scm:git:git://git.apache.org/accumulo.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/accumulo.git</developerConnection>
- <tag>${project.version}</tag>
- <tag>1.7.0</tag>
++ <tag>HEAD</tag>
<url>https://git-wip-us.apache.org/repos/asf?p=accumulo.git</url>
</scm>
<issueManagement>
[14/21] accumulo git commit: [maven-release-plugin] prepare release
1.7.0
Posted by ct...@apache.org.
[maven-release-plugin] prepare release 1.7.0
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/8cba8128
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/8cba8128
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/8cba8128
Branch: refs/heads/master
Commit: 8cba8128fbc3238bdd9398cf5c36b7cb6dc3b61d
Parents: 9fda3bb
Author: Josh Elser <el...@apache.org>
Authored: Tue May 12 14:34:29 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 14:34:29 2015 -0400
----------------------------------------------------------------------
assemble/pom.xml | 2 +-
core/pom.xml | 2 +-
docs/pom.xml | 2 +-
examples/simple/pom.xml | 2 +-
fate/pom.xml | 2 +-
maven-plugin/pom.xml | 2 +-
minicluster/pom.xml | 2 +-
pom.xml | 4 ++--
proxy/pom.xml | 2 +-
server/base/pom.xml | 2 +-
server/gc/pom.xml | 2 +-
server/master/pom.xml | 2 +-
server/monitor/pom.xml | 2 +-
server/native/pom.xml | 2 +-
server/tracer/pom.xml | 2 +-
server/tserver/pom.xml | 2 +-
shell/pom.xml | 2 +-
start/pom.xml | 2 +-
test/pom.xml | 2 +-
trace/pom.xml | 2 +-
20 files changed, 21 insertions(+), 21 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/assemble/pom.xml
----------------------------------------------------------------------
diff --git a/assemble/pom.xml b/assemble/pom.xml
index e88cd81..0cf690b 100644
--- a/assemble/pom.xml
+++ b/assemble/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo</artifactId>
<packaging>pom</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/core/pom.xml
----------------------------------------------------------------------
diff --git a/core/pom.xml b/core/pom.xml
index 12e3c1d..fe91d0a 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-core</artifactId>
<name>Core</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/docs/pom.xml
----------------------------------------------------------------------
diff --git a/docs/pom.xml b/docs/pom.xml
index 47d98da..519f534 100644
--- a/docs/pom.xml
+++ b/docs/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-docs</artifactId>
<packaging>pom</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/examples/simple/pom.xml
----------------------------------------------------------------------
diff --git a/examples/simple/pom.xml b/examples/simple/pom.xml
index aa2f118..7172831 100644
--- a/examples/simple/pom.xml
+++ b/examples/simple/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-examples-simple</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/fate/pom.xml
----------------------------------------------------------------------
diff --git a/fate/pom.xml b/fate/pom.xml
index 5fd9a7e..e282a14 100644
--- a/fate/pom.xml
+++ b/fate/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-fate</artifactId>
<name>Fate</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/maven-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml
index 2c07d7d..ecb3bd0 100644
--- a/maven-plugin/pom.xml
+++ b/maven-plugin/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-maven-plugin</artifactId>
<packaging>maven-plugin</packaging>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/minicluster/pom.xml
----------------------------------------------------------------------
diff --git a/minicluster/pom.xml b/minicluster/pom.xml
index 7644269..c12c967 100644
--- a/minicluster/pom.xml
+++ b/minicluster/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-minicluster</artifactId>
<name>MiniCluster</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5bf65e8..24f25c9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,7 +24,7 @@
</parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<packaging>pom</packaging>
<name>Apache Accumulo</name>
<description>Apache Accumulo is a sorted, distributed key/value store based on Google's BigTable design. It is built on top of Apache Hadoop, Zookeeper, and Thrift. It features a few novel improvements on the BigTable design in the form of cell-level access labels and a server-side programming mechanism that can modify key/value pairs at various points in the data management process.</description>
@@ -95,7 +95,7 @@
<scm>
<connection>scm:git:git://git.apache.org/accumulo.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/accumulo.git</developerConnection>
- <tag>${project.version}</tag>
+ <tag>1.7.0</tag>
<url>https://git-wip-us.apache.org/repos/asf?p=accumulo.git</url>
</scm>
<issueManagement>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/proxy/pom.xml
----------------------------------------------------------------------
diff --git a/proxy/pom.xml b/proxy/pom.xml
index eed79b1..92e0d48 100644
--- a/proxy/pom.xml
+++ b/proxy/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-proxy</artifactId>
<name>Proxy</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/base/pom.xml
----------------------------------------------------------------------
diff --git a/server/base/pom.xml b/server/base/pom.xml
index 94850b9..1c59fd4 100644
--- a/server/base/pom.xml
+++ b/server/base/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-server-base</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/gc/pom.xml
----------------------------------------------------------------------
diff --git a/server/gc/pom.xml b/server/gc/pom.xml
index 694ffe9..f92232b 100644
--- a/server/gc/pom.xml
+++ b/server/gc/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-gc</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/master/pom.xml
----------------------------------------------------------------------
diff --git a/server/master/pom.xml b/server/master/pom.xml
index cf92d4a..6024c97 100644
--- a/server/master/pom.xml
+++ b/server/master/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-master</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/monitor/pom.xml
----------------------------------------------------------------------
diff --git a/server/monitor/pom.xml b/server/monitor/pom.xml
index 51262c1..e5cd90b 100644
--- a/server/monitor/pom.xml
+++ b/server/monitor/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-monitor</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/native/pom.xml
----------------------------------------------------------------------
diff --git a/server/native/pom.xml b/server/native/pom.xml
index 5a18728..3452e56 100644
--- a/server/native/pom.xml
+++ b/server/native/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-native</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/tracer/pom.xml
----------------------------------------------------------------------
diff --git a/server/tracer/pom.xml b/server/tracer/pom.xml
index c44f498..d0d0288 100644
--- a/server/tracer/pom.xml
+++ b/server/tracer/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-tracer</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/server/tserver/pom.xml
----------------------------------------------------------------------
diff --git a/server/tserver/pom.xml b/server/tserver/pom.xml
index a4bc3de..da319aa 100644
--- a/server/tserver/pom.xml
+++ b/server/tserver/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<artifactId>accumulo-tserver</artifactId>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/shell/pom.xml
----------------------------------------------------------------------
diff --git a/shell/pom.xml b/shell/pom.xml
index 6ac997f..f80eb0a 100644
--- a/shell/pom.xml
+++ b/shell/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-shell</artifactId>
<name>Shell</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/start/pom.xml
----------------------------------------------------------------------
diff --git a/start/pom.xml b/start/pom.xml
index eb75fe0..7be51a2 100644
--- a/start/pom.xml
+++ b/start/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-start</artifactId>
<name>Start</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/test/pom.xml
----------------------------------------------------------------------
diff --git a/test/pom.xml b/test/pom.xml
index 5862688..f1d5e2a 100644
--- a/test/pom.xml
+++ b/test/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-test</artifactId>
<name>Testing</name>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/8cba8128/trace/pom.xml
----------------------------------------------------------------------
diff --git a/trace/pom.xml b/trace/pom.xml
index 7777bb3..53e6851 100644
--- a/trace/pom.xml
+++ b/trace/pom.xml
@@ -20,7 +20,7 @@
<parent>
<groupId>org.apache.accumulo</groupId>
<artifactId>accumulo-project</artifactId>
- <version>1.7.0-SNAPSHOT</version>
+ <version>1.7.0</version>
</parent>
<artifactId>accumulo-trace</artifactId>
<name>Trace</name>
[15/21] accumulo git commit: ACCUMULO-3820 Remove unused variable
Posted by ct...@apache.org.
ACCUMULO-3820 Remove unused variable
* Remove warning for unused variable.
* Update comment explaining why the parameter wasn't used.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/6524b072
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/6524b072
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/6524b072
Branch: refs/heads/master
Commit: 6524b0720350170696c1692430ad47dd5f4bffd0
Parents: ed5b53b
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 11:08:07 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 11:08:07 2015 -0400
----------------------------------------------------------------------
.../apache/accumulo/test/randomwalk/multitable/CopyTool.java | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/6524b072/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
index b4e509c..251a064 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
@@ -19,9 +19,9 @@ package org.apache.accumulo.test.randomwalk.multitable;
import java.io.IOException;
import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
@@ -74,9 +74,7 @@ public class CopyTool extends Configured implements Tool {
final AuthenticationToken token;
if (clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
// Use the Kerberos creds to request a DelegationToken for MapReduce to use
- final String keytab = args[1];
-
- // Better be logged in. Could use the keytab, but we're already logged in soo..
+ // We could use the specified keytab (args[1]), but we're already logged in and don't need to, so we can just use the current user
KerberosToken kt = new KerberosToken();
try {
UserGroupInformation user = UserGroupInformation.getCurrentUser();
[21/21] accumulo git commit: Merge branch '1.7'
Posted by ct...@apache.org.
Merge branch '1.7'
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/a9d1ad4a
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/a9d1ad4a
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/a9d1ad4a
Branch: refs/heads/master
Commit: a9d1ad4a7d470071e59382dfad0e10d49c9ee464
Parents: e22103c 1faee53
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 12:09:26 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 12:09:26 2015 -0400
----------------------------------------------------------------------
CHANGES | 1 +
assemble/bin/config.sh | 6 +-
assemble/bin/start-server.sh | 4 +-
.../core/client/impl/ConditionalWriterImpl.java | 2 +-
.../accumulo/core/file/rfile/bcfile/Utils.java | 2 +-
.../iterators/user/IntersectingIterator.java | 19 ++---
.../mapred/AccumuloFileOutputFormatTest.java | 2 +
.../mapreduce/AccumuloFileOutputFormatTest.java | 2 +
.../accumulo/examples/simple/shard/Index.java | 7 +-
.../impl/MiniAccumuloClusterImpl.java | 17 ++--
.../impl/MiniAccumuloConfigImpl.java | 8 +-
pom.xml | 8 +-
.../accumulo/server/util/SendLogToChainsaw.java | 2 +-
.../org/apache/accumulo/monitor/util/Table.java | 2 +-
.../monitor/util/celltypes/NumberType.java | 2 +-
.../accumulo/tserver/log/LocalWALRecovery.java | 75 ++++++++---------
.../accumulo/tserver/tablet/RootFilesTest.java | 7 +-
.../shell/commands/FormatterCommandTest.java | 2 +-
.../start/classloader/AccumuloClassLoader.java | 7 +-
.../classloader/vfs/UniqueFileReplicator.java | 3 +-
.../accumulo/test/continuous/TimeBinner.java | 3 +
.../test/continuous/UndefinedAnalyzer.java | 84 ++++++++++----------
.../test/functional/CacheTestWriter.java | 3 +
.../apache/accumulo/test/randomwalk/Node.java | 16 ++--
.../test/randomwalk/multitable/CopyTool.java | 6 +-
.../apache/accumulo/test/AuditMessageIT.java | 5 +-
26 files changed, 168 insertions(+), 127 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/a9d1ad4a/pom.xml
----------------------------------------------------------------------
[03/21] accumulo git commit: ACCUMULO-3801 moved
GarbageCollectionLogger to proper module
Posted by ct...@apache.org.
ACCUMULO-3801 moved GarbageCollectionLogger to proper module
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/c16fd9d1
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/c16fd9d1
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/c16fd9d1
Branch: refs/heads/1.7
Commit: c16fd9d1552085bf135c399e1ca32fad3737edb3
Parents: 4db7766
Author: Keith Turner <kt...@apache.org>
Authored: Tue May 12 11:08:53 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:00:09 2015 -0400
----------------------------------------------------------------------
.../server/GarbageCollectionLogger.java | 116 +++++++++++++++++++
.../server/GarbageCollectionLogger.java | 116 -------------------
2 files changed, 116 insertions(+), 116 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/c16fd9d1/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
----------------------------------------------------------------------
diff --git a/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java b/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
new file mode 100644
index 0000000..389a544
--- /dev/null
+++ b/server/base/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.server;
+
+import java.lang.management.GarbageCollectorMXBean;
+import java.lang.management.ManagementFactory;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.accumulo.core.conf.AccumuloConfiguration;
+import org.apache.accumulo.core.conf.Property;
+import org.apache.accumulo.server.util.Halt;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class GarbageCollectionLogger {
+ private static final Logger log = LoggerFactory.getLogger(GarbageCollectionLogger.class);
+
+ private final HashMap<String,Long> prevGcTime = new HashMap<String,Long>();
+ private long lastMemorySize = 0;
+ private long gcTimeIncreasedCount = 0;
+ private static long lastMemoryCheckTime = 0;
+
+ public GarbageCollectionLogger() {}
+
+ public synchronized void logGCInfo(AccumuloConfiguration conf) {
+ final long now = System.currentTimeMillis();
+
+ List<GarbageCollectorMXBean> gcmBeans = ManagementFactory.getGarbageCollectorMXBeans();
+ Runtime rt = Runtime.getRuntime();
+
+ StringBuilder sb = new StringBuilder("gc");
+
+ boolean sawChange = false;
+
+ long maxIncreaseInCollectionTime = 0;
+
+ for (GarbageCollectorMXBean gcBean : gcmBeans) {
+ Long prevTime = prevGcTime.get(gcBean.getName());
+ long pt = 0;
+ if (prevTime != null) {
+ pt = prevTime;
+ }
+
+ long time = gcBean.getCollectionTime();
+
+ if (time - pt != 0) {
+ sawChange = true;
+ }
+
+ long increaseInCollectionTime = time - pt;
+ sb.append(String.format(" %s=%,.2f(+%,.2f) secs", gcBean.getName(), time / 1000.0, increaseInCollectionTime / 1000.0));
+ maxIncreaseInCollectionTime = Math.max(increaseInCollectionTime, maxIncreaseInCollectionTime);
+ prevGcTime.put(gcBean.getName(), time);
+ }
+
+ long mem = rt.freeMemory();
+ if (maxIncreaseInCollectionTime == 0) {
+ gcTimeIncreasedCount = 0;
+ } else {
+ gcTimeIncreasedCount++;
+ if (gcTimeIncreasedCount > 3 && mem < rt.maxMemory() * 0.05) {
+ log.warn("Running low on memory");
+ gcTimeIncreasedCount = 0;
+ }
+ }
+
+ if (mem > lastMemorySize) {
+ sawChange = true;
+ }
+
+ String sign = "+";
+ if (mem - lastMemorySize <= 0) {
+ sign = "";
+ }
+
+ sb.append(String.format(" freemem=%,d(%s%,d) totalmem=%,d", mem, sign, (mem - lastMemorySize), rt.totalMemory()));
+
+ if (sawChange) {
+ log.debug(sb.toString());
+ }
+
+ final long keepAliveTimeout = conf.getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT);
+ if (lastMemoryCheckTime > 0 && lastMemoryCheckTime < now) {
+ final long diff = now - lastMemoryCheckTime;
+ if (diff > keepAliveTimeout + 1000) {
+ log.warn(String.format("GC pause checker not called in a timely fashion. Expected every %.1f seconds but was %.1f seconds since last check",
+ keepAliveTimeout / 1000., diff / 1000.));
+ }
+ lastMemoryCheckTime = now;
+ return;
+ }
+
+ if (maxIncreaseInCollectionTime > keepAliveTimeout) {
+ Halt.halt("Garbage collection may be interfering with lock keep-alive. Halting.", -1);
+ }
+
+ lastMemorySize = mem;
+ lastMemoryCheckTime = now;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/c16fd9d1/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java b/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
deleted file mode 100644
index 389a544..0000000
--- a/server/tserver/src/main/java/org/apache/accumulo/server/GarbageCollectionLogger.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.server;
-
-import java.lang.management.GarbageCollectorMXBean;
-import java.lang.management.ManagementFactory;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.accumulo.core.conf.AccumuloConfiguration;
-import org.apache.accumulo.core.conf.Property;
-import org.apache.accumulo.server.util.Halt;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GarbageCollectionLogger {
- private static final Logger log = LoggerFactory.getLogger(GarbageCollectionLogger.class);
-
- private final HashMap<String,Long> prevGcTime = new HashMap<String,Long>();
- private long lastMemorySize = 0;
- private long gcTimeIncreasedCount = 0;
- private static long lastMemoryCheckTime = 0;
-
- public GarbageCollectionLogger() {}
-
- public synchronized void logGCInfo(AccumuloConfiguration conf) {
- final long now = System.currentTimeMillis();
-
- List<GarbageCollectorMXBean> gcmBeans = ManagementFactory.getGarbageCollectorMXBeans();
- Runtime rt = Runtime.getRuntime();
-
- StringBuilder sb = new StringBuilder("gc");
-
- boolean sawChange = false;
-
- long maxIncreaseInCollectionTime = 0;
-
- for (GarbageCollectorMXBean gcBean : gcmBeans) {
- Long prevTime = prevGcTime.get(gcBean.getName());
- long pt = 0;
- if (prevTime != null) {
- pt = prevTime;
- }
-
- long time = gcBean.getCollectionTime();
-
- if (time - pt != 0) {
- sawChange = true;
- }
-
- long increaseInCollectionTime = time - pt;
- sb.append(String.format(" %s=%,.2f(+%,.2f) secs", gcBean.getName(), time / 1000.0, increaseInCollectionTime / 1000.0));
- maxIncreaseInCollectionTime = Math.max(increaseInCollectionTime, maxIncreaseInCollectionTime);
- prevGcTime.put(gcBean.getName(), time);
- }
-
- long mem = rt.freeMemory();
- if (maxIncreaseInCollectionTime == 0) {
- gcTimeIncreasedCount = 0;
- } else {
- gcTimeIncreasedCount++;
- if (gcTimeIncreasedCount > 3 && mem < rt.maxMemory() * 0.05) {
- log.warn("Running low on memory");
- gcTimeIncreasedCount = 0;
- }
- }
-
- if (mem > lastMemorySize) {
- sawChange = true;
- }
-
- String sign = "+";
- if (mem - lastMemorySize <= 0) {
- sign = "";
- }
-
- sb.append(String.format(" freemem=%,d(%s%,d) totalmem=%,d", mem, sign, (mem - lastMemorySize), rt.totalMemory()));
-
- if (sawChange) {
- log.debug(sb.toString());
- }
-
- final long keepAliveTimeout = conf.getTimeInMillis(Property.INSTANCE_ZK_TIMEOUT);
- if (lastMemoryCheckTime > 0 && lastMemoryCheckTime < now) {
- final long diff = now - lastMemoryCheckTime;
- if (diff > keepAliveTimeout + 1000) {
- log.warn(String.format("GC pause checker not called in a timely fashion. Expected every %.1f seconds but was %.1f seconds since last check",
- keepAliveTimeout / 1000., diff / 1000.));
- }
- lastMemoryCheckTime = now;
- return;
- }
-
- if (maxIncreaseInCollectionTime > keepAliveTimeout) {
- Halt.halt("Garbage collection may be interfering with lock keep-alive. Halting.", -1);
- }
-
- lastMemorySize = mem;
- lastMemoryCheckTime = now;
- }
-
-}
[20/21] accumulo git commit: Merge tag '1.7.0' into 1.7
Posted by ct...@apache.org.
Merge tag '1.7.0' into 1.7
Apache Accumulo 1.7.0
Conflicts:
assemble/pom.xml
core/pom.xml
docs/pom.xml
examples/simple/pom.xml
fate/pom.xml
maven-plugin/pom.xml
minicluster/pom.xml
pom.xml
proxy/pom.xml
server/base/pom.xml
server/gc/pom.xml
server/master/pom.xml
server/monitor/pom.xml
server/native/pom.xml
server/tracer/pom.xml
server/tserver/pom.xml
shell/pom.xml
start/pom.xml
test/pom.xml
trace/pom.xml
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/1faee537
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/1faee537
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/1faee537
Branch: refs/heads/master
Commit: 1faee537d8d557297d20f3d084b4e16127d00600
Parents: b577410 8cba812
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 12:07:59 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 12:07:59 2015 -0400
----------------------------------------------------------------------
CHANGES | 1 +
assemble/bin/config.sh | 6 +++---
assemble/bin/start-server.sh | 4 ++--
pom.xml | 2 +-
4 files changed, 7 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/assemble/bin/config.sh
----------------------------------------------------------------------
diff --cc assemble/bin/config.sh
index ed393bb,d9bf8d4..9dfd164
--- a/assemble/bin/config.sh
+++ b/assemble/bin/config.sh
@@@ -104,15 -104,6 +104,15 @@@ the
fi
export HADOOP_PREFIX
- NUMA=`which numactl 2>/dev/null`
- NUMACTL_EXISTS="$?"
- NUMACTL_ARGS="--interleave=all"
++NUMA=$(which numactl 2>/dev/null)
++NUMACTL_EXISTS=$?
++NUMACTL_ARGS='--interleave=all'
+if [[ ${NUMACTL_EXISTS} -eq 0 ]] ; then
+ export NUMA_CMD="${NUMA} ${NUMACTL_ARGS}"
+else
+ export NUMA_CMD=""
+fi
+
export HADOOP_HOME=$HADOOP_PREFIX
export HADOOP_HOME_WARN_SUPPRESS=true
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/assemble/bin/start-server.sh
----------------------------------------------------------------------
diff --cc assemble/bin/start-server.sh
index 3587ec9,2fb4c4c..8266457
--- a/assemble/bin/start-server.sh
+++ b/assemble/bin/start-server.sh
@@@ -77,12 -77,12 +77,12 @@@ if [[ -z "$PID" ]]; the
COMMAND="${bin}/accumulo_watcher.sh ${LOGHOST}"
fi
- if [ "$HOST" = "localhost" -o "$HOST" = "`hostname -f`" -o "$HOST" = "$ip" ]; then
- ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err &
+ if [[ $HOST == localhost || $HOST == "$(hostname -f)" || $HOST = "$IP" ]]; then
- ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
++ ${NUMA_CMD} "$COMMAND" "${SERVICE}" --address "${ADDRESS}" >"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out" 2>"${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err" &
MAX_FILES_OPEN=$(ulimit -n)
else
- $SSH $HOST "bash -c 'exec nohup ${bin}/accumulo ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH $HOST "/usr/bin/env bash -c 'ulimit -n'")
+ $SSH "$HOST" "bash -c 'exec nohup ${NUMA_CMD} $COMMAND ${SERVICE} --address ${ADDRESS} >${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.out 2>${ACCUMULO_LOG_DIR}/${SERVICE}_${LOGHOST}.err' &"
- MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
++ MAX_FILES_OPEN=$($SSH "$HOST" "/usr/bin/env bash -c 'ulimit -n'")
fi
if [[ -n $MAX_FILES_OPEN && -n $SLAVES ]] ; then
http://git-wip-us.apache.org/repos/asf/accumulo/blob/1faee537/pom.xml
----------------------------------------------------------------------
diff --cc pom.xml
index 19ff679,24f25c9..3815ad7
--- a/pom.xml
+++ b/pom.xml
@@@ -95,7 -95,7 +95,7 @@@
<scm>
<connection>scm:git:git://git.apache.org/accumulo.git</connection>
<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/accumulo.git</developerConnection>
- <tag>${project.version}</tag>
- <tag>1.7.0</tag>
++ <tag>HEAD</tag>
<url>https://git-wip-us.apache.org/repos/asf?p=accumulo.git</url>
</scm>
<issueManagement>
[16/21] accumulo git commit: ACCUMULO-3820 Remove unused variable
Posted by ct...@apache.org.
ACCUMULO-3820 Remove unused variable
* Remove warning for unused variable.
* Update comment explaining why the parameter wasn't used.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/6524b072
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/6524b072
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/6524b072
Branch: refs/heads/1.7
Commit: 6524b0720350170696c1692430ad47dd5f4bffd0
Parents: ed5b53b
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 11:08:07 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 11:08:07 2015 -0400
----------------------------------------------------------------------
.../apache/accumulo/test/randomwalk/multitable/CopyTool.java | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/6524b072/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
index b4e509c..251a064 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/multitable/CopyTool.java
@@ -19,9 +19,9 @@ package org.apache.accumulo.test.randomwalk.multitable;
import java.io.IOException;
import org.apache.accumulo.core.client.ClientConfiguration;
+import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.ZooKeeperInstance;
-import org.apache.accumulo.core.client.ClientConfiguration.ClientProperty;
import org.apache.accumulo.core.client.admin.DelegationTokenConfig;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
@@ -74,9 +74,7 @@ public class CopyTool extends Configured implements Tool {
final AuthenticationToken token;
if (clientConf.getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
// Use the Kerberos creds to request a DelegationToken for MapReduce to use
- final String keytab = args[1];
-
- // Better be logged in. Could use the keytab, but we're already logged in soo..
+ // We could use the specified keytab (args[1]), but we're already logged in and don't need to, so we can just use the current user
KerberosToken kt = new KerberosToken();
try {
UserGroupInformation user = UserGroupInformation.getCurrentUser();
[07/21] accumulo git commit: ACCUMULO-3800 Add DEPENDENCIES file
Posted by ct...@apache.org.
ACCUMULO-3800 Add DEPENDENCIES file
* Prevent a new DEPENDENCIES file from being generated by the
maven-remote-resources-plugin's apache-jar-resource-bundle.
* Create a minimal DEPENDENCIES file which refers to the POMs.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/eef4dfe1
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/eef4dfe1
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/eef4dfe1
Branch: refs/heads/master
Commit: eef4dfe1f074ee5197ef2d10f56915c43aba81a1
Parents: dc1d0de
Author: Christopher Tubbs <ct...@apache.org>
Authored: Tue May 12 12:10:28 2015 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Tue May 12 12:44:08 2015 -0400
----------------------------------------------------------------------
DEPENDENCIES | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/eef4dfe1/DEPENDENCIES
----------------------------------------------------------------------
diff --git a/DEPENDENCIES b/DEPENDENCIES
new file mode 100644
index 0000000..5a98c0f
--- /dev/null
+++ b/DEPENDENCIES
@@ -0,0 +1,21 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+Apache Accumulo depends on artifacts which can be found in Maven Central.
+
+Each module has its own dependencies. Please refer to the individual
+modules' pom.xml files for a comprehensive listing.
[18/21] accumulo git commit: ACCUMULO-3819 Update findbugs and
checkstyle tools
Posted by ct...@apache.org.
ACCUMULO-3819 Update findbugs and checkstyle tools
* Bump findbugs and checkstyle build tools to check for more problems.
* Fix newly detected problems (mainly lack of checking for null when listing
directory contents) to ensure the build passes.
Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/b577410c
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/b577410c
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/b577410c
Branch: refs/heads/master
Commit: b577410c677b5adb2f1c0eb1c1f8f6a9061cd0ad
Parents: 6524b07
Author: Christopher Tubbs <ct...@apache.org>
Authored: Wed May 20 11:50:32 2015 -0400
Committer: Christopher Tubbs <ct...@apache.org>
Committed: Wed May 20 11:50:32 2015 -0400
----------------------------------------------------------------------
.../core/client/impl/ConditionalWriterImpl.java | 2 +-
.../accumulo/core/file/rfile/bcfile/Utils.java | 2 +-
.../iterators/user/IntersectingIterator.java | 19 ++---
.../mapred/AccumuloFileOutputFormatTest.java | 2 +
.../mapreduce/AccumuloFileOutputFormatTest.java | 2 +
.../accumulo/examples/simple/shard/Index.java | 7 +-
.../impl/MiniAccumuloClusterImpl.java | 17 ++--
.../impl/MiniAccumuloConfigImpl.java | 8 +-
pom.xml | 6 +-
.../accumulo/server/util/SendLogToChainsaw.java | 2 +-
.../org/apache/accumulo/monitor/util/Table.java | 2 +-
.../monitor/util/celltypes/NumberType.java | 2 +-
.../accumulo/tserver/log/LocalWALRecovery.java | 75 ++++++++---------
.../accumulo/tserver/tablet/RootFilesTest.java | 7 +-
.../shell/commands/FormatterCommandTest.java | 2 +-
.../start/classloader/AccumuloClassLoader.java | 7 +-
.../classloader/vfs/UniqueFileReplicator.java | 3 +-
.../accumulo/test/continuous/TimeBinner.java | 3 +
.../test/continuous/UndefinedAnalyzer.java | 84 ++++++++++----------
.../test/functional/CacheTestWriter.java | 3 +
.../apache/accumulo/test/randomwalk/Node.java | 16 ++--
.../apache/accumulo/test/AuditMessageIT.java | 5 +-
22 files changed, 159 insertions(+), 117 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java b/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
index b8375dc..24040e6 100644
--- a/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
+++ b/core/src/main/java/org/apache/accumulo/core/client/impl/ConditionalWriterImpl.java
@@ -182,7 +182,7 @@ class ConditionalWriterImpl implements ConditionalWriter {
@Override
public int compareTo(Delayed o) {
QCMutation oqcm = (QCMutation) o;
- return Long.valueOf(resetTime).compareTo(Long.valueOf(oqcm.resetTime));
+ return Long.compare(resetTime, oqcm.resetTime);
}
@Override
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java b/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
index 6cb04a1..5e84f10 100644
--- a/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
+++ b/core/src/main/java/org/apache/accumulo/core/file/rfile/bcfile/Utils.java
@@ -351,7 +351,7 @@ public final class Utils {
@Override
public int hashCode() {
- return (major << 16 + minor);
+ return ((major << 16) + minor);
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java b/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
index 63d6a34..e7338f3 100644
--- a/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
+++ b/core/src/main/java/org/apache/accumulo/core/iterators/user/IntersectingIterator.java
@@ -232,19 +232,20 @@ public class IntersectingIterator implements SortedKeyValueIterator<Key,Value> {
// If we are past the target, this is a valid result
if (docIDCompare < 0) {
break;
- }
- // if this source is not yet at the currentCQ then advance in this source
- if (docIDCompare > 0) {
+ } else if (docIDCompare > 0) {
+ // if this source is not yet at the currentCQ then advance in this source
+
// seek forwards
Key seekKey = buildKey(currentPartition, sources[sourceID].term, currentDocID);
sources[sourceID].iter.seek(new Range(seekKey, true, null, false), sources[sourceID].seekColfams, true);
continue;
- }
- // if we are equal to the target, this is an invalid result.
- // Force the entire process to go to the next row.
- // We are advancing column 0 because we forced that column to not contain a !
- // when we did the init()
- if (docIDCompare == 0) {
+ } else {
+ // docIDCompare == 0
+
+ // if we are equal to the target, this is an invalid result.
+ // Force the entire process to go to the next row.
+ // We are advancing column 0 because we forced that column to not contain a !
+ // when we did the init()
sources[0].iter.next();
advancedCursor = true;
break;
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
index e389c0b..c4a4a29 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapred/AccumuloFileOutputFormatTest.java
@@ -17,6 +17,7 @@
package org.apache.accumulo.core.client.mapred;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -190,6 +191,7 @@ public class AccumuloFileOutputFormatTest {
return file.getName().startsWith("part-m-");
}
});
+ assertNotNull(files);
if (content) {
assertEquals(1, files.length);
assertTrue(files[0].exists());
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
index abc99c9..b8b3c47 100644
--- a/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/client/mapreduce/AccumuloFileOutputFormatTest.java
@@ -17,6 +17,7 @@
package org.apache.accumulo.core.client.mapreduce;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -178,6 +179,7 @@ public class AccumuloFileOutputFormatTest {
return file.getName().startsWith("part-m-");
}
});
+ assertNotNull(files);
if (content) {
assertEquals(1, files.length);
assertTrue(files[0].exists());
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
----------------------------------------------------------------------
diff --git a/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java b/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
index 3564be4..bc76c03 100644
--- a/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
+++ b/examples/simple/src/main/java/org/apache/accumulo/examples/simple/shard/Index.java
@@ -70,8 +70,11 @@ public class Index {
public static void index(int numPartitions, File src, String splitRegex, BatchWriter bw) throws Exception {
if (src.isDirectory()) {
- for (File child : src.listFiles()) {
- index(numPartitions, child, splitRegex, bw);
+ File[] files = src.listFiles();
+ if (files != null) {
+ for (File child : files) {
+ index(numPartitions, child, splitRegex, bw);
+ }
}
} else {
FileReader fr = new FileReader(src);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
----------------------------------------------------------------------
diff --git a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
index a21ba64..19aed0b 100644
--- a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloClusterImpl.java
@@ -205,13 +205,18 @@ public class MiniAccumuloClusterImpl implements AccumuloCluster {
}
private boolean containsSiteFile(File f) {
- return f.isDirectory() && f.listFiles(new FileFilter() {
+ if (!f.isDirectory()) {
+ return false;
+ } else {
+ File[] files = f.listFiles(new FileFilter() {
- @Override
- public boolean accept(File pathname) {
- return pathname.getName().endsWith("site.xml");
- }
- }).length > 0;
+ @Override
+ public boolean accept(File pathname) {
+ return pathname.getName().endsWith("site.xml");
+ }
+ });
+ return files != null && files.length > 0;
+ }
}
private void append(StringBuilder classpathBuilder, URL url) throws URISyntaxException {
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
----------------------------------------------------------------------
diff --git a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
index eab82ba..ef498bf 100644
--- a/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
+++ b/minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java
@@ -108,8 +108,12 @@ public class MiniAccumuloConfigImpl {
if (this.getDir().exists() && !this.getDir().isDirectory())
throw new IllegalArgumentException("Must pass in directory, " + this.getDir() + " is a file");
- if (this.getDir().exists() && this.getDir().list().length != 0)
- throw new IllegalArgumentException("Directory " + this.getDir() + " is not empty");
+ if (this.getDir().exists()) {
+ String[] children = this.getDir().list();
+ if (children != null && children.length != 0) {
+ throw new IllegalArgumentException("Directory " + this.getDir() + " is not empty");
+ }
+ }
if (!initialized) {
libDir = new File(dir, "lib");
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 33d4df8..19ff679 100644
--- a/pom.xml
+++ b/pom.xml
@@ -122,7 +122,7 @@
<!-- relative path for Eclipse format; should override in child modules if necessary -->
<eclipseFormatterStyle>${project.parent.basedir}/contrib/Eclipse-Accumulo-Codestyle.xml</eclipseFormatterStyle>
<!-- findbugs-maven-plugin won't work on jdk8 or later; set to 3.0.0 or newer -->
- <findbugs.version>3.0.0</findbugs.version>
+ <findbugs.version>3.0.1</findbugs.version>
<!-- surefire/failsafe plugin option -->
<forkCount>1</forkCount>
<!-- overwritten in hadoop profiles -->
@@ -540,7 +540,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.14</version>
+ <version>2.15</version>
</plugin>
<plugin>
<groupId>com.github.ekryd.sortpom</groupId>
@@ -1080,7 +1080,7 @@
<dependency>
<groupId>com.puppycrawl.tools</groupId>
<artifactId>checkstyle</artifactId>
- <version>6.3</version>
+ <version>6.6</version>
</dependency>
</dependencies>
<executions>
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
----------------------------------------------------------------------
diff --git a/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java b/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
index 2c192cf..c6f78bb 100644
--- a/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
+++ b/server/base/src/main/java/org/apache/accumulo/server/util/SendLogToChainsaw.java
@@ -90,7 +90,7 @@ public class SendLogToChainsaw extends XMLLayout {
throw new IllegalArgumentException(directory + " is not a directory or is not readable.");
}
- if (logFiles.length == 0) {
+ if (logFiles == null || logFiles.length == 0) {
throw new IllegalArgumentException("No files match the supplied filter.");
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
----------------------------------------------------------------------
diff --git a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
index b1a4582..522ebb6 100644
--- a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
+++ b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/Table.java
@@ -160,7 +160,7 @@ public class Table {
String legendUrl = String.format("/op?action=toggleLegend&redir=%s&page=%s&table=%s&show=%s", redir, page, table, !showLegend);
sb.append("<a href='").append(legendUrl).append("'>").append(showLegend ? "Hide" : "Show").append(" Legend</a>\n");
if (showLegend)
- sb.append("<div class='left ").append(showLegend ? "show" : "hide").append("'><dl>\n");
+ sb.append("<div class='left show'><dl>\n");
}
for (int i = 0; i < columns.size(); ++i) {
TableColumn<?> col = columns.get(i);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
----------------------------------------------------------------------
diff --git a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
index b2de91e..dfa40eb 100644
--- a/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
+++ b/server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/NumberType.java
@@ -73,7 +73,7 @@ public class NumberType<T extends Number> extends CellType<T> {
else if (o2 == null)
return 1;
else
- return Double.valueOf(o1.doubleValue()).compareTo(o2.doubleValue());
+ return Double.compare(o1.doubleValue(), o2.doubleValue());
}
public static String commas(long i) {
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java b/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
index 60c8e8d..2667b53 100644
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/log/LocalWALRecovery.java
@@ -135,47 +135,50 @@ public class LocalWALRecovery implements Runnable {
}
log.info("Copying WALs to " + options.destination);
- for (File file : localDirectory.listFiles()) {
- String name = file.getName();
- try {
- UUID.fromString(name);
- } catch (IllegalArgumentException ex) {
- log.info("Ignoring non-log file " + file.getAbsolutePath());
- continue;
- }
-
- LogFileKey key = new LogFileKey();
- LogFileValue value = new LogFileValue();
-
- log.info("Openning local log " + file.getAbsolutePath());
-
- Path localWal = new Path(file.toURI());
- FileSystem localFs = FileSystem.getLocal(fs.getConf());
-
- Reader reader = new SequenceFile.Reader(localFs, localWal, localFs.getConf());
- // Reader reader = new SequenceFile.Reader(localFs.getConf(), SequenceFile.Reader.file(localWal));
- Path tmp = new Path(options.destination + "/" + name + ".copy");
- FSDataOutputStream writer = fs.create(tmp);
- while (reader.next(key, value)) {
+ File[] files = localDirectory.listFiles();
+ if (files != null) {
+ for (File file : files) {
+ String name = file.getName();
try {
- key.write(writer);
- value.write(writer);
- } catch (EOFException ex) {
- break;
+ UUID.fromString(name);
+ } catch (IllegalArgumentException ex) {
+ log.info("Ignoring non-log file " + file.getAbsolutePath());
+ continue;
}
- }
- writer.close();
- reader.close();
- fs.rename(tmp, new Path(tmp.getParent(), name));
- if (options.deleteLocal) {
- if (file.delete()) {
- log.info("Copied and deleted: " + name);
+ LogFileKey key = new LogFileKey();
+ LogFileValue value = new LogFileValue();
+
+ log.info("Openning local log " + file.getAbsolutePath());
+
+ Path localWal = new Path(file.toURI());
+ FileSystem localFs = FileSystem.getLocal(fs.getConf());
+
+ Reader reader = new SequenceFile.Reader(localFs, localWal, localFs.getConf());
+ // Reader reader = new SequenceFile.Reader(localFs.getConf(), SequenceFile.Reader.file(localWal));
+ Path tmp = new Path(options.destination + "/" + name + ".copy");
+ FSDataOutputStream writer = fs.create(tmp);
+ while (reader.next(key, value)) {
+ try {
+ key.write(writer);
+ value.write(writer);
+ } catch (EOFException ex) {
+ break;
+ }
+ }
+ writer.close();
+ reader.close();
+ fs.rename(tmp, new Path(tmp.getParent(), name));
+
+ if (options.deleteLocal) {
+ if (file.delete()) {
+ log.info("Copied and deleted: " + name);
+ } else {
+ log.info("Failed to delete: " + name + " (but it is safe for you to delete it manually).");
+ }
} else {
- log.info("Failed to delete: " + name + " (but it is safe for you to delete it manually).");
+ log.info("Safe to delete: " + name);
}
- } else {
- log.info("Safe to delete: " + name);
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java b/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
index ea8874a..e5d893a 100644
--- a/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
+++ b/server/tserver/src/test/java/org/apache/accumulo/tserver/tablet/RootFilesTest.java
@@ -102,8 +102,11 @@ public class RootFilesTest {
public void assertFiles(String... files) {
HashSet<String> actual = new HashSet<String>();
- for (File file : rootTabletDir.listFiles()) {
- actual.add(file.getName());
+ File[] children = rootTabletDir.listFiles();
+ if (children != null) {
+ for (File file : children) {
+ actual.add(file.getName());
+ }
}
HashSet<String> expected = new HashSet<String>();
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
----------------------------------------------------------------------
diff --git a/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java b/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
index 866e716..704d0c3 100644
--- a/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
+++ b/shell/src/test/java/org/apache/accumulo/shell/commands/FormatterCommandTest.java
@@ -167,7 +167,7 @@ public class FormatterCommandTest {
sb.append(key).append(tab);
for (byte b : v.get()) {
- if ((b >= 48 && b <= 57) || (b >= 97 || b <= 102)) {
+ if ((b >= 48 && b <= 57) || (b >= 97 && b <= 102)) {
sb.append(String.format("0x%x ", Integer.valueOf(b)));
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
----------------------------------------------------------------------
diff --git a/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java b/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
index 53b36b4..9ebbae0 100644
--- a/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
+++ b/start/src/main/java/org/apache/accumulo/start/classloader/AccumuloClassLoader.java
@@ -251,8 +251,11 @@ public class AccumuloClassLoader {
return;
if (file.isDirectory()) {
File[] children = file.listFiles();
- for (File child : children)
- findMavenTargetClasses(paths, child, depth + 1);
+ if (children != null) {
+ for (File child : children) {
+ findMavenTargetClasses(paths, child, depth + 1);
+ }
+ }
} else if ("pom.xml".equals(file.getName())) {
paths.add(file.getParentFile().getAbsolutePath() + File.separator + "target" + File.separator + "classes");
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
----------------------------------------------------------------------
diff --git a/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java b/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
index 641da8a..85b47df 100644
--- a/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
+++ b/start/src/main/java/org/apache/accumulo/start/classloader/vfs/UniqueFileReplicator.java
@@ -95,7 +95,8 @@ public class UniqueFileReplicator implements VfsComponent, FileReplicator {
}
if (tempDir.exists()) {
- int numChildren = tempDir.list().length;
+ String[] list = tempDir.list();
+ int numChildren = list == null ? 0 : list.length;
if (0 == numChildren && !tempDir.delete())
log.warn("Cannot delete empty directory: " + tempDir);
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java b/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
index cfe8551..e40bc8e 100644
--- a/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
+++ b/test/src/main/java/org/apache/accumulo/test/continuous/TimeBinner.java
@@ -94,6 +94,9 @@ public class TimeBinner {
switch (operation) {
case AMM_HACK1: {
+ if (opts.dataColumn < 2) {
+ throw new IllegalArgumentException("--dataColumn must be at least 2");
+ }
double data_min = Double.parseDouble(tokens[opts.dataColumn - 2]);
double data_max = Double.parseDouble(tokens[opts.dataColumn - 1]);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java b/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
index 7d2c65b..00c7eb0 100644
--- a/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
+++ b/test/src/main/java/org/apache/accumulo/test/continuous/UndefinedAnalyzer.java
@@ -79,8 +79,10 @@ public class UndefinedAnalyzer {
}
});
- for (File log : ingestLogs) {
- parseLog(log);
+ if (ingestLogs != null) {
+ for (File log : ingestLogs) {
+ parseLog(log);
+ }
}
}
@@ -175,53 +177,55 @@ public class UndefinedAnalyzer {
String currentYear = (Calendar.getInstance().get(Calendar.YEAR)) + "";
String currentMonth = (Calendar.getInstance().get(Calendar.MONTH) + 1) + "";
- for (File masterLog : masterLogs) {
-
- BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(masterLog), UTF_8));
- String line;
- try {
- while ((line = reader.readLine()) != null) {
- if (line.contains("TABLET_LOADED")) {
- String[] tokens = line.split("\\s+");
- String tablet = tokens[8];
- String server = tokens[10];
-
- int pos1 = -1;
- int pos2 = -1;
- int pos3 = -1;
-
- for (int i = 0; i < tablet.length(); i++) {
- if (tablet.charAt(i) == '<' || tablet.charAt(i) == ';') {
- if (pos1 == -1) {
- pos1 = i;
- } else if (pos2 == -1) {
- pos2 = i;
- } else {
- pos3 = i;
+ if (masterLogs != null) {
+ for (File masterLog : masterLogs) {
+
+ BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(masterLog), UTF_8));
+ String line;
+ try {
+ while ((line = reader.readLine()) != null) {
+ if (line.contains("TABLET_LOADED")) {
+ String[] tokens = line.split("\\s+");
+ String tablet = tokens[8];
+ String server = tokens[10];
+
+ int pos1 = -1;
+ int pos2 = -1;
+ int pos3 = -1;
+
+ for (int i = 0; i < tablet.length(); i++) {
+ if (tablet.charAt(i) == '<' || tablet.charAt(i) == ';') {
+ if (pos1 == -1) {
+ pos1 = i;
+ } else if (pos2 == -1) {
+ pos2 = i;
+ } else {
+ pos3 = i;
+ }
}
}
- }
- if (pos1 > 0 && pos2 > 0 && pos3 == -1) {
- String tid = tablet.substring(0, pos1);
- String endRow = tablet.charAt(pos1) == '<' ? "8000000000000000" : tablet.substring(pos1 + 1, pos2);
- String prevEndRow = tablet.charAt(pos2) == '<' ? "" : tablet.substring(pos2 + 1);
- if (tid.equals(tableId)) {
- // System.out.println(" "+server+" "+tid+" "+endRow+" "+prevEndRow);
- Date date = sdf.parse(tokens[0] + " " + tokens[1] + " " + currentYear + " " + currentMonth);
- // System.out.println(" "+date);
+ if (pos1 > 0 && pos2 > 0 && pos3 == -1) {
+ String tid = tablet.substring(0, pos1);
+ String endRow = tablet.charAt(pos1) == '<' ? "8000000000000000" : tablet.substring(pos1 + 1, pos2);
+ String prevEndRow = tablet.charAt(pos2) == '<' ? "" : tablet.substring(pos2 + 1);
+ if (tid.equals(tableId)) {
+ // System.out.println(" "+server+" "+tid+" "+endRow+" "+prevEndRow);
+ Date date = sdf.parse(tokens[0] + " " + tokens[1] + " " + currentYear + " " + currentMonth);
+ // System.out.println(" "+date);
- assignments.add(new TabletAssignment(tablet, endRow, prevEndRow, server, date.getTime()));
+ assignments.add(new TabletAssignment(tablet, endRow, prevEndRow, server, date.getTime()));
+ }
+ } else if (!tablet.startsWith("!0")) {
+ System.err.println("Cannot parse tablet " + tablet);
}
- } else if (!tablet.startsWith("!0")) {
- System.err.println("Cannot parse tablet " + tablet);
- }
+ }
}
+ } finally {
+ reader.close();
}
- } finally {
- reader.close();
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java b/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
index 3a3baf0..76e8168 100644
--- a/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
+++ b/test/src/main/java/org/apache/accumulo/test/functional/CacheTestWriter.java
@@ -120,6 +120,9 @@ public class CacheTestWriter {
while (true) {
File[] files = reportDir.listFiles();
+ if (files == null) {
+ throw new IllegalStateException("report directory is inaccessible");
+ }
System.out.println("files.length " + files.length);
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java b/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
index fecced9..6df5aed 100644
--- a/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
+++ b/test/src/main/java/org/apache/accumulo/test/randomwalk/Node.java
@@ -77,13 +77,15 @@ public abstract class Node {
File zkLib = new File(zkHome);
String[] files = zkLib.list();
- for (int i = 0; i < files.length; i++) {
- String f = files[i];
- if (f.matches("^zookeeper-.+jar$")) {
- if (retval == null) {
- retval = String.format("%s/%s", zkLib.getAbsolutePath(), f);
- } else {
- retval += String.format(",%s/%s", zkLib.getAbsolutePath(), f);
+ if (files != null) {
+ for (int i = 0; i < files.length; i++) {
+ String f = files[i];
+ if (f.matches("^zookeeper-.+jar$")) {
+ if (retval == null) {
+ retval = String.format("%s/%s", zkLib.getAbsolutePath(), f);
+ } else {
+ retval += String.format(",%s/%s", zkLib.getAbsolutePath(), f);
+ }
}
}
}
http://git-wip-us.apache.org/repos/asf/accumulo/blob/b577410c/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java b/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
index 00a5749..14361a6 100644
--- a/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
+++ b/test/src/test/java/org/apache/accumulo/test/AuditMessageIT.java
@@ -18,6 +18,7 @@ package org.apache.accumulo.test;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
@@ -124,7 +125,9 @@ public class AuditMessageIT extends ConfigurableMacIT {
System.out.println("Start of captured audit messages for step " + stepName);
ArrayList<String> result = new ArrayList<String>();
- for (File file : getCluster().getConfig().getLogDir().listFiles()) {
+ File[] files = getCluster().getConfig().getLogDir().listFiles();
+ assertNotNull(files);
+ for (File file : files) {
// We want to grab the files called .out
if (file.getName().contains(".out") && file.isFile() && file.canRead()) {
LineIterator it = FileUtils.lineIterator(file, UTF_8.name());