Posted to commits@accumulo.apache.org by ct...@apache.org on 2013/11/01 01:56:31 UTC

[52/54] ACCUMULO-210 Create separate RPMs

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/tserver/src/main/packaging/etc/init.d/accumulo-tserver
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/packaging/etc/init.d/accumulo-tserver b/server/tserver/src/main/packaging/etc/init.d/accumulo-tserver
new file mode 100755
index 0000000..902368b
--- /dev/null
+++ b/server/tserver/src/main/packaging/etc/init.d/accumulo-tserver
@@ -0,0 +1,162 @@
+#! /bin/sh
+# chkconfig: 2345 21 15
+### BEGIN INIT INFO
+# Provides:         accumulo-tserver
+# Required-Start:   $network $local_fs hadoop-namenode hadoop-datanode zookeeper-server accumulo-master
+# Required-Stop:    $network $local_fs hadoop-datanode hadoop-namenode zookeeper-server accumulo-master
+# Default-Start:    2 3 4 5
+# Default-Stop:     0 1 6
+# Short-Description: accumulo tserver process
+# Description:      The Apache Accumulo tablet server (tserver) process
+### END INIT INFO
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Do NOT "set -e"
+install -d -m 0775 -o root -g accumulo /var/run/accumulo
+
+# PATH should only include /usr/* if it runs after the mountnfs.sh script
+PATH=/sbin:/usr/sbin:/bin:/usr/bin:/usr/share/accumulo/bin
+DESC="Accumulo Tserver"
+NAME=accumulo-tserver
+ACCUMULO_PROC=tserver
+DAEMON=/usr/share/accumulo/bin/start-server.sh
+IP=`ifconfig | grep 'inet[^6]' | awk '{print $2}' | sed 's/addr://' | grep -v 0.0.0.0 | grep -v 127.0.0.1 | head -n 1`
+DAEMON_ARGS="$IP slaves"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+
+# Read configuration variable file if it is present
+[ -r /etc/default/accumulo ] && . /etc/default/accumulo
+
+# Load the VERBOSE setting and other rcS variables
+if [ -f /lib/init/vars.sh ]; then
+  . /lib/init/vars.sh
+else
+  log_daemon_msg() { logger "$@"; }
+  log_end_msg() { [ $1 -eq 0 ] && RES=OK; logger ${RES:=FAIL}; }
+fi
+
+# Define LSB log_* functions.
+# Depend on lsb-base (>= 3.2-14) to ensure that this file is present
+# and status_of_proc is working.
+. /lib/lsb/init-functions
+
+#
+# Function that starts the daemon/service
+#
+do_start()
+{
+    TARGET_USER_NAME="ACCUMULO_USER"
+    TARGET_USER=$(eval "echo \$$TARGET_USER_NAME")
+	# Return
+	#  0 if daemon has been started
+	#  1 if daemon was already running
+	#  2 if daemon could not be started
+	
+  if [ "`jps -m | grep $ACCUMULO_PROC`" ] ; then return 1; fi
+
+  su -s /bin/sh $TARGET_USER -c "/usr/share/accumulo/bin/start-server.sh $IP tserver \"tablet server\""
+
+  if [ "`jps -m | grep $ACCUMULO_PROC`" ] ; then return 0; fi
+	return 2
+	# Add code here, if necessary, that waits for the process to be ready
+	# to handle requests from services started subsequently which depend
+	# on this one.  As a last resort, sleep for some time.
+}
+
+#
+# Function that stops the daemon/service
+#
+do_stop()
+{
+    TARGET_USER_NAME="ACCUMULO_USER"
+    TARGET_USER=$(eval "echo \$$TARGET_USER_NAME")
+	# Return
+	#  0 if daemon has been stopped
+	#  1 if daemon was already stopped
+	#  2 if daemon could not be stopped
+	#  other if a failure occurred
+
+  if [ ! "`jps -m | grep $ACCUMULO_PROC`" ] ; then return 1; fi
+
+	su -s /bin/sh $TARGET_USER -c "/usr/share/accumulo/bin/accumulo admin stop $IP"
+
+  if [ "`jps -m | grep $ACCUMULO_PROC`" ] ; then jps -m | grep $ACCUMULO_PROC | awk '{print $1}' | xargs kill -9; fi
+
+  if [ "`jps -m | grep $ACCUMULO_PROC`" ] ; then return 2; fi
+	return 0;
+}
+
+case "$1" in
+  start)
+	[ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME"
+	do_start
+	case "$?" in
+		0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+		2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+	esac
+	;;
+  stop)
+	[ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME"
+	do_stop
+	case "$?" in
+		0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;;
+		2) [ "$VERBOSE" != no ] && log_end_msg 1 ;;
+	esac
+	;;
+  status)
+      status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
+      ;;
+  #reload|force-reload)
+	#
+	# If do_reload() is not implemented then leave this commented out
+	# and leave 'force-reload' as an alias for 'restart'.
+	#
+	#log_daemon_msg "Reloading $DESC" "$NAME"
+	#do_reload
+	#log_end_msg $?
+	#;;
+  restart|force-reload)
+	#
+	# If the "reload" option is implemented then remove the
+	# 'force-reload' alias
+	#
+	log_daemon_msg "Restarting $DESC" "$NAME"
+	do_stop
+	case "$?" in
+	  0|1)
+		do_start
+		case "$?" in
+			0) log_end_msg 0 ;;
+			1) log_end_msg 1 ;; # Old process is still running
+			*) log_end_msg 1 ;; # Failed to start
+		esac
+		;;
+	  *)
+	  	# Failed to stop
+		log_end_msg 1
+		;;
+	esac
+	;;
+  *)
+	#echo "Usage: $SCRIPTNAME {start|stop|restart|reload|force-reload}" >&2
+	echo "Usage: $SCRIPTNAME {start|stop|status|restart|force-reload}" >&2
+	exit 3
+	;;
+esac
+
+:
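
The init script above follows the standard LSB start/stop/status/restart skeleton, so once the accumulo-tserver RPM drops it into /etc/init.d it would normally be driven through chkconfig and the service wrapper. A minimal usage sketch, assuming a Red Hat-style host (the exact service-management commands depend on the distribution):

    # register the script for the runlevels declared in its chkconfig header
    sudo chkconfig --add accumulo-tserver
    # start, inspect, and stop the tablet server process
    sudo service accumulo-tserver start
    sudo service accumulo-tserver status
    sudo service accumulo-tserver stop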

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/pom.xml
----------------------------------------------------------------------
diff --git a/server/utils/pom.xml b/server/utils/pom.xml
deleted file mode 100644
index ff8a58f..0000000
--- a/server/utils/pom.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.accumulo</groupId>
-    <artifactId>accumulo-project</artifactId>
-    <version>1.6.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-  <artifactId>accumulo-utils</artifactId>
-  <name>Server Utilities</name>
-  <dependencies>
-    <dependency>
-      <groupId>com.beust</groupId>
-      <artifactId>jcommander</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>jline</groupId>
-      <artifactId>jline</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-core</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-fate</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-master</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-start</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-trace</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.accumulo</groupId>
-      <artifactId>accumulo-tserver</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>commons-codec</groupId>
-      <artifactId>commons-codec</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-collections</groupId>
-      <artifactId>commons-collections</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-configuration</groupId>
-      <artifactId>commons-configuration</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-io</groupId>
-      <artifactId>commons-io</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>javax.servlet</groupId>
-      <artifactId>servlet-api</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>log4j</groupId>
-      <artifactId>log4j</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.zookeeper</groupId>
-      <artifactId>zookeeper</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.mortbay.jetty</groupId>
-      <artifactId>jetty</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FilterMeta.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FilterMeta.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FilterMeta.java
deleted file mode 100644
index ac56c99..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FilterMeta.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.utils.metanalysis;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.accumulo.core.metadata.MetadataTable;
-import org.apache.accumulo.core.util.CachedConfiguration;
-import org.apache.accumulo.tserver.logger.LogEvents;
-import org.apache.accumulo.tserver.logger.LogFileKey;
-import org.apache.accumulo.tserver.logger.LogFileValue;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-
-/**
- * A map reduce job that takes a set of walogs and filters out all non-metadata table events.
- */
-public class FilterMeta extends Configured implements Tool {
-  
-  public static class FilterMapper extends Mapper<LogFileKey,LogFileValue,LogFileKey,LogFileValue> {
-    private Set<Integer> tabletIds;
-    
-    @Override
-    protected void setup(Context context) throws java.io.IOException, java.lang.InterruptedException {
-      tabletIds = new HashSet<Integer>();
-    }
-    
-    @Override
-    public void map(LogFileKey key, LogFileValue value, Context context) throws IOException, InterruptedException {
-      if (key.event == LogEvents.OPEN) {
-        context.write(key, value);
-      } else if (key.event == LogEvents.DEFINE_TABLET && key.tablet.getTableId().toString().equals(MetadataTable.ID)) {
-        tabletIds.add(key.tid);
-        context.write(key, value);
-      } else if ((key.event == LogEvents.MUTATION || key.event == LogEvents.MANY_MUTATIONS) && tabletIds.contains(key.tid)) {
-        context.write(key, value);
-      }
-    }
-  }
-
-  @Override
-  public int run(String[] args) throws Exception {
-    
-    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();
-
-    @SuppressWarnings("deprecation")
-    Job job = new Job(getConf(), jobName);
-    job.setJarByClass(this.getClass());
-    
-    Path paths[] = new Path[args.length - 1];
-    for (int i = 0; i < paths.length; i++) {
-      paths[i] = new Path(args[i]);
-    }
-
-    job.setInputFormatClass(LogFileInputFormat.class);
-    LogFileInputFormat.setInputPaths(job, paths);
-    
-    job.setOutputFormatClass(LogFileOutputFormat.class);
-    LogFileOutputFormat.setOutputPath(job, new Path(args[args.length - 1]));
-
-    job.setMapperClass(FilterMapper.class);
-    
-    job.setNumReduceTasks(0);
-
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
-  }
-  
-  public static void main(String[] args) throws Exception {
-    int res = ToolRunner.run(CachedConfiguration.getInstance(), new FilterMeta(), args);
-    System.exit(res);
-  }
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FindTablet.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FindTablet.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FindTablet.java
deleted file mode 100644
index 773c41a..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/FindTablet.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.utils.metanalysis;
-
-import java.util.Map.Entry;
-
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.KeyExtent;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.util.TextUtil;
-import org.apache.accumulo.server.cli.ClientOpts;
-import org.apache.hadoop.io.Text;
-
-import com.beust.jcommander.Parameter;
-
-/**
- * Finds tablet creation events.
- */
-public class FindTablet {
-  
-  static public class Opts extends ClientOpts {
-    @Parameter(names = {"-r", "--row"}, required = true, description = "find tablets that contain this row")
-    String row = null;
-    
-    @Parameter(names = "--tableId", required = true, description = "table id")
-    String tableId = null;
-  }
-  
-  public static void main(String[] args) throws Exception {
-    Opts opts = new Opts();
-    opts.parseArgs(FindTablet.class.getName(), args);
-    
-    findContainingTablets(opts);
-  }
-  
-  private static void findContainingTablets(Opts opts) throws Exception {
-    Range range = new KeyExtent(new Text(opts.tableId), null, null).toMetadataRange();
-    
-    Scanner scanner = opts.getConnector().createScanner("createEvents", opts.auths);
-    scanner.setRange(range);
-    
-    Text row = new Text(opts.row);
-    for (Entry<Key,Value> entry : scanner) {
-      KeyExtent ke = new KeyExtent(entry.getKey().getRow(), new Value(TextUtil.getBytes(entry.getKey().getColumnFamily())));
-      if (ke.contains(row)) {
-        System.out.println(entry.getKey().getColumnQualifier() + " " + ke + " " + entry.getValue());
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/IndexMeta.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/IndexMeta.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/IndexMeta.java
deleted file mode 100644
index 8e7b221..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/IndexMeta.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.utils.metanalysis;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.TableExistsException;
-import org.apache.accumulo.core.client.mapreduce.AccumuloOutputFormat;
-import org.apache.accumulo.core.data.ColumnUpdate;
-import org.apache.accumulo.core.data.KeyExtent;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.metadata.MetadataTable;
-import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
-import org.apache.accumulo.core.util.CachedConfiguration;
-import org.apache.accumulo.server.cli.ClientOpts;
-import org.apache.accumulo.tserver.logger.LogEvents;
-import org.apache.accumulo.tserver.logger.LogFileKey;
-import org.apache.accumulo.tserver.logger.LogFileValue;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.Mapper;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.log4j.Logger;
-
-import com.beust.jcommander.Parameter;
-
-/**
- * A map reduce job that takes write ahead logs containing mutations for the metadata table and indexes them into Accumulo tables for analysis.
- * 
- */
-
-public class IndexMeta extends Configured implements Tool {
-  
-  public static class IndexMapper extends Mapper<LogFileKey,LogFileValue,Text,Mutation> {
-    private static final Text CREATE_EVENTS_TABLE = new Text("createEvents");
-    private static final Text TABLET_EVENTS_TABLE = new Text("tabletEvents");
-    private Map<Integer,KeyExtent> tabletIds = new HashMap<Integer,KeyExtent>();
-    private String uuid = null;
-    
-    @Override
-    protected void setup(Context context) throws java.io.IOException, java.lang.InterruptedException {
-      tabletIds = new HashMap<Integer,KeyExtent>();
-      uuid = null;
-    }
-    
-    @Override
-    public void map(LogFileKey key, LogFileValue value, Context context) throws IOException, InterruptedException {
-      if (key.event == LogEvents.OPEN) {
-        uuid = key.tserverSession;
-      } else if (key.event == LogEvents.DEFINE_TABLET) {
-        if (key.tablet.getTableId().toString().equals(MetadataTable.ID)) {
-          tabletIds.put(key.tid, new KeyExtent(key.tablet));
-        }
-      } else if ((key.event == LogEvents.MUTATION || key.event == LogEvents.MANY_MUTATIONS) && tabletIds.containsKey(key.tid)) {
-        for (Mutation m : value.mutations) {
-          index(context, m, uuid, tabletIds.get(key.tid));
-        }
-      }
-    }
-    
-    void index(Context context, Mutation m, String logFile, KeyExtent metaTablet) throws IOException, InterruptedException {
-      List<ColumnUpdate> columnsUpdates = m.getUpdates();
-      
-      Text prevRow = null;
-      long timestamp = 0;
-      
-      if (m.getRow().length > 0 && m.getRow()[0] == '~') {
-        return;
-      }
-      
-      for (ColumnUpdate cu : columnsUpdates) {
-        if (TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.equals(new Text(cu.getColumnFamily()), new Text(cu.getColumnQualifier())) && !cu.isDeleted()) {
-          prevRow = new Text(cu.getValue());
-        }
-        
-        timestamp = cu.getTimestamp();
-      }
-      
-      byte[] serMut = WritableUtils.toByteArray(m);
-      
-      if (prevRow != null) {
-        Mutation createEvent = new Mutation(new Text(m.getRow()));
-        createEvent.put(prevRow, new Text(String.format("%020d", timestamp)), new Value(metaTablet.toString().getBytes()));
-        context.write(CREATE_EVENTS_TABLE, createEvent);
-      }
-      
-      Mutation tabletEvent = new Mutation(new Text(m.getRow()));
-      tabletEvent.put(new Text(String.format("%020d", timestamp)), new Text("mut"), new Value(serMut));
-      tabletEvent.put(new Text(String.format("%020d", timestamp)), new Text("mtab"), new Value(metaTablet.toString().getBytes()));
-      tabletEvent.put(new Text(String.format("%020d", timestamp)), new Text("log"), new Value(logFile.getBytes()));
-      context.write(TABLET_EVENTS_TABLE, tabletEvent);
-    }
-  }
-  
-  static class Opts extends ClientOpts {
-    @Parameter(description = "<logfile> { <logfile> ...}")
-    List<String> logFiles = new ArrayList<String>();
-  }
-  
-  @Override
-  public int run(String[] args) throws Exception {
-    Opts opts = new Opts();
-    opts.parseArgs(IndexMeta.class.getName(), args);
-    
-    String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();
-
-    @SuppressWarnings("deprecation")
-    Job job = new Job(getConf(), jobName);
-    job.setJarByClass(this.getClass());
-    
-    List<String> logFiles = Arrays.asList(args).subList(4, args.length);
-    Path paths[] = new Path[logFiles.size()];
-    int count = 0;
-    for (String logFile : logFiles) {
-      paths[count++] = new Path(logFile);
-    }
-    
-    job.setInputFormatClass(LogFileInputFormat.class);
-    LogFileInputFormat.setInputPaths(job, paths);
-    
-    job.setNumReduceTasks(0);
-    
-    job.setOutputFormatClass(AccumuloOutputFormat.class);
-    AccumuloOutputFormat.setZooKeeperInstance(job, opts.instance, opts.zookeepers);
-    AccumuloOutputFormat.setConnectorInfo(job, opts.principal, opts.getToken());
-    AccumuloOutputFormat.setCreateTables(job, false);
-    
-    job.setMapperClass(IndexMapper.class);
-    
-    Connector conn = opts.getConnector();
-    
-    try {
-      conn.tableOperations().create("createEvents");
-    } catch (TableExistsException tee) {
-      Logger.getLogger(IndexMeta.class).warn("Table createEvents exists");
-    }
-    
-    try {
-      conn.tableOperations().create("tabletEvents");
-    } catch (TableExistsException tee) {
-      Logger.getLogger(IndexMeta.class).warn("Table tabletEvents exists");
-    }
-    
-    job.waitForCompletion(true);
-    return job.isSuccessful() ? 0 : 1;
-  }
-  
-  public static void main(String[] args) throws Exception {
-    int res = ToolRunner.run(CachedConfiguration.getInstance(), new IndexMeta(), args);
-    System.exit(res);
-  }
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileInputFormat.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileInputFormat.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileInputFormat.java
deleted file mode 100644
index 603bf71..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileInputFormat.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.accumulo.utils.metanalysis;
-
-import java.io.EOFException;
-import java.io.IOException;
-
-import org.apache.accumulo.tserver.logger.LogFileKey;
-import org.apache.accumulo.tserver.logger.LogFileValue;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.FileSplit;
-
-/**
- * Input format for Accumulo write ahead logs
- */
-public class LogFileInputFormat extends FileInputFormat<LogFileKey,LogFileValue> {
-  
-  private static class LogFileRecordReader extends RecordReader<LogFileKey,LogFileValue> {
-    
-    private FSDataInputStream fsdis;
-    private LogFileKey key;
-    private LogFileValue value;
-    private long length;
-    
-    @Override
-    public void close() throws IOException {
-      fsdis.close();
-    }
-    
-    @Override
-    public LogFileKey getCurrentKey() throws IOException, InterruptedException {
-      return key;
-    }
-    
-    @Override
-    public LogFileValue getCurrentValue() throws IOException, InterruptedException {
-      return value;
-    }
-    
-    @Override
-    public float getProgress() throws IOException, InterruptedException {
-      float progress = (length - fsdis.getPos()) / (float) length;
-      if (progress < 0)
-        return 0;
-      return progress;
-    }
-    
-    @Override
-    public void initialize(InputSplit is, TaskAttemptContext context) throws IOException, InterruptedException {
-      FileSplit fileSplit = (FileSplit) is;
-      
-      Configuration conf = new Configuration();
-      FileSystem fs = FileSystem.get(conf);
-      
-      key = new LogFileKey();
-      value = new LogFileValue();
-      
-      fsdis = fs.open(fileSplit.getPath());
-      FileStatus status = fs.getFileStatus(fileSplit.getPath());
-      length = status.getLen();
-    }
-
-    @Override
-    public boolean nextKeyValue() throws IOException, InterruptedException {
-      if (key == null)
-        return false;
-      
-      try {
-        key.readFields(fsdis);
-        value.readFields(fsdis);
-        return true;
-      } catch (EOFException ex) {
-        key = null;
-        value = null;
-        return false;
-      }
-    }
-    
-  }
-
-  
-  @Override
-  public RecordReader<LogFileKey,LogFileValue> createRecordReader(InputSplit arg0, TaskAttemptContext arg1) throws IOException, InterruptedException {
-    return new LogFileRecordReader();
-  }
-  
-  @Override
-  protected boolean isSplitable(JobContext context, Path filename) {
-    return false;
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileOutputFormat.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileOutputFormat.java
deleted file mode 100644
index cccb89e..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/LogFileOutputFormat.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.utils.metanalysis;
-
-import java.io.IOException;
-
-import org.apache.accumulo.tserver.logger.LogFileKey;
-import org.apache.accumulo.tserver.logger.LogFileValue;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-
-/**
- * Output format for Accumulo write ahead logs.
- */
-public class LogFileOutputFormat extends FileOutputFormat<LogFileKey,LogFileValue> {
-  
-  private static class LogFileRecordWriter extends RecordWriter<LogFileKey,LogFileValue> {
-    
-    private FSDataOutputStream out;
-    
-    public LogFileRecordWriter(Path outputPath) throws IOException {
-      Configuration conf = new Configuration();
-      FileSystem fs = FileSystem.get(conf);
-      
-      out = fs.create(outputPath);
-    }
-    
-    @Override
-    public void close(TaskAttemptContext arg0) throws IOException, InterruptedException {
-      out.close();
-    }
-    
-    @Override
-    public void write(LogFileKey key, LogFileValue val) throws IOException, InterruptedException {
-      key.write(out);
-      val.write(out);
-    }
-    
-  }
-  
-  @Override
-  public RecordWriter<LogFileKey,LogFileValue> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
-    Path outputPath = getDefaultWorkFile(context, "");
-    return new LogFileRecordWriter(outputPath);
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/PrintEvents.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/PrintEvents.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/PrintEvents.java
deleted file mode 100644
index fc6c18a..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/PrintEvents.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.accumulo.utils.metanalysis;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map.Entry;
-
-import org.apache.accumulo.core.client.Connector;
-import org.apache.accumulo.core.client.Scanner;
-import org.apache.accumulo.core.data.ColumnUpdate;
-import org.apache.accumulo.core.data.Key;
-import org.apache.accumulo.core.data.Mutation;
-import org.apache.accumulo.core.data.PartialKey;
-import org.apache.accumulo.core.data.Range;
-import org.apache.accumulo.core.data.Value;
-import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
-import org.apache.accumulo.core.security.Authorizations;
-import org.apache.accumulo.server.cli.ClientOpts;
-import org.apache.accumulo.tserver.logger.LogFileValue;
-import org.apache.hadoop.io.Text;
-
-import com.beust.jcommander.Parameter;
-
-/**
- * Looks up and prints mutations indexed by IndexMeta
- */
-public class PrintEvents {
-  
-  static class Opts extends ClientOpts {
-    @Parameter(names = {"-t", "--tableId"}, description = "table id", required = true)
-    String tableId;
-    @Parameter(names = {"-e", "--endRow"}, description = "end row")
-    String endRow;
-    @Parameter(names = {"-t", "--time"}, description = "time, in milliseconds", required = true)
-    long time;
-  }
-  
-  public static void main(String[] args) throws Exception {
-    Opts opts = new Opts();
-    opts.parseArgs(PrintEvents.class.getName(), args);
-    
-    Connector conn = opts.getConnector();
-    
-    printEvents(conn, opts.tableId, opts.endRow, opts.time);
-  }
-  
-  private static void printEvents(Connector conn, String tableId, String endRow, Long time) throws Exception {
-    Scanner scanner = conn.createScanner("tabletEvents", new Authorizations());
-    String metaRow = tableId + (endRow == null ? "<" : ";" + endRow);
-    scanner.setRange(new Range(new Key(metaRow, String.format("%020d", time)), true, new Key(metaRow).followingKey(PartialKey.ROW), false));
-    int count = 0;
-    
-    String lastLog = null;
-    
-    loop1: for (Entry<Key,Value> entry : scanner) {
-      if (entry.getKey().getColumnQualifier().toString().equals("log")) {
-        if (lastLog == null || !lastLog.equals(entry.getValue().toString()))
-          System.out.println("Log : " + entry.getValue());
-        lastLog = entry.getValue().toString();
-      } else if (entry.getKey().getColumnQualifier().toString().equals("mut")) {
-        DataInputStream dis = new DataInputStream(new ByteArrayInputStream(entry.getValue().get()));
-        Mutation m = new Mutation();
-        m.readFields(dis);
-        
-        LogFileValue lfv = new LogFileValue();
-        lfv.mutations = Collections.singletonList(m);
-        
-        System.out.println(LogFileValue.format(lfv, 1));
-        
-        List<ColumnUpdate> columnsUpdates = m.getUpdates();
-        for (ColumnUpdate cu : columnsUpdates) {
-          if (TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.equals(new Text(cu.getColumnFamily()), new Text(cu.getColumnQualifier())) && count > 0) {
-            System.out.println("Saw change to prevrow, stopping printing events.");
-            break loop1;
-          }
-        }
-        count++;
-      }
-    }
-    
-  }
-}

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/package-info.java
----------------------------------------------------------------------
diff --git a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/package-info.java b/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/package-info.java
deleted file mode 100644
index 3c15884..0000000
--- a/server/utils/src/main/java/org/apache/accumulo/utils/metanalysis/package-info.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * Provides programs to analyze metadata mutations written to write ahead logs.  
- * 
- * <p>
- * These programs can be used when write ahead logs are archived.   The best way to find
- * which write ahead logs contain metadata mutations is to grep the tablet server logs.  
- * Grep for events where walogs were added to metadata tablets, then take the unique set 
- * of walogs.
- *
- * <p>
- * To use these programs, use IndexMeta to index the metadata mutations in walogs into 
- * Accumulo tables.  Then use FindTablet and PrintEvents to analyze those indexes.
- * FilterMeta allows filtering walogs down to just metadata events.  This is useful for the
- * case where the walogs need to be exported from the cluster for analysis.
- *
- * @since 1.5
- */
-package org.apache.accumulo.utils.metanalysis;
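
The javadoc above describes the metanalysis workflow in prose; the classes themselves are run through Accumulo's general-purpose class launcher. A hypothetical invocation sketch (these utilities are being removed from the build in this commit, and the connection flags and arguments shown are illustrative only):

    # index metadata mutations from archived walogs into the createEvents/tabletEvents tables
    accumulo org.apache.accumulo.utils.metanalysis.IndexMeta -u root -p secret /archive/walog-1 /archive/walog-2
    # find tablet creation events for a given table id and row
    accumulo org.apache.accumulo.utils.metanalysis.FindTablet -u root -p secret --tableId 2 --row somerow
    # print the indexed mutations for a tablet from a given time onward
    accumulo org.apache.accumulo.utils.metanalysis.PrintEvents -u root -p secret --tableId 2 --time 0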

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/start/pom.xml
----------------------------------------------------------------------
diff --git a/start/pom.xml b/start/pom.xml
index 3484aab..7e7a5a0 100644
--- a/start/pom.xml
+++ b/start/pom.xml
@@ -24,6 +24,7 @@
   </parent>
   <artifactId>accumulo-start</artifactId>
   <name>Start</name>
+  <description>A library for launching Apache Accumulo services.</description>
   <dependencies>
     <dependency>
       <groupId>org.apache.commons</groupId>
@@ -107,4 +108,37 @@
       </plugins>
     </pluginManagement>
   </build>
+  <profiles>
+    <profile>
+      <id>rpm</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>rpm-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>build-rpm</id>
+                <goals>
+                  <goal>attached-rpm</goal>
+                </goals>
+                <phase>package</phase>
+                <configuration>
+                  <requires>
+                    <require>jre &gt;= 1.6.0</require>
+                  </requires>
+                  <mappings>
+                    <mapping>
+                      <directory>%{_javadir}/accumulo</directory>
+                      <artifact />
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
 </project>
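
The rpm profile added here (and mirrored in the other module poms in this commit) only runs when explicitly activated. A build sketch, assuming a host with rpmbuild available to the rpm-maven-plugin:

    # activate the rpm profile so "package" also produces an attached RPM
    mvn -Prpm clean package
    # the rpm-maven-plugin writes its output under each module's target/rpm directory
    find start/target/rpm -name '*.rpm'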

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/test/pom.xml
----------------------------------------------------------------------
diff --git a/test/pom.xml b/test/pom.xml
index 278b8e6..8e4c152 100644
--- a/test/pom.xml
+++ b/test/pom.xml
@@ -24,6 +24,7 @@
   </parent>
   <artifactId>accumulo-test</artifactId>
   <name>Testing</name>
+  <description>Tests for Apache Accumulo.</description>
   <dependencies>
     <dependency>
       <groupId>com.beust</groupId>

http://git-wip-us.apache.org/repos/asf/accumulo/blob/05d5921c/trace/pom.xml
----------------------------------------------------------------------
diff --git a/trace/pom.xml b/trace/pom.xml
index 9501f26..0754041 100644
--- a/trace/pom.xml
+++ b/trace/pom.xml
@@ -24,6 +24,7 @@
   </parent>
   <artifactId>accumulo-trace</artifactId>
   <name>Trace</name>
+  <description>A distributed tracing library for Apache Accumulo.</description>
   <dependencies>
     <dependency>
       <groupId>org.apache.thrift</groupId>
@@ -79,5 +80,36 @@
         </plugins>
       </build>
     </profile>
+    <profile>
+      <id>rpm</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.codehaus.mojo</groupId>
+            <artifactId>rpm-maven-plugin</artifactId>
+            <executions>
+              <execution>
+                <id>build-rpm</id>
+                <goals>
+                  <goal>attached-rpm</goal>
+                </goals>
+                <phase>package</phase>
+                <configuration>
+                  <requires>
+                    <require>jre &gt;= 1.6.0</require>
+                  </requires>
+                  <mappings>
+                    <mapping>
+                      <directory>%{_javadir}/accumulo</directory>
+                      <artifact />
+                    </mapping>
+                  </mappings>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 </project>