Posted to commits@accumulo.apache.org by ec...@apache.org on 2015/10/02 19:16:05 UTC

[1/5] accumulo git commit: ACCUMULO-2232 Added options to Combiner for handling deletes

Repository: accumulo
Updated Branches:
  refs/heads/master 18e834ec2 -> 17ae2f919


ACCUMULO-2232 Added options to Combiner for handling deletes


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/7a1d6d92
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/7a1d6d92
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/7a1d6d92

Branch: refs/heads/master
Commit: 7a1d6d921ba81c7f92eab8d3b20aae461a46066a
Parents: e24bd36
Author: Keith Turner <kt...@apache.org>
Authored: Fri Oct 2 10:19:37 2015 -0400
Committer: Keith Turner <kt...@apache.org>
Committed: Fri Oct 2 11:10:00 2015 -0400

----------------------------------------------------------------------
 .../accumulo/core/iterators/Combiner.java       |  93 +++++++++-
 .../core/iterators/CombinerTestUtil.java        |  23 +++
 .../iterators/user/BigDecimalCombinerTest.java  |   6 +-
 .../core/iterators/user/CombinerTest.java       | 175 ++++++++++++++++---
 4 files changed, 270 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/7a1d6d92/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java b/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
index 97d0ce3..091803b 100644
--- a/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
+++ b/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
@@ -22,6 +22,9 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.accumulo.core.client.IteratorSetting;
 import org.apache.accumulo.core.client.IteratorSetting.Column;
@@ -31,11 +34,15 @@ import org.apache.accumulo.core.data.Key;
 import org.apache.accumulo.core.data.PartialKey;
 import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
+import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
 import org.apache.accumulo.core.iterators.conf.ColumnSet;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Splitter;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Lists;
 
 /**
@@ -45,18 +52,28 @@ import com.google.common.collect.Lists;
  * will combine all Keys in column family and qualifier individually. Combination is only ever performed on multiple versions and not across column qualifiers
  * or column visibilities.
  *
+ * <p>
  * Implementations must provide a reduce method: {@code public Value reduce(Key key, Iterator<Value> iter)}.
  *
+ * <p>
  * This reduce method will be passed the most recent Key and an iterator over the Values for all non-deleted versions of that Key. A combiner will not combine
  * keys that differ by more than the timestamp.
  *
+ * <p>
  * This class and its implementations do not automatically filter out unwanted columns from those being combined, thus it is generally recommended to use a
  * {@link Combiner} implementation with the {@link ScannerBase#fetchColumnFamily(Text)} or {@link ScannerBase#fetchColumn(Text, Text)} methods.
+ *
+ * <p>
+ * WARNING: Using deletes with Combiners may not work as intended. See {@link #setReduceOnFullCompactionOnly(IteratorSetting, boolean)}
  */
 public abstract class Combiner extends WrappingIterator implements OptionDescriber {
-  static final Logger log = Logger.getLogger(Combiner.class);
+  static final Logger sawDeleteLog = Logger.getLogger(Combiner.class.getName()+".SawDelete");
   protected static final String COLUMNS_OPTION = "columns";
   protected static final String ALL_OPTION = "all";
+  protected static final String REDUCE_ON_FULL_COMPACTION_ONLY_OPTION = "reduceOnFullCompactionOnly";
+
+  private boolean isMajorCompaction;
+  private boolean reduceOnFullCompactionOnly;
 
   /**
    * A Java Iterator that iterates over the Values for a given Key from a source SortedKeyValueIterator.
@@ -149,6 +166,27 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
 
   private Key workKey = new Key();
 
+  @VisibleForTesting
+  static final Cache<String,Boolean> loggedMsgCache = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.HOURS).maximumSize(10000).build();
+
+  private void sawDelete() {
+    if (isMajorCompaction && !reduceOnFullCompactionOnly) {
+      try {
+        loggedMsgCache.get(this.getClass().getName(), new Callable<Boolean>() {
+          @Override
+          public Boolean call() throws Exception {
+            sawDeleteLog.error("Combiner of type " + Combiner.this.getClass().getSimpleName()
+                + " saw a delete during a partial compaction.  This could cause undesired results.  See ACCUMULO-2232.  Will not log subsequent occurrences for at least 1 hour.");
+            // the value is not used and does not matter
+            return Boolean.TRUE;
+          }
+        });
+      } catch (ExecutionException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+
   /**
    * Sets the topKey and topValue based on the top key of the source. If the column of the source top key is in the set of combiners, topKey will be the top key
    * of the source and topValue will be the result of the reduce method. Otherwise, topKey and topValue will be unchanged. (They are always set to null before
@@ -159,8 +197,10 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
     if (super.hasTop()) {
       workKey.set(super.getTopKey());
       if (combineAllColumns || combiners.contains(workKey)) {
-        if (workKey.isDeleted())
+        if (workKey.isDeleted()) {
+          sawDelete();
           return;
+        }
         topKey = workKey;
         Iterator<Value> viter = new ValueIterator(getSource());
         topValue = reduce(topKey, viter);
@@ -219,6 +259,7 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
       if (combineAllColumns)
         return;
     }
+
     if (!options.containsKey(COLUMNS_OPTION))
       throw new IllegalArgumentException("Must specify " + COLUMNS_OPTION + " option");
 
@@ -227,10 +268,27 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
       throw new IllegalArgumentException("The " + COLUMNS_OPTION + " must not be empty");
 
     combiners = new ColumnSet(Lists.newArrayList(Splitter.on(",").split(encodedColumns)));
+
+    isMajorCompaction = env.getIteratorScope() == IteratorScope.majc;
+
+    String rofco = options.get(REDUCE_ON_FULL_COMPACTION_ONLY_OPTION);
+    if (rofco != null) {
+      reduceOnFullCompactionOnly = Boolean.parseBoolean(rofco);
+    } else {
+      reduceOnFullCompactionOnly = false;
+    }
+
+    if (reduceOnFullCompactionOnly && isMajorCompaction && !env.isFullMajorCompaction()) {
+      // adjust configuration so that no columns are combined for a partial major compaction
+      combineAllColumns = false;
+      combiners = new ColumnSet();
+    }
+
   }
 
   @Override
   public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) {
+    // TODO test
     Combiner newInstance;
     try {
       newInstance = this.getClass().newInstance();
@@ -240,6 +298,8 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
     newInstance.setSource(getSource().deepCopy(env));
     newInstance.combiners = combiners;
     newInstance.combineAllColumns = combineAllColumns;
+    newInstance.isMajorCompaction = isMajorCompaction;
+    newInstance.reduceOnFullCompactionOnly = reduceOnFullCompactionOnly;
     return newInstance;
   }
 
@@ -249,6 +309,7 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
     io.addNamedOption(ALL_OPTION, "set to true to apply Combiner to every column, otherwise leave blank. if true, " + COLUMNS_OPTION
         + " option will be ignored.");
     io.addNamedOption(COLUMNS_OPTION, "<col fam>[:<col qual>]{,<col fam>[:<col qual>]} escape non-alphanum chars using %<hex>.");
+    io.addNamedOption(REDUCE_ON_FULL_COMPACTION_ONLY_OPTION, "If true, only reduce on full major compactions.  Defaults to false. ");
     return io;
   }
 
@@ -288,7 +349,6 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
    * @param columns
    *          a list of columns to encode as the value for the combiner column configuration
    */
-
   public static void setColumns(IteratorSetting is, List<IteratorSetting.Column> columns) {
     String sep = "";
     StringBuilder sb = new StringBuilder();
@@ -313,4 +373,31 @@ public abstract class Combiner extends WrappingIterator implements OptionDescrib
   public static void setCombineAllColumns(IteratorSetting is, boolean combineAllColumns) {
     is.addOption(ALL_OPTION, Boolean.toString(combineAllColumns));
   }
+
+  /**
+   * Combiners may not work correctly with deletes. Sometimes when Accumulo compacts the files in a tablet, it only compacts a subset of the files. If a delete
+   * marker exists in one of the files that is not being compacted, then data that should be deleted may be combined. See
+   * <a href="https://issues.apache.org/jira/browse/ACCUMULO-2232">ACCUMULO-2232</a> for more information. For correctness deletes should not be used with
+   * columns that are combined OR this option should be set to true.
+   *
+   * <p>
+ * When this option is set to true, all data is passed through during partial major compactions and no reducing is done. Reducing is only done during scan and
+   * full major compactions, when deletes can be correctly handled. Only reducing on full major compactions may have negative performance implications, leaving
+   * lots of work to be done at scan time.
+   *
+   * <p>
+ * When this option is set to false, combiners will log an error if a delete is seen during any compaction. This can be suppressed by adjusting logging
+   * configuration. Errors will not be logged more than once an hour per Combiner, regardless of how many deletes are seen.
+   *
+   * <p>
+   * This method was added in 1.6.4 and 1.7.1. If you want your code to work in earlier versions of 1.6 and 1.7 then do not call this method. If not set this
+   * property defaults to false in order to maintain compatibility.
+   *
+   * @since 1.6.4 1.7.1 1.8.0
+   */
+
+  public static void setReduceOnFullCompactionOnly(IteratorSetting is, boolean reduceOnFullCompactionOnly) {
+    is.addOption(REDUCE_ON_FULL_COMPACTION_ONLY_OPTION, Boolean.toString(reduceOnFullCompactionOnly));
+  }
+
 }
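
From client code, the new behavior is opted into by setting the option on the IteratorSetting before the combiner is attached to a table. A minimal sketch, assuming an existing Connector and table; the priority, iterator name, table name, and column family below are placeholders, not anything prescribed by this commit:

import java.util.Collections;

import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.iterators.Combiner;
import org.apache.accumulo.core.iterators.LongCombiner;
import org.apache.accumulo.core.iterators.user.SummingCombiner;

public class ReduceOnFullCompactionOnlyExample {

  /** Attaches a SummingCombiner that skips reducing during partial major compactions. */
  public static void configure(Connector conn, String tableName) throws Exception {
    IteratorSetting is = new IteratorSetting(10, "sum", SummingCombiner.class);
    SummingCombiner.setEncodingType(is, LongCombiner.Type.STRING);
    Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("count")));
    // Option added by this commit: reduce only on scan and full major compactions, so a
    // delete marker sitting in a file outside a partial compaction cannot be masked.
    Combiner.setReduceOnFullCompactionOnly(is, true);
    conn.tableOperations().attachIterator(tableName, is);
  }
}

Without the setReduceOnFullCompactionOnly call the combiner behaves exactly as before, so existing configurations are unaffected.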

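As the class Javadoc above notes, a subclass only has to supply reduce(Key, Iterator<Value>); column matching, option parsing, and the delete handling added here are inherited from Combiner. A minimal sketch of such a subclass (the name and the keep-the-longest-value behavior are illustrative only, not part of this commit):

import java.util.Iterator;

import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.Combiner;

/**
 * Illustrative only: keeps the longest Value among the non-deleted versions of a Key.
 */
public class LongestValueCombiner extends Combiner {
  @Override
  public Value reduce(Key key, Iterator<Value> iter) {
    Value longest = new Value(new byte[0]);
    while (iter.hasNext()) {
      Value v = iter.next();
      if (v.getSize() > longest.getSize()) {
        // copy the bytes; the iterator may reuse the underlying Value
        longest = new Value(v.get());
      }
    }
    return longest;
  }
}
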
http://git-wip-us.apache.org/repos/asf/accumulo/blob/7a1d6d92/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java b/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
new file mode 100644
index 0000000..f874173
--- /dev/null
+++ b/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.accumulo.core.iterators;
+
+public class CombinerTestUtil {
+  public static void clearLogCache(){
+    Combiner.loggedMsgCache.invalidateAll();
+  }
+}
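
CombinerTestUtil exists only so tests can reset loggedMsgCache, the Guava cache that sawDelete() uses to emit its error at most once per hour per combiner class. The same rate-limiting idiom works outside Accumulo; a self-contained sketch assuming Guava and log4j are on the classpath (class name and message are illustrative):

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import org.apache.log4j.Logger;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class RateLimitedLogger {

  private static final Logger log = Logger.getLogger(RateLimitedLogger.class);

  // Entries expire one hour after being written, so each key is logged at most once per hour.
  private static final Cache<String,Boolean> logged =
      CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.HOURS).maximumSize(10000).build();

  public static void errorOnce(String key, final String message) {
    try {
      // The Callable only runs when the key is absent, i.e. it has not been logged recently.
      logged.get(key, new Callable<Boolean>() {
        @Override
        public Boolean call() {
          log.error(message);
          return Boolean.TRUE; // the cached value is never read; it only marks "already logged"
        }
      });
    } catch (ExecutionException e) {
      throw new RuntimeException(e);
    }
  }
}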

http://git-wip-us.apache.org/repos/asf/accumulo/blob/7a1d6d92/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java b/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java
index c15fe55..dfcb869 100644
--- a/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/iterators/user/BigDecimalCombinerTest.java
@@ -72,7 +72,7 @@ public class BigDecimalCombinerTest {
     IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalSummingCombiner.class);
     Combiner.setColumns(is, columns);
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -88,7 +88,7 @@ public class BigDecimalCombinerTest {
     IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalMinCombiner.class);
     Combiner.setColumns(is, columns);
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -104,7 +104,7 @@ public class BigDecimalCombinerTest {
     IteratorSetting is = new IteratorSetting(1, BigDecimalCombiner.BigDecimalMaxCombiner.class);
     Combiner.setColumns(is, columns);
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), CombinerTest.SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());

http://git-wip-us.apache.org/repos/asf/accumulo/blob/7a1d6d92/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java b/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
index cdac2fb..152e12f 100644
--- a/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
@@ -21,6 +21,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.io.StringWriter;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -35,7 +36,10 @@ import org.apache.accumulo.core.data.Range;
 import org.apache.accumulo.core.data.Value;
 import org.apache.accumulo.core.iterators.Combiner;
 import org.apache.accumulo.core.iterators.Combiner.ValueIterator;
+import org.apache.accumulo.core.iterators.CombinerTestUtil;
 import org.apache.accumulo.core.iterators.DefaultIteratorEnvironment;
+import org.apache.accumulo.core.iterators.IteratorEnvironment;
+import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
 import org.apache.accumulo.core.iterators.LongCombiner;
 import org.apache.accumulo.core.iterators.LongCombiner.FixedLenEncoder;
 import org.apache.accumulo.core.iterators.LongCombiner.StringEncoder;
@@ -46,6 +50,9 @@ import org.apache.accumulo.core.iterators.TypedValueCombiner;
 import org.apache.accumulo.core.iterators.TypedValueCombiner.Encoder;
 import org.apache.accumulo.core.iterators.system.MultiIterator;
 import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.apache.log4j.WriterAppender;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -53,6 +60,29 @@ public class CombinerTest {
 
   private static final Collection<ByteSequence> EMPTY_COL_FAMS = new ArrayList<ByteSequence>();
 
+  static class CombinerIteratorEnvironment extends DefaultIteratorEnvironment {
+
+    private IteratorScope scope;
+    private boolean isFullMajc;
+
+    CombinerIteratorEnvironment(IteratorScope scope, boolean isFullMajc) {
+      this.scope = scope;
+      this.isFullMajc = isFullMajc;
+    }
+
+    @Override
+    public IteratorScope getIteratorScope() {
+      return scope;
+    }
+
+    @Override
+    public boolean isFullMajorCompaction() {
+      return isFullMajc;
+    }
+  }
+
+  static final IteratorEnvironment SCAN_IE = new CombinerIteratorEnvironment(IteratorScope.scan, false);
+
   static Key nk(int row, int colf, int colq, long ts, boolean deleted) {
     Key k = nk(row, colf, colq, ts);
     k.setDeleted(deleted);
@@ -98,7 +128,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, SummingCombiner.Type.VARLEN);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("2")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -163,7 +193,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, VarLenEncoder.class);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -229,7 +259,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, FixedLenEncoder.class.getName());
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -295,7 +325,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, FixedLenEncoder.class.getName());
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
 
     SortedKeyValueIterator<Key,Value> ai2 = ai.deepCopy(null);
     SortedKeyValueIterator<Key,Value> ai3 = ai.deepCopy(null);
@@ -366,7 +396,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, SummingCombiner.Type.STRING);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -421,7 +451,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, SummingCombiner.Type.STRING);
     Combiner.setCombineAllColumns(is, true);
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -472,7 +502,7 @@ public class CombinerTest {
     sources.add(new SortedMapIterator(tm3));
 
     MultiIterator mi = new MultiIterator(sources, true);
-    ai.init(mi, is.getOptions(), null);
+    ai.init(mi, is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -496,7 +526,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, VarLenEncoder.class.getName());
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), new DefaultIteratorEnvironment());
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
 
     // try seeking to the beginning of a key that aggregates
 
@@ -524,7 +554,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, SummingCombiner.Type.FIXEDLEN);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), new DefaultIteratorEnvironment());
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
 
     ai.seek(nr(1, 1, 1, 4, true), EMPTY_COL_FAMS, false);
 
@@ -543,7 +573,7 @@ public class CombinerTest {
     tm1 = new TreeMap<Key,Value>();
     nkv(tm1, 1, 1, 1, 2, true, 0l, encoder);
     ai = new SummingCombiner();
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), new DefaultIteratorEnvironment());
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
 
     ai.seek(nr(1, 1, 1, 4, true), EMPTY_COL_FAMS, false);
 
@@ -582,12 +612,12 @@ public class CombinerTest {
     IteratorSetting s = new IteratorSetting(10, "s", SummingCombiner.class);
     SummingCombiner.setColumns(s, Collections.singletonList(new IteratorSetting.Column("count")));
     SummingCombiner.setEncodingType(s, LongCombiner.StringEncoder.class);
-    iter.init(smi, s.getOptions(), new DefaultIteratorEnvironment());
+    iter.init(smi, s.getOptions(), SCAN_IE);
     Combiner iter2 = new SummingCombiner();
     IteratorSetting s2 = new IteratorSetting(10, "s2", SummingCombiner.class);
     SummingCombiner.setColumns(s2, Collections.singletonList(new IteratorSetting.Column("count", "a")));
     SummingCombiner.setEncodingType(s2, LongCombiner.StringEncoder.class);
-    iter2.init(iter, s.getOptions(), new DefaultIteratorEnvironment());
+    iter2.init(iter, s.getOptions(), SCAN_IE);
     iter2.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(iter2.hasTop());
@@ -619,7 +649,7 @@ public class CombinerTest {
     LongCombiner.setEncodingType(is, SummingCombiner.Type.VARLEN);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -632,7 +662,7 @@ public class CombinerTest {
 
     ai = new MinCombiner();
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -658,8 +688,8 @@ public class CombinerTest {
       assertEquals(a[i], b[i]);
   }
 
-  public static void sumArray(Class<? extends Encoder<List<Long>>> encoderClass, SummingArrayCombiner.Type type) throws IOException, InstantiationException,
-      IllegalAccessException {
+  public static void sumArray(Class<? extends Encoder<List<Long>>> encoderClass, SummingArrayCombiner.Type type)
+      throws IOException, InstantiationException, IllegalAccessException {
     Encoder<List<Long>> encoder = encoderClass.newInstance();
 
     TreeMap<Key,Value> tm1 = new TreeMap<Key,Value>();
@@ -675,7 +705,7 @@ public class CombinerTest {
     SummingArrayCombiner.setEncodingType(is, type);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -690,7 +720,7 @@ public class CombinerTest {
     SummingArrayCombiner.setEncodingType(is, encoderClass);
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -705,7 +735,7 @@ public class CombinerTest {
     SummingArrayCombiner.setEncodingType(is, encoderClass.getName());
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
-    ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+    ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
     ai.seek(new Range(), EMPTY_COL_FAMS, false);
 
     assertTrue(ai.hasTop());
@@ -721,7 +751,7 @@ public class CombinerTest {
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
     try {
-      ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+      ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
       Assert.fail();
     } catch (IllegalArgumentException e) {}
 
@@ -730,7 +760,7 @@ public class CombinerTest {
     Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
 
     try {
-      ai.init(new SortedMapIterator(tm1), is.getOptions(), null);
+      ai.init(new SortedMapIterator(tm1), is.getOptions(), SCAN_IE);
       Assert.fail();
     } catch (IllegalArgumentException e) {}
   }
@@ -786,4 +816,107 @@ public class CombinerTest {
     assertEquals(LongCombiner.safeAdd(Long.MAX_VALUE - 5, 5), Long.MAX_VALUE);
   }
 
+  private TreeMap<Key,Value> readAll(SortedKeyValueIterator<Key,Value> combiner) throws Exception {
+    TreeMap<Key,Value> ret = new TreeMap<Key,Value>();
+
+    combiner.seek(new Range(), EMPTY_COL_FAMS, false);
+
+    while (combiner.hasTop()) {
+      ret.put(new Key(combiner.getTopKey()), new Value(combiner.getTopValue()));
+      combiner.next();
+    }
+
+    return ret;
+  }
+
+  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env)
+      throws Exception {
+    runDeleteHandlingTest(input, expected, rofco, env, null, true);
+  }
+
+  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env,
+      String expectedLog) throws Exception {
+    runDeleteHandlingTest(input, expected, rofco, env, expectedLog, true);
+    if (expectedLog != null) {
+      // run test again... should not see log message again because cache is not cleared
+      runDeleteHandlingTest(input, expected, rofco, env, null, false);
+    }
+  }
+
+  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env,
+      String expectedLog, boolean clearLogMsgCache) throws Exception {
+    boolean deepCopy = expected == null;
+
+    if (clearLogMsgCache) {
+      CombinerTestUtil.clearLogCache();
+    }
+
+    StringWriter writer = new StringWriter();
+    WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);
+    Logger logger = Logger.getLogger(Combiner.class);
+    boolean additivity = logger.getAdditivity();
+    try {
+      logger.addAppender(appender);
+      logger.setAdditivity(false);
+
+      Combiner ai = new SummingCombiner();
+
+      IteratorSetting is = new IteratorSetting(1, SummingCombiner.class);
+      SummingCombiner.setEncodingType(is, LongCombiner.StringEncoder.class);
+      Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
+      if (rofco != null) {
+        Combiner.setReduceOnFullCompactionOnly(is, rofco);
+      }
+
+      ai.init(new SortedMapIterator(input), is.getOptions(), env);
+
+      if (deepCopy)
+        assertEquals(expected, readAll(ai.deepCopy(env)));
+      assertEquals(expected, readAll(ai));
+
+    } finally {
+      logger.removeAppender(appender);
+      logger.setAdditivity(additivity);
+    }
+
+    String logMsgs = writer.toString();
+    if (expectedLog == null) {
+      Assert.assertTrue("Expected 0 length log message, but got : " + logMsgs, logMsgs.length() == 0);
+    } else {
+      logMsgs = logMsgs.replace('\n', ' ');
+      Assert.assertTrue("Did not match pattern [" + expectedLog + "] in [" + logMsgs + "]", logMsgs.matches(expectedLog));
+    }
+  }
+
+  @Test
+  public void testDeleteHandling() throws Exception {
+    Encoder<Long> encoder = LongCombiner.STRING_ENCODER;
+
+    TreeMap<Key,Value> input = new TreeMap<Key,Value>();
+
+    IteratorEnvironment partialMajcIe = new CombinerIteratorEnvironment(IteratorScope.majc, false);
+    IteratorEnvironment fullMajcIe = new CombinerIteratorEnvironment(IteratorScope.majc, true);
+
+    // keys that aggregate
+    nkv(input, 1, 1, 1, 1, false, 4l, encoder);
+    nkv(input, 1, 1, 1, 2, true, 0l, encoder);
+    nkv(input, 1, 1, 1, 3, false, 2l, encoder);
+    nkv(input, 1, 1, 1, 4, false, 9l, encoder);
+
+    TreeMap<Key,Value> expected = new TreeMap<Key,Value>();
+    nkv(expected, 1, 1, 1, 1, false, 4l, encoder);
+    nkv(expected, 1, 1, 1, 2, true, 0l, encoder);
+    nkv(expected, 1, 1, 1, 4, false, 11l, encoder);
+
+    runDeleteHandlingTest(input, input, true, partialMajcIe);
+    runDeleteHandlingTest(input, expected, true, fullMajcIe);
+    runDeleteHandlingTest(input, expected, true, SCAN_IE);
+
+    runDeleteHandlingTest(input, expected, false, fullMajcIe, ".*ERROR.*ACCUMULO-2232.*");
+    runDeleteHandlingTest(input, expected, false, SCAN_IE);
+
+    runDeleteHandlingTest(input, expected, false, partialMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+    runDeleteHandlingTest(input, expected, null, partialMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+    runDeleteHandlingTest(input, expected, null, fullMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+  }
 }


[4/5] accumulo git commit: ACCUMULO-3462 use object reference equality to remove the minimum object queued

Posted by ec...@apache.org.
ACCUMULO-3462 use object reference equality to remove the minimum object queued


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/0013e46e
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/0013e46e
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/0013e46e

Branch: refs/heads/master
Commit: 0013e46e6b7f195f9d17dbd540b841d2933ac37b
Parents: f50db38 4b5ea53
Author: Eric C. Newton <er...@gmail.com>
Authored: Fri Oct 2 13:15:15 2015 -0400
Committer: Eric C. Newton <er...@gmail.com>
Committed: Fri Oct 2 13:15:15 2015 -0400

----------------------------------------------------------------------
 .../java/org/apache/accumulo/tserver/CompactionQueue.java | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/0013e46e/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
----------------------------------------------------------------------
diff --cc server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
index bbb6536,1e7c086..f87131e
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
@@@ -48,9 -35,15 +48,15 @@@ class CompactionQueue extends AbstractQ
      if (task.size() == 0)
        return null;
  
 -    Comparable min = Collections.min(task);
 -    Iterator<Comparable> iterator = task.iterator();
 +    TraceRunnable min = Collections.min(task, comparator);
-     task.remove(min);
-     return min;
++    Iterator<TraceRunnable> iterator = task.iterator();
+     while (iterator.hasNext()) {
+       if (iterator.next() == min) {
+         iterator.remove();
 -        return (Runnable) min;
++        return min;
+       }
+     }
+     throw new IllegalStateException("Minimum object found, but not there when removing");
    }
  
    @Override
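
The new code scans for the exact object returned by Collections.min and removes it by reference, rather than relying on List.remove(Object), which goes through equals() and can therefore remove a different but equal task. A standalone sketch of the same idiom (the generic type stands in for the tserver's TraceRunnable and is not the actual class):

import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;

public class IdentityRemoveExample {

  /**
   * Removes and returns the smallest element by reference identity, so the exact object
   * Collections.min picked is the one removed, even if other elements compare as equal to it.
   */
  static <T> T takeMin(List<T> tasks, Comparator<T> comparator) {
    if (tasks.isEmpty())
      return null;
    T min = Collections.min(tasks, comparator);
    Iterator<T> iterator = tasks.iterator();
    while (iterator.hasNext()) {
      if (iterator.next() == min) { // reference equality, not equals()
        iterator.remove();
        return min;
      }
    }
    throw new IllegalStateException("Minimum object found, but not there when removing");
  }

  public static void main(String[] args) {
    List<String> tasks = new LinkedList<String>(Arrays.asList("banana", "apple", "cherry"));
    // prints "apple" and leaves the other two elements queued
    System.out.println(takeMin(tasks, String.CASE_INSENSITIVE_ORDER));
    System.out.println(tasks);
  }
}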


[2/5] accumulo git commit: Merge branch '1.6' into 1.7

Posted by ec...@apache.org.
Merge branch '1.6' into 1.7

Conflicts:
	core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/f50db38f
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/f50db38f
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/f50db38f

Branch: refs/heads/master
Commit: f50db38f7ad71cb22480c457b7c42ab233c41b21
Parents: 38f36e2 7a1d6d9
Author: Keith Turner <kt...@apache.org>
Authored: Fri Oct 2 11:46:54 2015 -0400
Committer: Keith Turner <kt...@apache.org>
Committed: Fri Oct 2 11:46:54 2015 -0400

----------------------------------------------------------------------
 .../accumulo/core/iterators/Combiner.java       |  97 ++++++++++-
 .../core/iterators/CombinerTestUtil.java        |  23 +++
 .../iterators/user/BigDecimalCombinerTest.java  |   6 +-
 .../core/iterators/user/CombinerTest.java       | 170 ++++++++++++++++---
 4 files changed, 272 insertions(+), 24 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/f50db38f/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
index 1c2d8b5,091803b..b3cd93b
--- a/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
+++ b/core/src/main/java/org/apache/accumulo/core/iterators/Combiner.java
@@@ -31,10 -34,15 +34,16 @@@ import org.apache.accumulo.core.data.Ke
  import org.apache.accumulo.core.data.PartialKey;
  import org.apache.accumulo.core.data.Range;
  import org.apache.accumulo.core.data.Value;
+ import org.apache.accumulo.core.iterators.IteratorUtil.IteratorScope;
  import org.apache.accumulo.core.iterators.conf.ColumnSet;
  import org.apache.hadoop.io.Text;
 -import org.apache.log4j.Logger;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
  
+ import com.google.common.annotations.VisibleForTesting;
  import com.google.common.base.Splitter;
+ import com.google.common.cache.Cache;
+ import com.google.common.cache.CacheBuilder;
  import com.google.common.collect.Lists;
  
  /**
@@@ -49,12 -59,21 +60,23 @@@
   * This reduce method will be passed the most recent Key and an iterator over the Values for all non-deleted versions of that Key. A combiner will not combine
   * keys that differ by more than the timestamp.
   *
+  * <p>
   * This class and its implementations do not automatically filter out unwanted columns from those being combined, thus it is generally recommended to use a
   * {@link Combiner} implementation with the {@link ScannerBase#fetchColumnFamily(Text)} or {@link ScannerBase#fetchColumn(Text, Text)} methods.
+  *
+  * <p>
+  * WARNING: Using deletes with Combiners may not work as intended. See {@link #setReduceOnFullCompactionOnly(IteratorSetting, boolean)}
   */
  public abstract class Combiner extends WrappingIterator implements OptionDescriber {
 -  static final Logger sawDeleteLog = Logger.getLogger(Combiner.class.getName()+".SawDelete");
++
++  static final Logger sawDeleteLog = LoggerFactory.getLogger(Combiner.class.getName() + ".SawDelete");
++
    protected static final String COLUMNS_OPTION = "columns";
    protected static final String ALL_OPTION = "all";
+   protected static final String REDUCE_ON_FULL_COMPACTION_ONLY_OPTION = "reduceOnFullCompactionOnly";
+ 
+   private boolean isMajorCompaction;
+   private boolean reduceOnFullCompactionOnly;
  
    /**
     * A Java Iterator that iterates over the Values for a given Key from a source SortedKeyValueIterator.
@@@ -147,6 -166,27 +169,28 @@@
  
    private Key workKey = new Key();
  
+   @VisibleForTesting
+   static final Cache<String,Boolean> loggedMsgCache = CacheBuilder.newBuilder().expireAfterWrite(1, TimeUnit.HOURS).maximumSize(10000).build();
+ 
+   private void sawDelete() {
+     if (isMajorCompaction && !reduceOnFullCompactionOnly) {
+       try {
+         loggedMsgCache.get(this.getClass().getName(), new Callable<Boolean>() {
+           @Override
+           public Boolean call() throws Exception {
 -            sawDeleteLog.error("Combiner of type " + Combiner.this.getClass().getSimpleName()
 -                + " saw a delete during a partial compaction.  This could cause undesired results.  See ACCUMULO-2232.  Will not log subsequent occurrences for at least 1 hour.");
++            sawDeleteLog.error(
++                "Combiner of type {} saw a delete during a partial compaction.  This could cause undesired results.  See ACCUMULO-2232.  Will not log subsequent "
++                    + "occurrences for at least 1 hour.", Combiner.this.getClass().getSimpleName());
+             // the value is not used and does not matter
+             return Boolean.TRUE;
+           }
+         });
+       } catch (ExecutionException e) {
+         throw new RuntimeException(e);
+       }
+     }
+   }
+ 
    /**
     * Sets the topKey and topValue based on the top key of the source. If the column of the source top key is in the set of combiners, topKey will be the top key
     * of the source and topValue will be the result of the reduce method. Otherwise, topKey and topValue will be unchanged. (They are always set to null before
@@@ -311,4 -373,31 +377,31 @@@
    public static void setCombineAllColumns(IteratorSetting is, boolean combineAllColumns) {
      is.addOption(ALL_OPTION, Boolean.toString(combineAllColumns));
    }
+ 
+   /**
+    * Combiners may not work correctly with deletes. Sometimes when Accumulo compacts the files in a tablet, it only compacts a subset of the files. If a delete
 -   * marker exists in one of the files that is not being compacted, then data that should be deleted may be combined. See
 -   * <a href="https://issues.apache.org/jira/browse/ACCUMULO-2232">ACCUMULO-2232</a> for more information. For correctness deletes should not be used with
 -   * columns that are combined OR this option should be set to true.
++   * marker exists in one of the files that is not being compacted, then data that should be deleted may be combined. See <a
++   * href="https://issues.apache.org/jira/browse/ACCUMULO-2232">ACCUMULO-2232</a> for more information. For correctness deletes should not be used with columns
++   * that are combined OR this option should be set to true.
+    *
+    * <p>
+    * When this option is set to true, all data is passed through during partial major compactions and no reducing is done. Reducing is only done during scan and
+    * full major compactions, when deletes can be correctly handled. Only reducing on full major compactions may have negative performance implications, leaving
+    * lots of work to be done at scan time.
+    *
+    * <p>
+    * When this option is set to false, combiners will log an error if a delete is seen during any compaction. This can be suppressed by adjusting logging
+    * configuration. Errors will not be logged more than once an hour per Combiner, regardless of how many deletes are seen.
+    *
+    * <p>
+    * This method was added in 1.6.4 and 1.7.1. If you want your code to work in earlier versions of 1.6 and 1.7 then do not call this method. If not set this
+    * property defaults to false in order to maintain compatibility.
+    *
+    * @since 1.6.4 1.7.1 1.8.0
+    */
+ 
+   public static void setReduceOnFullCompactionOnly(IteratorSetting is, boolean reduceOnFullCompactionOnly) {
+     is.addOption(REDUCE_ON_FULL_COMPACTION_ONLY_OPTION, Boolean.toString(reduceOnFullCompactionOnly));
+   }
+ 
  }
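
Beyond resolving the merge conflict, the 1.7 version of the commit switches the new logger to slf4j and uses its parameterized form, which defers building the message string until the logger actually accepts the event. A small illustration (the logger name and message are arbitrary):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ParameterizedLoggingExample {

  private static final Logger log = LoggerFactory.getLogger("example.SawDelete");

  public static void main(String[] args) {
    String combinerName = "SummingCombiner";
    // The {} placeholder is substituted only if ERROR is enabled for this logger,
    // so no string concatenation happens when the message is filtered out.
    log.error("Combiner of type {} saw a delete during a partial compaction.", combinerName);
  }
}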

http://git-wip-us.apache.org/repos/asf/accumulo/blob/f50db38f/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
index 0000000,f874173..f9ef933
mode 000000,100644..100644
--- a/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
+++ b/core/src/test/java/org/apache/accumulo/core/iterators/CombinerTestUtil.java
@@@ -1,0 -1,23 +1,23 @@@
+ /*
+  * Licensed to the Apache Software Foundation (ASF) under one or more
+  * contributor license agreements.  See the NOTICE file distributed with
+  * this work for additional information regarding copyright ownership.
+  * The ASF licenses this file to You under the Apache License, Version 2.0
+  * (the "License"); you may not use this file except in compliance with
+  * the License.  You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+ package org.apache.accumulo.core.iterators;
+ 
+ public class CombinerTestUtil {
 -  public static void clearLogCache(){
++  public static void clearLogCache() {
+     Combiner.loggedMsgCache.invalidateAll();
+   }
+ }

http://git-wip-us.apache.org/repos/asf/accumulo/blob/f50db38f/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
----------------------------------------------------------------------
diff --cc core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
index 26326a5,152e12f..a442534
--- a/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
+++ b/core/src/test/java/org/apache/accumulo/core/iterators/user/CombinerTest.java
@@@ -790,4 -816,107 +820,106 @@@ public class CombinerTest 
      assertEquals(LongCombiner.safeAdd(Long.MAX_VALUE - 5, 5), Long.MAX_VALUE);
    }
  
+   private TreeMap<Key,Value> readAll(SortedKeyValueIterator<Key,Value> combiner) throws Exception {
+     TreeMap<Key,Value> ret = new TreeMap<Key,Value>();
+ 
+     combiner.seek(new Range(), EMPTY_COL_FAMS, false);
+ 
+     while (combiner.hasTop()) {
+       ret.put(new Key(combiner.getTopKey()), new Value(combiner.getTopValue()));
+       combiner.next();
+     }
+ 
+     return ret;
+   }
+ 
 -  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env)
 -      throws Exception {
++  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env) throws Exception {
+     runDeleteHandlingTest(input, expected, rofco, env, null, true);
+   }
+ 
 -  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env,
 -      String expectedLog) throws Exception {
++  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env, String expectedLog)
++      throws Exception {
+     runDeleteHandlingTest(input, expected, rofco, env, expectedLog, true);
+     if (expectedLog != null) {
+       // run test again... should not see log message again because cache is not cleared
+       runDeleteHandlingTest(input, expected, rofco, env, null, false);
+     }
+   }
+ 
 -  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env,
 -      String expectedLog, boolean clearLogMsgCache) throws Exception {
++  private void runDeleteHandlingTest(TreeMap<Key,Value> input, TreeMap<Key,Value> expected, Boolean rofco, IteratorEnvironment env, String expectedLog,
++      boolean clearLogMsgCache) throws Exception {
+     boolean deepCopy = expected == null;
+ 
+     if (clearLogMsgCache) {
+       CombinerTestUtil.clearLogCache();
+     }
+ 
+     StringWriter writer = new StringWriter();
+     WriterAppender appender = new WriterAppender(new PatternLayout("%p, %m%n"), writer);
+     Logger logger = Logger.getLogger(Combiner.class);
+     boolean additivity = logger.getAdditivity();
+     try {
+       logger.addAppender(appender);
+       logger.setAdditivity(false);
+ 
+       Combiner ai = new SummingCombiner();
+ 
+       IteratorSetting is = new IteratorSetting(1, SummingCombiner.class);
+       SummingCombiner.setEncodingType(is, LongCombiner.StringEncoder.class);
+       Combiner.setColumns(is, Collections.singletonList(new IteratorSetting.Column("cf001")));
+       if (rofco != null) {
+         Combiner.setReduceOnFullCompactionOnly(is, rofco);
+       }
+ 
+       ai.init(new SortedMapIterator(input), is.getOptions(), env);
+ 
+       if (deepCopy)
+         assertEquals(expected, readAll(ai.deepCopy(env)));
+       assertEquals(expected, readAll(ai));
+ 
+     } finally {
+       logger.removeAppender(appender);
+       logger.setAdditivity(additivity);
+     }
+ 
+     String logMsgs = writer.toString();
+     if (expectedLog == null) {
+       Assert.assertTrue("Expected 0 length log message, but got : " + logMsgs, logMsgs.length() == 0);
+     } else {
+       logMsgs = logMsgs.replace('\n', ' ');
+       Assert.assertTrue("Did not match pattern [" + expectedLog + "] in [" + logMsgs + "]", logMsgs.matches(expectedLog));
+     }
+   }
+ 
+   @Test
+   public void testDeleteHandling() throws Exception {
+     Encoder<Long> encoder = LongCombiner.STRING_ENCODER;
+ 
+     TreeMap<Key,Value> input = new TreeMap<Key,Value>();
+ 
+     IteratorEnvironment partialMajcIe = new CombinerIteratorEnvironment(IteratorScope.majc, false);
+     IteratorEnvironment fullMajcIe = new CombinerIteratorEnvironment(IteratorScope.majc, true);
+ 
+     // keys that aggregate
+     nkv(input, 1, 1, 1, 1, false, 4l, encoder);
+     nkv(input, 1, 1, 1, 2, true, 0l, encoder);
+     nkv(input, 1, 1, 1, 3, false, 2l, encoder);
+     nkv(input, 1, 1, 1, 4, false, 9l, encoder);
+ 
+     TreeMap<Key,Value> expected = new TreeMap<Key,Value>();
+     nkv(expected, 1, 1, 1, 1, false, 4l, encoder);
+     nkv(expected, 1, 1, 1, 2, true, 0l, encoder);
+     nkv(expected, 1, 1, 1, 4, false, 11l, encoder);
+ 
+     runDeleteHandlingTest(input, input, true, partialMajcIe);
+     runDeleteHandlingTest(input, expected, true, fullMajcIe);
+     runDeleteHandlingTest(input, expected, true, SCAN_IE);
+ 
+     runDeleteHandlingTest(input, expected, false, fullMajcIe, ".*ERROR.*ACCUMULO-2232.*");
+     runDeleteHandlingTest(input, expected, false, SCAN_IE);
+ 
+     runDeleteHandlingTest(input, expected, false, partialMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+     runDeleteHandlingTest(input, expected, null, partialMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+     runDeleteHandlingTest(input, expected, null, fullMajcIe, ".*ERROR.*SummingCombiner.*ACCUMULO-2232.*");
+   }
  }


[3/5] accumulo git commit: ACCUMULO-3462 use object reference equality to remove the minimum object queued

Posted by ec...@apache.org.
ACCUMULO-3462 use object reference equality to remove the minimum object queued


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/4b5ea535
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/4b5ea535
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/4b5ea535

Branch: refs/heads/master
Commit: 4b5ea535516d98ac482e33e210a6c48fb8f12c43
Parents: 7a1d6d9
Author: Eric C. Newton <er...@gmail.com>
Authored: Fri Oct 2 13:09:57 2015 -0400
Committer: Eric C. Newton <er...@gmail.com>
Committed: Fri Oct 2 13:09:57 2015 -0400

----------------------------------------------------------------------
 .../java/org/apache/accumulo/tserver/CompactionQueue.java | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/accumulo/blob/4b5ea535/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
----------------------------------------------------------------------
diff --git a/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java b/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
index 0cb04a7..1e7c086 100644
--- a/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
+++ b/server/tserver/src/main/java/org/apache/accumulo/tserver/CompactionQueue.java
@@ -36,8 +36,14 @@ public class CompactionQueue extends AbstractQueue<Runnable> implements Blocking
       return null;
 
     Comparable min = Collections.min(task);
-    task.remove(min);
-    return (Runnable) min;
+    Iterator<Comparable> iterator = task.iterator();
+    while (iterator.hasNext()) {
+      if (iterator.next() == min) {
+        iterator.remove();
+        return (Runnable) min;
+      }
+    }
+    throw new IllegalStateException("Minimum object found, but not there when removing");
   }
 
   @Override


[5/5] accumulo git commit: Merge branch '1.7'

Posted by ec...@apache.org.
Merge branch '1.7'


Project: http://git-wip-us.apache.org/repos/asf/accumulo/repo
Commit: http://git-wip-us.apache.org/repos/asf/accumulo/commit/17ae2f91
Tree: http://git-wip-us.apache.org/repos/asf/accumulo/tree/17ae2f91
Diff: http://git-wip-us.apache.org/repos/asf/accumulo/diff/17ae2f91

Branch: refs/heads/master
Commit: 17ae2f919024daef4a6b4dff7ac92aefd8805de2
Parents: 18e834e 0013e46
Author: Eric C. Newton <er...@gmail.com>
Authored: Fri Oct 2 13:15:30 2015 -0400
Committer: Eric C. Newton <er...@gmail.com>
Committed: Fri Oct 2 13:15:30 2015 -0400

----------------------------------------------------------------------
 .../accumulo/core/iterators/Combiner.java       |  97 ++++++++++-
 .../core/iterators/CombinerTestUtil.java        |  23 +++
 .../iterators/user/BigDecimalCombinerTest.java  |   6 +-
 .../core/iterators/user/CombinerTest.java       | 170 ++++++++++++++++---
 .../accumulo/tserver/CompactionQueue.java       |  10 +-
 5 files changed, 280 insertions(+), 26 deletions(-)
----------------------------------------------------------------------