Posted to commits@hbase.apache.org by ec...@apache.org on 2013/08/15 07:44:57 UTC
svn commit: r1514163 [2/2] - in /hbase/trunk:
hbase-it/src/test/java/org/apache/hadoop/hbase/
hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/
hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/
hbase-it/src/test/java/org/apache/hadoop/h...
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/actions/UnbalanceRegionsAction.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.actions;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.hadoop.hbase.ClusterStatus;
+import org.apache.hadoop.hbase.ServerName;
+
+/**
+ * Action that tries to unbalance the regions of a cluster.
+ */
+public class UnbalanceRegionsAction extends Action {
+ private double fractionOfRegions;
+ private double fractionOfServers;
+
+ /**
+ * Unbalances the regions on the cluster by choosing "target" servers, and moving
+ * some regions from each of the non-target servers to random target servers.
+ * @param fractionOfRegions Fraction of regions to move from each server.
+ * @param fractionOfServers Fraction of servers to be chosen as targets.
+ */
+ public UnbalanceRegionsAction(double fractionOfRegions, double fractionOfServers) {
+ this.fractionOfRegions = fractionOfRegions;
+ this.fractionOfServers = fractionOfServers;
+ }
+
+ @Override
+ public void perform() throws Exception {
+ LOG.info("Unbalancing regions");
+ ClusterStatus status = this.cluster.getClusterStatus();
+ List<ServerName> victimServers = new LinkedList<ServerName>(status.getServers());
+ int targetServerCount = (int)Math.ceil(fractionOfServers * victimServers.size());
+ List<ServerName> targetServers = new ArrayList<ServerName>(targetServerCount);
+ for (int i = 0; i < targetServerCount; ++i) {
+ int victimIx = RandomUtils.nextInt(victimServers.size());
+ targetServers.add(victimServers.remove(victimIx));
+ }
+ unbalanceRegions(status, victimServers, targetServers, fractionOfRegions);
+ }
+}
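
For orientation, a minimal usage sketch (not part of this commit) of driving the action above directly from a test. The Action.init(ActionContext)/perform() lifecycle is taken from the Action base class used elsewhere in this change; the fraction values and the already-initialized IntegrationTestingUtility "util" are illustrative assumptions.

    // Sketch only: move roughly 30% of each server's regions onto roughly 20% of the servers.
    UnbalanceRegionsAction action = new UnbalanceRegionsAction(0.3, 0.2);
    action.init(new Action.ActionContext(util)); // util: assumed, already-initialized IntegrationTestingUtility
    action.perform();
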
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/CalmMonkeyFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/CalmMonkeyFactory.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/CalmMonkeyFactory.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/CalmMonkeyFactory.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.factories;
+
+import org.apache.hadoop.hbase.chaos.monkies.CalmChaosMonkey;
+import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
+
+
+/**
+ * Factory to create a calm ChaosMonkey.
+ */
+public class CalmMonkeyFactory extends MonkeyFactory {
+ @Override
+ public ChaosMonkey build() {
+ return new CalmChaosMonkey();
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/MonkeyFactory.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.factories;
+
+import java.util.Map;
+import java.util.Set;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
+
+/**
+ * Base class for factories that create a ChaosMonkey.
+ */
+public abstract class MonkeyFactory {
+
+ protected String tableName;
+ protected Set<String> columnFamilies;
+ protected IntegrationTestingUtility util;
+
+ public MonkeyFactory setTableName(String tableName) {
+ this.tableName = tableName;
+ return this;
+ }
+
+ public MonkeyFactory setColumnFamilies(Set<String> columnFamilies) {
+ this.columnFamilies = columnFamilies;
+ return this;
+ }
+
+ public MonkeyFactory setUtil(IntegrationTestingUtility util) {
+ this.util = util;
+ return this;
+ }
+
+ public abstract ChaosMonkey build();
+
+
+ public static final String CALM = "calm";
+ public static final String SLOW_DETERMINISTIC = "slowDeterministic";
+ public static final String UNBALANCE = "unbalance";
+
+ public static Map<String, MonkeyFactory> FACTORIES = ImmutableMap.<String,MonkeyFactory>builder()
+ .put(CALM, new CalmMonkeyFactory())
+ .put(SLOW_DETERMINISTIC, new SlowDeterministicMonkeyFactory())
+ .put(UNBALANCE, new UnbalanceMonkeyFactory())
+ .build();
+
+ public static MonkeyFactory getFactory(String factoryName) {
+ MonkeyFactory fact = FACTORIES.get(factoryName);
+ if (fact == null) {
+ fact = FACTORIES.get(CALM);
+ }
+ return fact;
+ }
+}
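
A hedged usage sketch (not part of this commit) combining the getFactory() lookup with the fluent setters above; the table name, column family, and "util" instance are placeholders, and Sets is Guava's com.google.common.collect.Sets.

    // Unknown factory names fall back to the "calm" (no-op) monkey.
    MonkeyFactory factory = MonkeyFactory.getFactory(MonkeyFactory.SLOW_DETERMINISTIC);
    ChaosMonkey monkey = factory
        .setTableName("cluster_test")                  // placeholder table name
        .setColumnFamilies(Sets.newHashSet("test_cf")) // placeholder column family
        .setUtil(util)                                 // assumed initialized IntegrationTestingUtility
        .build();
    monkey.start();
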
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/SlowDeterministicMonkeyFactory.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.factories;
+
+import org.apache.hadoop.hbase.chaos.actions.Action;
+import org.apache.hadoop.hbase.chaos.actions.AddColumnAction;
+import org.apache.hadoop.hbase.chaos.actions.BatchRestartRsAction;
+import org.apache.hadoop.hbase.chaos.actions.ChangeEncodingAction;
+import org.apache.hadoop.hbase.chaos.actions.ChangeVersionsAction;
+import org.apache.hadoop.hbase.chaos.actions.CompactRandomRegionOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.CompactTableAction;
+import org.apache.hadoop.hbase.chaos.actions.FlushRandomRegionOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.FlushTableAction;
+import org.apache.hadoop.hbase.chaos.actions.MergeRandomAdjacentRegionsOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.MoveRandomRegionOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.MoveRegionsOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.RemoveColumnAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartRandomRsAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction;
+import org.apache.hadoop.hbase.chaos.actions.RollingBatchRestartRsAction;
+import org.apache.hadoop.hbase.chaos.actions.SnapshotTableAction;
+import org.apache.hadoop.hbase.chaos.actions.SplitRandomRegionOfTableAction;
+import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
+import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
+import org.apache.hadoop.hbase.chaos.policies.CompositeSequentialPolicy;
+import org.apache.hadoop.hbase.chaos.policies.DoActionsOncePolicy;
+import org.apache.hadoop.hbase.chaos.policies.PeriodicRandomActionPolicy;
+
+public class SlowDeterministicMonkeyFactory extends MonkeyFactory {
+ @Override
+ public ChaosMonkey build() {
+
+ // Actions such as compacting or flushing a table or region, or moving a single
+ // region around. These are relatively non-destructive and can be executed
+ // more frequently.
+ Action[] actions1 = new Action[] {
+ new CompactTableAction(tableName, 0.5f),
+ new CompactRandomRegionOfTableAction(tableName, 0.6f),
+ new FlushTableAction(tableName),
+ new FlushRandomRegionOfTableAction(tableName),
+ new MoveRandomRegionOfTableAction(tableName)
+ };
+
+ // Actions such as split/merge/snapshot.
+ // They should not cause data loss or unreliability,
+ // such as a region stuck in transition.
+ Action[] actions2 = new Action[] {
+ new SplitRandomRegionOfTableAction(tableName),
+ new MergeRandomAdjacentRegionsOfTableAction(tableName),
+ new SnapshotTableAction(tableName),
+ new AddColumnAction(tableName),
+ new RemoveColumnAction(tableName, columnFamilies),
+ new ChangeEncodingAction(tableName),
+ new ChangeVersionsAction(tableName)
+ };
+
+ // Destructive actions that restart servers and move regions around.
+ Action[] actions3 = new Action[] {
+ new MoveRegionsOfTableAction(tableName),
+ new RestartRandomRsAction(60000),
+ new BatchRestartRsAction(5000, 0.5f),
+ new RestartActiveMasterAction(5000),
+ new RollingBatchRestartRsAction(5000, 1.0f),
+ new RestartRsHoldingMetaAction(35000)
+ };
+
+ return new PolicyBasedChaosMonkey(util,
+ new PeriodicRandomActionPolicy(60 * 1000, actions1),
+ new PeriodicRandomActionPolicy(90 * 1000, actions2),
+ new CompositeSequentialPolicy(
+ new DoActionsOncePolicy(150 * 1000, actions3),
+ new PeriodicRandomActionPolicy(150 * 1000, actions3)));
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/factories/UnbalanceMonkeyFactory.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.factories;
+
+import org.apache.hadoop.hbase.chaos.actions.UnbalanceKillAndRebalanceAction;
+import org.apache.hadoop.hbase.chaos.monkies.ChaosMonkey;
+import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
+import org.apache.hadoop.hbase.chaos.policies.PeriodicRandomActionPolicy;
+import org.apache.hadoop.hbase.chaos.policies.Policy;
+
+public class UnbalanceMonkeyFactory extends MonkeyFactory {
+ /** How often to introduce the chaos. If it is too frequent, the sequence of kills on a
+ * minicluster can cause the test to fail when a Put runs out of retries. */
+ private static final long CHAOS_EVERY_MS = 65 * 1000;
+
+ @Override
+ public ChaosMonkey build() {
+ Policy chaosPolicy = new PeriodicRandomActionPolicy(
+ CHAOS_EVERY_MS,
+ new UnbalanceKillAndRebalanceAction()
+ );
+
+ return new PolicyBasedChaosMonkey(util, chaosPolicy);
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/CalmChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/CalmChaosMonkey.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/CalmChaosMonkey.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/CalmChaosMonkey.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.monkies;
+
+/**
+ * Chaos Monkey that does nothing.
+ */
+public class CalmChaosMonkey extends ChaosMonkey {
+ @Override
+ public void start() throws Exception {
+
+ }
+
+ @Override
+ public void stop(String why) {
+
+ }
+
+ @Override
+ public boolean isStopped() {
+ return false;
+ }
+
+ @Override
+ public void waitForStop() throws InterruptedException {
+
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/ChaosMonkey.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.monkies;
+
+import org.apache.hadoop.hbase.Stoppable;
+
+public abstract class ChaosMonkey implements Stoppable {
+ public abstract void start() throws Exception;
+
+ @Override
+ public abstract void stop(String why);
+
+ @Override
+ public abstract boolean isStopped();
+
+ public abstract void waitForStop() throws InterruptedException;
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/monkies/PolicyBasedChaosMonkey.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.monkies;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.chaos.policies.Policy;
+import org.apache.hadoop.hbase.util.Pair;
+
+/**
+ * A utility to inject faults in a running cluster.
+ * <p>
+ * ChaosMonkey defines Actions and Policies. Actions are sequences of events, such as:
+ * - Select a random server to kill
+ * - Sleep for 5 sec
+ * - Start the server on the same host
+ * Actions can also be complex events, like rolling restart of all of the servers.
+ * <p>
+ * Policies on the other hand are responsible for executing the actions based on a strategy.
+ * The default policy is to execute a random action every minute based on predefined action
+ * weights. ChaosMonkey executes predefined named policies until it is stopped. More than one
+ * policy can be active at any time.
+ * <p>
+ * Chaos monkey can be run from the command line, or can be invoked from integration tests.
+ * See {@link org.apache.hadoop.hbase.IntegrationTestIngest} or other integration tests that use
+ * chaos monkey for code examples.
+ * <p>
+ * The ChaosMonkey class is inspired by Netflix's same-named tool:
+ * http://techblog.netflix.com/2012/07/chaos-monkey-released-into-wild.html
+ */
+public class PolicyBasedChaosMonkey extends ChaosMonkey {
+
+ private static final Log LOG = LogFactory.getLog(PolicyBasedChaosMonkey.class);
+ private static final long ONE_SEC = 1000;
+ private static final long FIVE_SEC = 5 * ONE_SEC;
+ private static final long ONE_MIN = 60 * ONE_SEC;
+
+ public static final long TIMEOUT = ONE_MIN;
+
+ final IntegrationTestingUtility util;
+
+ /**
+ * Construct a new ChaosMonkey
+ * @param util the already-configured IntegrationTestingUtility
+ * @param policies custom policies to use
+ */
+ public PolicyBasedChaosMonkey(IntegrationTestingUtility util, Policy... policies) {
+ this.util = util;
+ this.policies = policies;
+ }
+
+ public PolicyBasedChaosMonkey(IntegrationTestingUtility util, Collection<Policy> policies) {
+ this.util = util;
+ this.policies = policies.toArray(new Policy[policies.size()]);
+ }
+
+
+ /** Selects a random item from the given items */
+ public static <T> T selectRandomItem(T[] items) {
+ return items[RandomUtils.nextInt(items.length)];
+ }
+
+ /** Selects a random item from the given items with weights */
+ public static <T> T selectWeightedRandomItem(List<Pair<T, Integer>> items) {
+ int totalWeight = 0;
+ for (Pair<T, Integer> pair : items) {
+ totalWeight += pair.getSecond();
+ }
+
+ int cutoff = RandomUtils.nextInt(totalWeight);
+ int cumulative = 0;
+ T item = null;
+
+ //warn: O(n)
+ for (int i=0; i<items.size(); i++) {
+ int curWeight = items.get(i).getSecond();
+ if (cutoff < cumulative + curWeight) {
+ item = items.get(i).getFirst();
+ break;
+ }
+ cumulative += curWeight;
+ }
+
+ return item;
+ }
+
+ /** Selects and returns ceil(ratio * items.length) random items from the given array */
+ public static <T> List<T> selectRandomItems(T[] items, float ratio) {
+ int remaining = (int)Math.ceil(items.length * ratio);
+
+ List<T> selectedItems = new ArrayList<T>(remaining);
+
+ for (int i=0; i<items.length && remaining > 0; i++) {
+ if (RandomUtils.nextFloat() < ((float)remaining/(items.length-i))) {
+ selectedItems.add(items[i]);
+ remaining--;
+ }
+ }
+
+ return selectedItems;
+ }
+
+ private Policy[] policies;
+ private Thread[] monkeyThreads;
+
+ @Override
+ public void start() throws Exception {
+ monkeyThreads = new Thread[policies.length];
+
+ for (int i=0; i<policies.length; i++) {
+ policies[i].init(new Policy.PolicyContext(this.util));
+ Thread monkeyThread = new Thread(policies[i]);
+ monkeyThread.start();
+ monkeyThreads[i] = monkeyThread;
+ }
+ }
+
+ @Override
+ public void stop(String why) {
+ for (Policy policy : policies) {
+ policy.stop(why);
+ }
+ }
+
+ @Override
+ public boolean isStopped() {
+ return policies[0].isStopped();
+ }
+
+ /**
+ * Wait for ChaosMonkey to stop.
+ * @throws InterruptedException
+ */
+ @Override
+ public void waitForStop() throws InterruptedException {
+ for (Thread monkeyThread : monkeyThreads) {
+ monkeyThread.join();
+ }
+ }
+}
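
To make the start/stop/waitForStop lifecycle above concrete, a short sketch (not part of this commit); the table name and the initialized IntegrationTestingUtility "util" are assumptions.

    // Run a monkey for the duration of a test, then stop it and join its policy threads.
    ChaosMonkey monkey = new PolicyBasedChaosMonkey(util,
        new PeriodicRandomActionPolicy(60 * 1000, new FlushTableAction("cluster_test")));
    monkey.start();
    // ... run the test workload ...
    monkey.stop("test finished");
    monkey.waitForStop();
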
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/CompositeSequentialPolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/CompositeSequentialPolicy.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/CompositeSequentialPolicy.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/CompositeSequentialPolicy.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.policies;
+
+import java.util.Arrays;
+import java.util.List;
+
+/** A policy that runs multiple other policies one after the other */
+public class CompositeSequentialPolicy extends Policy {
+ private List<Policy> policies;
+ public CompositeSequentialPolicy(Policy... policies) {
+ this.policies = Arrays.asList(policies);
+ }
+
+ @Override
+ public void stop(String why) {
+ super.stop(why);
+ for (Policy p : policies) {
+ p.stop(why);
+ }
+ }
+
+ @Override
+ public void run() {
+ for (Policy p : policies) {
+ p.run();
+ }
+ }
+
+ @Override
+ public void init(PolicyContext context) throws Exception {
+ super.init(context);
+ for (Policy p : policies) {
+ p.init(context);
+ }
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/DoActionsOncePolicy.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.policies;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hbase.chaos.actions.Action;
+import org.apache.hadoop.util.StringUtils;
+
+/** A policy which performs a sequence of actions deterministically. */
+public class DoActionsOncePolicy extends PeriodicPolicy {
+ private List<Action> actions;
+
+ public DoActionsOncePolicy(long periodMs, List<Action> actions) {
+ super(periodMs);
+ this.actions = new ArrayList<Action>(actions);
+ }
+
+ public DoActionsOncePolicy(long periodMs, Action... actions) {
+ this(periodMs, Arrays.asList(actions));
+ }
+
+ @Override
+ protected void runOneIteration() {
+ if (actions.isEmpty()) {
+ this.stop("done");
+ return;
+ }
+ Action action = actions.remove(0);
+
+ try {
+ action.perform();
+ } catch (Exception ex) {
+ LOG.warn("Exception occurred while performing action: "
+ + StringUtils.stringifyException(ex));
+ }
+ }
+
+ @Override
+ public void init(PolicyContext context) throws Exception {
+ super.init(context);
+ for (Action action : actions) {
+ action.init(this.context);
+ }
+ }
+}
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicPolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicPolicy.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicPolicy.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicPolicy.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.policies;
+
+import org.apache.commons.lang.math.RandomUtils;
+import org.apache.hadoop.hbase.util.Threads;
+
+/** A policy that runs one iteration of work at a fixed time interval, after an initial random jitter. */
+public abstract class PeriodicPolicy extends Policy {
+ private long periodMs;
+
+ public PeriodicPolicy(long periodMs) {
+ this.periodMs = periodMs;
+ }
+
+ @Override
+ public void run() {
+ // Add some jitter.
+ int jitter = RandomUtils.nextInt((int) periodMs);
+ LOG.info("Sleeping for " + jitter + " to add jitter");
+ Threads.sleep(jitter);
+
+ while (!isStopped()) {
+ long start = System.currentTimeMillis();
+ runOneIteration();
+
+ if (isStopped()) return;
+ long sleepTime = periodMs - (System.currentTimeMillis() - start);
+ if (sleepTime > 0) {
+ LOG.info("Sleeping for: " + sleepTime);
+ Threads.sleep(sleepTime);
+ }
+ }
+ }
+
+ protected abstract void runOneIteration();
+
+ @Override
+ public void init(PolicyContext context) throws Exception {
+ super.init(context);
+ LOG.info("Using ChaosMonkey Policy: " + this.getClass() + ", period: " + periodMs);
+ }
+}
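
A minimal hypothetical subclass (not part of this commit) showing the runOneIteration() contract that PeriodicPolicy leaves abstract; the class name and body are illustrative only.

    // Logs once per period, after the initial random jitter, until stopped.
    public class ExampleLoggingPolicy extends PeriodicPolicy {
      public ExampleLoggingPolicy(long periodMs) {
        super(periodMs);
      }
      @Override
      protected void runOneIteration() {
        LOG.info("one chaos iteration"); // LOG is inherited from Policy
      }
    }
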
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/PeriodicRandomActionPolicy.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.policies;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hbase.chaos.actions.Action;
+import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * A policy that picks a random action according to the given weights
+ * and performs it every configurable period.
+ */
+public class PeriodicRandomActionPolicy extends PeriodicPolicy {
+ private List<Pair<Action, Integer>> actions;
+
+ public PeriodicRandomActionPolicy(long periodMs, List<Pair<Action, Integer>> actions) {
+ super(periodMs);
+ this.actions = actions;
+ }
+
+ public PeriodicRandomActionPolicy(long periodMs, Pair<Action, Integer>... actions) {
+ // We don't expect it to be modified.
+ this(periodMs, Arrays.asList(actions));
+ }
+
+ public PeriodicRandomActionPolicy(long periodMs, Action... actions) {
+ super(periodMs);
+ this.actions = new ArrayList<Pair<Action, Integer>>(actions.length);
+ for (Action action : actions) {
+ this.actions.add(new Pair<Action, Integer>(action, 1));
+ }
+ }
+
+ @Override
+ protected void runOneIteration() {
+ Action action = PolicyBasedChaosMonkey.selectWeightedRandomItem(actions);
+ try {
+ action.perform();
+ } catch (Exception ex) {
+ LOG.warn("Exception occurred while performing action: "
+ + StringUtils.stringifyException(ex));
+ }
+ }
+
+ @Override
+ public void init(PolicyContext context) throws Exception {
+ super.init(context);
+ for (Pair<Action, Integer> action : actions) {
+ action.getFirst().init(this.context);
+ }
+ }
+}
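
A hedged sketch (not part of this commit) of the weighted Pair-based constructor above, giving one action three times the selection weight of another; the actions, weights, and table name are illustrative.

    Policy policy = new PeriodicRandomActionPolicy(30 * 1000,
        new Pair<Action, Integer>(new FlushTableAction("cluster_test"), 3),
        new Pair<Action, Integer>(new SnapshotTableAction("cluster_test"), 1));
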
Added: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java?rev=1514163&view=auto
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java (added)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/chaos/policies/Policy.java Thu Aug 15 05:44:56 2013
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.chaos.policies;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.IntegrationTestingUtility;
+import org.apache.hadoop.hbase.chaos.actions.Action;
+import org.apache.hadoop.hbase.util.StoppableImplementation;
+
+/**
+ * A policy to introduce chaos to the cluster
+ */
+public abstract class Policy extends StoppableImplementation implements Runnable {
+
+ protected static Log LOG = LogFactory.getLog(Policy.class);
+
+ protected PolicyContext context;
+
+ public void init(PolicyContext context) throws Exception {
+ this.context = context;
+ }
+
+ /**
+ * A context for a Policy
+ */
+ public static class PolicyContext extends Action.ActionContext {
+ public PolicyContext(IntegrationTestingUtility util) {
+ super(util);
+ }
+ }
+}
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestBulkLoad.java Thu Aug 15 05:44:56 2013
@@ -18,11 +18,20 @@
*/
package org.apache.hadoop.hbase.mapreduce;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
import org.apache.commons.lang.RandomStringUtils;
-import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue;
@@ -31,7 +40,6 @@ import org.apache.hadoop.hbase.client.Re
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdge;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.RegionSplitter;
import org.apache.hadoop.io.LongWritable;
@@ -51,22 +59,12 @@ import org.apache.hadoop.mapreduce.Reduc
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
+import org.junit.After;
+import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-
import static org.junit.Assert.assertEquals;
/**
@@ -101,7 +99,7 @@ import static org.junit.Assert.assertEqu
*
*/
@Category(IntegrationTests.class)
-public class IntegrationTestBulkLoad implements Configurable, Tool {
+public class IntegrationTestBulkLoad extends IntegrationTestBase {
private static byte[] CHAIN_FAM = Bytes.toBytes("L");
private static byte[] SORT_FAM = Bytes.toBytes("S");
@@ -120,11 +118,6 @@ public class IntegrationTestBulkLoad imp
private static String TABLE_NAME_KEY = "hbase.IntegrationTestBulkLoad.tableName";
private static String TABLE_NAME = "IntegrationTestBulkLoad";
- private static IntegrationTestingUtility util;
-
- private String tableName;
- private byte[] tableNameBytes;
-
@Test
public void testBulkLoad() throws Exception {
setupTable();
@@ -143,14 +136,12 @@ public class IntegrationTestBulkLoad imp
}
private void setupTable() throws IOException {
- tableName = getConf().get(TABLE_NAME_KEY, TABLE_NAME);
- tableNameBytes = Bytes.toBytes(tableName);
- if (util.getHBaseAdmin().tableExists(tableNameBytes)) {
- util.deleteTable(tableNameBytes);
+ if (util.getHBaseAdmin().tableExists(getTablename())) {
+ util.deleteTable(getTablename());
}
util.createTable(
- tableNameBytes,
+ Bytes.toBytes(getTablename()),
new byte[][]{CHAIN_FAM, SORT_FAM, DATA_FAM},
getSplits(16)
);
@@ -160,8 +151,8 @@ public class IntegrationTestBulkLoad imp
String jobName = IntegrationTestBulkLoad.class.getSimpleName() + " - " +
EnvironmentEdgeManager.currentTimeMillis();
Configuration conf = new Configuration(util.getConfiguration());
- Path p = util.getDataTestDirOnTestFS(tableName + "-" + iteration);
- HTable table = new HTable(conf, tableName);
+ Path p = util.getDataTestDirOnTestFS(getTablename() + "-" + iteration);
+ HTable table = new HTable(conf, getTablename());
conf.setBoolean("mapreduce.map.speculative", false);
conf.setBoolean("mapreduce.reduce.speculative", false);
@@ -532,7 +523,7 @@ public class IntegrationTestBulkLoad imp
*/
private void runCheck() throws IOException, ClassNotFoundException, InterruptedException {
Configuration conf = getConf();
- String jobName = tableName + "_check" + EnvironmentEdgeManager.currentTimeMillis();
+ String jobName = getTablename() + "_check" + EnvironmentEdgeManager.currentTimeMillis();
Path p = util.getDataTestDirOnTestFS(jobName);
Job job = new Job(conf);
@@ -551,7 +542,7 @@ public class IntegrationTestBulkLoad imp
s.setBatch(100);
TableMapReduceUtil.initTableMapperJob(
- Bytes.toBytes(tableName),
+ Bytes.toBytes(getTablename()),
new Scan(),
LinkedListCheckingMapper.class,
LinkKey.class,
@@ -571,12 +562,10 @@ public class IntegrationTestBulkLoad imp
util.getTestFileSystem().delete(p, true);
}
- @BeforeClass
- public static void provisionCluster() throws Exception {
- if (null == util) {
- util = new IntegrationTestingUtility();
- }
-
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ util = getTestingUtil(getConf());
util.initializeCluster(1);
// Scale this up on a real cluster
@@ -590,30 +579,27 @@ public class IntegrationTestBulkLoad imp
}
}
- @AfterClass
- public static void releaseCluster() throws Exception {
+ @After
+ @Override
+ public void cleanUp() throws Exception {
util.restoreCluster();
util = null;
}
@Override
- public int run(String[] args) throws Exception {
- provisionCluster();
- testBulkLoad();
- releaseCluster();
+ public int runTestFromCommandLine() throws Exception {
+ runCheck();
return 0;
}
- public void setConf(Configuration conf) {
- if (util != null) {
- throw new IllegalArgumentException("setConf not supported after the cluster has been started.");
- }
- util = new IntegrationTestingUtility(conf);
+ @Override
+ public String getTablename() {
+ return getConf().get(TABLE_NAME_KEY, TABLE_NAME);
}
@Override
- public Configuration getConf() {
- return util.getConfiguration();
+ protected Set<String> getColumnFamilies() {
+ return null;
}
public static void main(String[] args) throws Exception {
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mapreduce/IntegrationTestImportTsv.java Thu Aug 15 05:44:56 2013
@@ -17,11 +17,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
-import static java.lang.String.format;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
@@ -67,6 +62,11 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import static java.lang.String.format;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
/**
* Validate ImportTsv + LoadIncrementalHFiles on a distributed cluster.
*/
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java Thu Aug 15 05:44:56 2013
@@ -18,6 +18,14 @@
package org.apache.hadoop.hbase.mttr;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
import com.google.common.base.Objects;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
@@ -29,6 +37,11 @@ import org.apache.hadoop.hbase.HTableDes
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests;
import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.chaos.actions.Action;
+import org.apache.hadoop.hbase.chaos.actions.MoveRegionsOfTableAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartActiveMasterAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingMetaAction;
+import org.apache.hadoop.hbase.chaos.actions.RestartRsHoldingTableAction;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
@@ -37,7 +50,6 @@ import org.apache.hadoop.hbase.client.Re
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.ChaosMonkey;
import org.apache.hadoop.hbase.util.LoadTestTool;
import org.cloudera.htrace.Sampler;
import org.cloudera.htrace.Span;
@@ -49,14 +61,6 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-
import static junit.framework.Assert.assertEquals;
/**
@@ -126,10 +130,10 @@ public class IntegrationTestMTTR {
/**
* All of the chaos monkey actions used.
*/
- private static ChaosMonkey.Action restartRSAction;
- private static ChaosMonkey.Action restartMetaAction;
- private static ChaosMonkey.Action moveRegionAction;
- private static ChaosMonkey.Action restartMasterAction;
+ private static Action restartRSAction;
+ private static Action restartMetaAction;
+ private static Action moveRegionAction;
+ private static Action restartMasterAction;
/**
* The load test tool used to create load and make sure that HLogs aren't empty.
@@ -165,19 +169,19 @@ public class IntegrationTestMTTR {
private static void setupActions() throws IOException {
// Set up the action that will restart a region server holding a region from our table
// because this table should only have one region we should be good.
- restartRSAction = new ChaosMonkey.RestartRsHoldingTable(SLEEP_TIME, tableName.getNameAsString());
+ restartRSAction = new RestartRsHoldingTableAction(SLEEP_TIME, tableName.getNameAsString());
// Set up the action that will kill the region holding meta.
- restartMetaAction = new ChaosMonkey.RestartRsHoldingMeta(SLEEP_TIME);
+ restartMetaAction = new RestartRsHoldingMetaAction(SLEEP_TIME);
// Set up the action that will move the regions of our table.
- moveRegionAction = new ChaosMonkey.MoveRegionsOfTable(SLEEP_TIME, tableName.getNameAsString());
+ moveRegionAction = new MoveRegionsOfTableAction(SLEEP_TIME, tableName.getNameAsString());
// Kill the master
- restartMasterAction = new ChaosMonkey.RestartActiveMaster(1000);
+ restartMasterAction = new RestartActiveMasterAction(1000);
// Give the action the access to the cluster.
- ChaosMonkey.ActionContext actionContext = new ChaosMonkey.ActionContext(util);
+ Action.ActionContext actionContext = new Action.ActionContext(util);
restartRSAction.init(actionContext);
restartMetaAction.init(actionContext);
moveRegionAction.init(actionContext);
@@ -237,7 +241,7 @@ public class IntegrationTestMTTR {
@Test
public void testRestartRsHoldingTable() throws Exception {
- run(new ActionCallable(restartRSAction), "RestartRsHoldingTable");
+ run(new ActionCallable(restartRSAction), "RestartRsHoldingTableAction");
}
@Test
@@ -478,9 +482,9 @@ public class IntegrationTestMTTR {
public class ActionCallable implements Callable<Boolean> {
- private final ChaosMonkey.Action action;
+ private final Action action;
- public ActionCallable(ChaosMonkey.Action action) {
+ public ActionCallable(Action action) {
this.action = action;
}
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java Thu Aug 15 05:44:56 2013
@@ -25,6 +25,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
+import java.util.Set;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
@@ -37,12 +38,13 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
@@ -147,7 +149,7 @@ import org.junit.experimental.categories
* This class can be run as a unit test, as an integration test, or from the command line
*/
@Category(IntegrationTests.class)
-public class IntegrationTestBigLinkedList extends Configured implements Tool {
+public class IntegrationTestBigLinkedList extends IntegrationTestBase {
private static final byte[] NO_KEY = new byte[1];
protected static String TABLE_NAME_KEY = "IntegrationTestBigLinkedList.table";
@@ -182,9 +184,11 @@ public class IntegrationTestBigLinkedLis
private static final int WIDTH_DEFAULT = 1000000;
private static final int WRAP_DEFAULT = 25;
-
private static final int ROWKEY_LENGTH = 16;
+ private String toRun;
+ private String[] otherArgs;
+
static class CINode {
byte[] key;
byte[] prev;
@@ -970,38 +974,29 @@ public class IntegrationTestBigLinkedLis
protected IntegrationTestingUtility util;
@Before
+ @Override
public void setUp() throws Exception {
- util = getTestingUtil();
+ util = getTestingUtil(getConf());
util.initializeCluster(this.NUM_SLAVES_BASE);
this.setConf(util.getConfiguration());
}
@After
- public void tearDown() throws Exception {
+ @Override
+ public void cleanUp() throws Exception {
util.restoreCluster();
}
@Test
public void testContinuousIngest() throws IOException, Exception {
//Loop <num iterations> <num mappers> <num nodes per mapper> <output dir> <num reducers>
- int ret = ToolRunner.run(getTestingUtil().getConfiguration(), new Loop(),
+ int ret = ToolRunner.run(getTestingUtil(getConf()).getConfiguration(), new Loop(),
new String[] {"1", "1", "2000000",
util.getDataTestDirOnTestFS("IntegrationTestBigLinkedList").toString(), "1"});
org.junit.Assert.assertEquals(0, ret);
}
- protected IntegrationTestingUtility getTestingUtil() {
- if (this.util == null) {
- if (getConf() == null) {
- this.util = new IntegrationTestingUtility();
- } else {
- this.util = new IntegrationTestingUtility(getConf());
- }
- }
- return util;
- }
-
- private int printUsage() {
+ private void usage() {
System.err.println("Usage: " + this.getClass().getSimpleName() + " COMMAND [COMMAND options]");
System.err.println(" where COMMAND is one of:");
System.err.println("");
@@ -1021,39 +1016,55 @@ public class IntegrationTestBigLinkedLis
System.err.println(" Loop A program to Loop through Generator and");
System.err.println(" Verify steps");
System.err.println("\t ");
- return 1;
+ System.err.flush();
}
@Override
- public int run(String[] args) throws Exception {
+ protected void processOptions(CommandLine cmd) {
+ super.processOptions(cmd);
+ String[] args = cmd.getArgs();
//get the class, run with the conf
if (args.length < 1) {
- return printUsage();
+ printUsage();
+ throw new RuntimeException("Incorrect Number of args.");
}
+ toRun = args[0];
+ otherArgs = Arrays.copyOfRange(args, 1, args.length);
+ }
+
+ @Override
+ public int runTestFromCommandLine() throws Exception {
+
Tool tool = null;
- if (args[0].equals("Generator")) {
+ if (toRun.equals("Generator")) {
tool = new Generator();
- } else if (args[0].equals("Verify")) {
+ } else if (toRun.equals("Verify")) {
tool = new Verify();
- } else if (args[0].equals("Loop")) {
+ } else if (toRun.equals("Loop")) {
tool = new Loop();
- } else if (args[0].equals("Walker")) {
+ } else if (toRun.equals("Walker")) {
tool = new Walker();
- } else if (args[0].equals("Print")) {
+ } else if (toRun.equals("Print")) {
tool = new Print();
- } else if (args[0].equals("Delete")) {
+ } else if (toRun.equals("Delete")) {
tool = new Delete();
} else {
- return printUsage();
+ usage();
+ throw new RuntimeException("Unknown arg");
}
- args = Arrays.copyOfRange(args, 1, args.length);
- return ToolRunner.run(getConf(), tool, args);
+ return ToolRunner.run(getConf(), tool, otherArgs);
}
- public static void main(String[] args) throws Exception {
- int ret = ToolRunner.run(HBaseConfiguration.create(), new IntegrationTestBigLinkedList(), args);
- System.exit(ret);
+ @Override
+ public String getTablename() {
+ Configuration c = getConf();
+ return c.get(TABLE_NAME_KEY, DEFAULT_TABLE_NAME);
+ }
+
+ @Override
+ protected Set<String> getColumnFamilies() {
+ return null;
}
private static void setJobConf(Job job, int numMappers, long numNodes,
@@ -1073,4 +1084,11 @@ public class IntegrationTestBigLinkedLis
job.getConfiguration().setBoolean(ScannerCallable.LOG_SCANNER_ACTIVITY, true);
job.getConfiguration().setInt(TableRecordReaderImpl.LOG_PER_ROW_COUNT, 100000);
}
+
+ public static void main(String[] args) throws Exception {
+ Configuration conf = HBaseConfiguration.create();
+ IntegrationTestingUtility.setUseDistributedCluster(conf);
+ int ret = ToolRunner.run(conf, new IntegrationTestBigLinkedList(), args);
+ System.exit(ret);
+ }
}
Modified: hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java (original)
+++ hbase/trunk/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestLoadAndVerify.java Thu Aug 15 05:44:56 2013
@@ -17,27 +17,28 @@
*/
package org.apache.hadoop.hbase.test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
import java.io.IOException;
import java.util.Random;
+import java.util.Set;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.IntegrationTestBase;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.IntegrationTests;
import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
@@ -56,14 +57,14 @@ import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
-import com.google.common.collect.Lists;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
/**
* A large test which loads a lot of data that has internal references, and
@@ -83,7 +84,7 @@ import com.google.common.collect.Lists;
* Originally taken from Apache Bigtop.
*/
@Category(IntegrationTests.class)
-public class IntegrationTestLoadAndVerify extends Configured implements Tool {
+public class IntegrationTestLoadAndVerify extends IntegrationTestBase {
private static final String TEST_NAME = "IntegrationTestLoadAndVerify";
private static final byte[] TEST_FAMILY = Bytes.toBytes("f1");
private static final byte[] TEST_QUALIFIER = Bytes.toBytes("q1");
@@ -107,6 +108,8 @@ public class IntegrationTestLoadAndVerif
private IntegrationTestingUtility util;
+ private String toRun = null;
+
private enum Counters {
ROWS_WRITTEN,
REFERENCES_WRITTEN,
@@ -115,7 +118,7 @@ public class IntegrationTestLoadAndVerif
@Before
public void setUp() throws Exception {
- util = getTestingUtil();
+ util = getTestingUtil(getConf());
util.initializeCluster(3);
this.setConf(util.getConfiguration());
getConf().setLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT / 100);
@@ -123,11 +126,6 @@ public class IntegrationTestLoadAndVerif
getConf().setInt(NUM_REDUCE_TASKS_KEY, NUM_REDUCE_TASKS_DEFAULT / 10);
}
- @After
- public void tearDown() throws Exception {
- util.restoreCluster();
- }
-
/**
* Converts a "long" value between endian systems.
* Borrowed from Apache Commons IO
@@ -147,6 +145,12 @@ public class IntegrationTestLoadAndVerif
( ( ( value >> 56 ) & 0xff ) << 0 );
}
+ @Override
+ @After
+ public void cleanUp() throws Exception {
+ util.restoreCluster();
+ }
+
public static class LoadMapper
extends Mapper<NullWritable, NullWritable, NullWritable, NullWritable>
{
@@ -353,7 +357,7 @@ public class IntegrationTestLoadAndVerif
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TEST_NAME));
htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
- HBaseAdmin admin = getTestingUtil().getHBaseAdmin();
+ HBaseAdmin admin = getTestingUtil(getConf()).getHBaseAdmin();
int numPreCreate = 40;
admin.createTable(htd, Bytes.toBytes(0L), Bytes.toBytes(-1L), numPreCreate);
@@ -398,33 +402,41 @@ public class IntegrationTestLoadAndVerif
System.err.println(" -Dverify.scannercaching=<n> Number hbase scanner caching rows to read (default 50)");
}
- public int run(String argv[]) throws Exception {
- if (argv.length < 1 || argv.length > 1) {
+
+ @Override
+ protected void processOptions(CommandLine cmd) {
+ super.processOptions(cmd);
+
+ String[] args = cmd.getArgs();
+ if (args == null || args.length < 1 || args.length > 1) {
usage();
- return 1;
+ throw new RuntimeException("Incorrect Number of args.");
}
+ toRun = args[0];
+ }
+ public int runTestFromCommandLine() throws Exception {
IntegrationTestingUtility.setUseDistributedCluster(getConf());
boolean doLoad = false;
boolean doVerify = false;
boolean doDelete = getConf().getBoolean("loadmapper.deleteAfter",true);
int numPresplits = getConf().getInt("loadmapper.numPresplits", 40);
- if (argv[0].equals("load")) {
+ if (toRun.equals("load")) {
doLoad = true;
- } else if (argv[0].equals("verify")) {
+ } else if (toRun.equals("verify")) {
doVerify= true;
- } else if (argv[0].equals("loadAndVerify")) {
+ } else if (toRun.equals("loadAndVerify")) {
doLoad=true;
doVerify= true;
} else {
- System.err.println("Invalid argument " + argv[0]);
+ System.err.println("Invalid argument " + toRun);
usage();
return 1;
}
// create HTableDescriptor for specified table
- String table = getConf().get(TABLE_NAME_KEY, TEST_NAME);
+ String table = getTablename();
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
htd.addFamily(new HColumnDescriptor(TEST_FAMILY));
@@ -442,19 +454,19 @@ public class IntegrationTestLoadAndVerif
return 0;
}
- private IntegrationTestingUtility getTestingUtil() {
- if (this.util == null) {
- if (getConf() == null) {
- this.util = new IntegrationTestingUtility();
- } else {
- this.util = new IntegrationTestingUtility(getConf());
- }
- }
- return util;
+ @Override
+ public String getTablename() {
+ return getConf().get(TABLE_NAME_KEY, TEST_NAME);
+ }
+
+ @Override
+ protected Set<String> getColumnFamilies() {
+ return Sets.newHashSet(Bytes.toString(TEST_FAMILY));
}
public static void main(String argv[]) throws Exception {
Configuration conf = HBaseConfiguration.create();
+ IntegrationTestingUtility.setUseDistributedCluster(conf);
int ret = ToolRunner.run(conf, new IntegrationTestLoadAndVerify(), argv);
System.exit(ret);
}
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java?rev=1514163&r1=1514162&r2=1514163&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java Thu Aug 15 05:44:56 2013
@@ -56,7 +56,7 @@ public class LoadTestTool extends Abstra
protected static final String DEFAULT_TABLE_NAME = "cluster_test";
/** Column family used by the test */
- protected static byte[] COLUMN_FAMILY = Bytes.toBytes("test_cf");
+ public static byte[] COLUMN_FAMILY = Bytes.toBytes("test_cf");
/** Column families used by the test */
protected static final byte[][] COLUMN_FAMILIES = { COLUMN_FAMILY };