You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@storm.apache.org by bo...@apache.org on 2015/01/06 23:44:17 UTC
[07/11] storm git commit: Removing unused imports,
deleting accidentally added file.
Removing unused imports, deleting accidentally added file.
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/88a8658c
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/88a8658c
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/88a8658c
Branch: refs/heads/master
Commit: 88a8658c619b42ac63b3371199f57969aeb5edb0
Parents: e590a72
Author: Parth Brahmbhatt <br...@gmail.com>
Authored: Thu Dec 11 20:25:35 2014 -0800
Committer: Parth Brahmbhatt <br...@gmail.com>
Committed: Fri Dec 12 10:18:22 2014 -0800
----------------------------------------------------------------------
.../apache/storm/hbase/PersistentWordCount.java | 92 --------------------
.../storm/hbase/security/HBaseSecurityUtil.java | 12 +--
.../org/apache/storm/hdfs/bolt/HdfsBolt.java | 10 ---
.../hdfs/common/security/HdfsSecurityUtil.java | 11 +--
.../apache/storm/hdfs/trident/HdfsState.java | 10 ---
.../src/clj/backtype/storm/daemon/nimbus.clj | 4 +-
6 files changed, 6 insertions(+), 133 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/external/storm-hbase/src/main/java/org/apache/storm/hbase/PersistentWordCount.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/main/java/org/apache/storm/hbase/PersistentWordCount.java b/external/storm-hbase/src/main/java/org/apache/storm/hbase/PersistentWordCount.java
deleted file mode 100644
index 94a8ff5..0000000
--- a/external/storm-hbase/src/main/java/org/apache/storm/hbase/PersistentWordCount.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.storm.hbase;
-
-import org.apache.storm.hbase.bolt.HBaseBolt;
-import org.apache.storm.hbase.bolt.mapper.SimpleHBaseMapper;
-import org.apache.storm.hbase.security.HBaseSecurityUtil;
-
-import backtype.storm.Config;
-import backtype.storm.LocalCluster;
-import backtype.storm.StormSubmitter;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-import org.apache.storm.hbase.topology.WordCounter;
-import org.apache.storm.hbase.topology.WordSpout;
-
-import java.util.HashMap;
-import java.util.Map;
-
-
-public class PersistentWordCount {
- private static final String WORD_SPOUT = "WORD_SPOUT";
- private static final String COUNT_BOLT = "COUNT_BOLT";
- private static final String HBASE_BOLT = "HBASE_BOLT";
-
-
- public static void main(String[] args) throws Exception {
- Config config = new Config();
-
- Map<String, Object> hbConf = new HashMap<String, Object>();
- if(args.length > 0){
- hbConf.put("hbase.rootdir", args[0]);
- }
- config.put("hbase.conf", hbConf);
-
- WordSpout spout = new WordSpout();
- WordCounter bolt = new WordCounter();
-
- SimpleHBaseMapper mapper = new SimpleHBaseMapper()
- .withRowKeyField("word")
- .withColumnFields(new Fields("word"))
- .withCounterFields(new Fields("count"))
- .withColumnFamily("cf");
-
- HBaseBolt hbase = new HBaseBolt("WordCount", mapper)
- .withConfigKey("hbase.conf");
-
-
- // wordSpout ==> countBolt ==> HBaseBolt
- TopologyBuilder builder = new TopologyBuilder();
-
- builder.setSpout(WORD_SPOUT, spout, 1);
- builder.setBolt(COUNT_BOLT, bolt, 1).shuffleGrouping(WORD_SPOUT);
- builder.setBolt(HBASE_BOLT, hbase, 1).fieldsGrouping(COUNT_BOLT, new Fields("word"));
-
-
- if (args.length == 1) {
- LocalCluster cluster = new LocalCluster();
- cluster.submitTopology("test", config, builder.createTopology());
- Thread.sleep(30000);
- cluster.killTopology("test");
- cluster.shutdown();
- System.exit(0);
- } else if (args.length == 2) {
- StormSubmitter.submitTopology(args[1], config, builder.createTopology());
- } else if (args.length == 4) {
- System.out.println("hdfs url: " + args[0] + ", keytab file: " + args[2] +
- ", principal name: " + args[3] + ", toplogy name: " + args[1]);
- hbConf.put(HBaseSecurityUtil.STORM_KEYTAB_FILE_KEY, args[2]);
- hbConf.put(HBaseSecurityUtil.STORM_USER_NAME_KEY, args[3]);
- config.setNumWorkers(3);
- StormSubmitter.submitTopology(args[1], config, builder.createTopology());
- } else {
- System.out.println("Usage: PersistentWordCount <hbase.rootdir> [topology name] [keytab file] [principal name]");
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/external/storm-hbase/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
----------------------------------------------------------------------
diff --git a/external/storm-hbase/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java b/external/storm-hbase/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
index 36a4b89..99dfeba 100644
--- a/external/storm-hbase/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
+++ b/external/storm-hbase/src/main/java/org/apache/storm/hbase/security/HBaseSecurityUtil.java
@@ -17,26 +17,18 @@
*/
package org.apache.storm.hbase.security;
-import static backtype.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.UserProvider;
-import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.security.auth.Subject;
import java.io.IOException;
import java.net.InetAddress;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.Set;
+
+import static backtype.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
/**
* This class provides util methods for storm-hbase connector communicating
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/bolt/HdfsBolt.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/bolt/HdfsBolt.java b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/bolt/HdfsBolt.java
index 7e2369d..006ec25 100644
--- a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/bolt/HdfsBolt.java
+++ b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/bolt/HdfsBolt.java
@@ -25,10 +25,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.storm.hdfs.bolt.format.FileNameFormat;
import org.apache.storm.hdfs.bolt.format.RecordFormat;
import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
@@ -37,16 +33,10 @@ import org.apache.storm.hdfs.common.rotation.RotationAction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.security.auth.Subject;
import java.io.IOException;
import java.net.URI;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.Principal;
-import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
-import java.util.Set;
public class HdfsBolt extends AbstractHdfsBolt{
private static final Logger LOG = LoggerFactory.getLogger(HdfsBolt.class);
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/security/HdfsSecurityUtil.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/security/HdfsSecurityUtil.java b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/security/HdfsSecurityUtil.java
index 5847a58..9e390e9 100644
--- a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/security/HdfsSecurityUtil.java
+++ b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/common/security/HdfsSecurityUtil.java
@@ -17,24 +17,17 @@
*/
package org.apache.storm.hdfs.common.security;
-import static backtype.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.security.auth.Subject;
import java.io.IOException;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.Set;
+
+import static backtype.storm.Config.TOPOLOGY_AUTO_CREDENTIALS;
/**
* This class provides util methods for storm-hdfs connector communicating
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/trident/HdfsState.java
----------------------------------------------------------------------
diff --git a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/trident/HdfsState.java b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/trident/HdfsState.java
index 511d2b2..cde0e56 100644
--- a/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/trident/HdfsState.java
+++ b/external/storm-hdfs/src/main/java/org/apache/storm/hdfs/trident/HdfsState.java
@@ -17,7 +17,6 @@
*/
package org.apache.storm.hdfs.trident;
-import backtype.storm.Config;
import backtype.storm.task.IMetricsContext;
import backtype.storm.topology.FailedException;
import org.apache.hadoop.conf.Configuration;
@@ -27,18 +26,12 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.storm.hdfs.common.rotation.RotationAction;
-import org.apache.storm.hdfs.common.security.AutoHDFS;
import org.apache.storm.hdfs.common.security.HdfsSecurityUtil;
import org.apache.storm.hdfs.trident.format.FileNameFormat;
import org.apache.storm.hdfs.trident.format.RecordFormat;
import org.apache.storm.hdfs.trident.format.SequenceFormat;
import org.apache.storm.hdfs.trident.rotation.FileRotationPolicy;
-
import org.apache.storm.hdfs.trident.rotation.TimedRotationPolicy;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -46,12 +39,9 @@ import storm.trident.operation.TridentCollector;
import storm.trident.state.State;
import storm.trident.tuple.TridentTuple;
-import javax.security.auth.Subject;
import java.io.IOException;
import java.io.Serializable;
import java.net.URI;
-import java.security.AccessControlContext;
-import java.security.AccessController;
import java.util.*;
public class HdfsState implements State {
http://git-wip-us.apache.org/repos/asf/storm/blob/88a8658c/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
----------------------------------------------------------------------
diff --git a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
index b8a8815..1fbf7f0 100644
--- a/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
+++ b/storm-core/src/clj/backtype/storm/daemon/nimbus.clj
@@ -1053,8 +1053,8 @@
topology (normalize-topology total-storm-conf topology)
storm-cluster-state (:storm-cluster-state nimbus)]
- (doseq [nimbus-autocred-plugin (:nimbus-autocred-plugins nimbus)]
- (.populateCredentials nimbus-autocred-plugin credentials (Collections/unmodifiableMap storm-conf)))
+ (when credentials (doseq [nimbus-autocred-plugin (:nimbus-autocred-plugins nimbus)]
+ (.populateCredentials nimbus-autocred-plugin credentials (Collections/unmodifiableMap storm-conf))))
(if (and (conf SUPERVISOR-RUN-WORKER-AS-USER) (or (nil? submitter-user) (.isEmpty (.trim submitter-user))))
(throw (AuthorizationException. "Could not determine the user to run this topology as.")))
(system-topology! total-storm-conf topology) ;; this validates the structure of the topology