You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@eagle.apache.org by yo...@apache.org on 2016/08/12 03:40:28 UTC
[1/3] incubator-eagle git commit: hdfs, hbase,
mapr app conversion Author: Yong Zhang
Close: #334
Repository: incubator-eagle
Updated Branches:
refs/heads/develop 43d229eec -> 27513f7b7
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
index 083c9f8..eac807e 100644
--- a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
+++ b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/rest/HDFSResourceWebResource.java
@@ -18,6 +18,7 @@ package org.apache.eagle.service.security.hdfs.rest;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
@@ -26,9 +27,14 @@ import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
+import com.google.inject.Inject;
import com.typesafe.config.Config;
+import org.apache.eagle.metadata.model.ApplicationEntity;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.security.entity.FileStatusEntity;
import org.apache.eagle.security.resolver.MetadataAccessConfigRepo;
+import org.apache.eagle.security.service.ISecurityMetadataDAO;
+import org.apache.eagle.security.service.MetadataDaoFactory;
import org.apache.eagle.service.common.EagleExceptionWrapper;
import org.apache.eagle.service.security.hdfs.HDFSResourceConstants;
import org.apache.eagle.service.security.hdfs.HDFSResourceSensitivityDataJoiner;
@@ -44,9 +50,17 @@ import org.apache.eagle.service.security.hdfs.HDFSFileSystem;
* REST Web Service to browse files and Paths in HDFS
*/
@Path(HDFSResourceConstants.HDFS_RESOURCE)
-public class HDFSResourceWebResource
-{
+public class HDFSResourceWebResource {
private static Logger LOG = LoggerFactory.getLogger(HDFSResourceWebResource.class);
+ final public static String HDFS_APPLICATION = "HdfsAuditLogApplication";
+ private ApplicationEntityService entityService;
+ private ISecurityMetadataDAO dao;
+
+ @Inject
+ public HDFSResourceWebResource(ApplicationEntityService entityService, Config eagleServerConfig){
+ this.entityService = entityService;
+ dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
+ }
@GET
@Consumes(MediaType.APPLICATION_JSON)
@@ -56,13 +70,12 @@ public class HDFSResourceWebResource
LOG.info("Starting HDFS Resource Browsing. Query Parameters ==> Site :"+site+" Path : "+filePath );
HDFSResourceWebResponse response = new HDFSResourceWebResponse();
HDFSResourceWebRequestValidator validator = new HDFSResourceWebRequestValidator();
- MetadataAccessConfigRepo repo = new MetadataAccessConfigRepo();
List<FileStatusEntity> result = new ArrayList<>();
List<FileStatus> fileStatuses = null;
try {
validator.validate(site, filePath); // First Step would be validating Request
- Config config = repo.getConfig(HDFSResourceConstants.HDFS_APPLICATION, site);
- Configuration conf = repo.convert(config);
+ Map<String, Object> config = getAppConfig(site, HDFS_APPLICATION);
+ Configuration conf = convert(config);
HDFSFileSystem fileSystem = new HDFSFileSystem(conf);
fileStatuses = fileSystem.browse(filePath);
// Join with File Sensitivity Info
@@ -76,4 +89,17 @@ public class HDFSResourceWebResource
response.setObj(result);
return response;
}
+
+ private Map<String, Object> getAppConfig(String site, String appType){
+ ApplicationEntity entity = entityService.getBySiteIdAndAppType(site, appType);
+ return entity.getConfiguration();
+ }
+
+ private Configuration convert(Map<String, Object> originalConfig) throws Exception {
+ Configuration config = new Configuration();
+ for (Map.Entry<String, Object> entry : originalConfig.entrySet()) {
+ config.set(entry.getKey().toString(), entry.getValue().toString());
+ }
+ return config;
+ }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogApplication.java b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogApplication.java
new file mode 100644
index 0000000..3319164
--- /dev/null
+++ b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogApplication.java
@@ -0,0 +1,44 @@
+/*
+ *
+ * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * contributor license agreements. See the NOTICE file distributed with
+ * * this work for additional information regarding copyright ownership.
+ * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * (the "License"); you may not use this file except in compliance with
+ * * the License. You may obtain a copy of the License at
+ * * <p/>
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * * <p/>
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.topology.base.BaseRichBolt;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+
+/**
+ * Since 8/11/16.
+ */
+public class MapRFSAuditLogApplication extends AbstractHdfsAuditLogApplication {
+ @Override
+ public BaseRichBolt getParserBolt() {
+ return new MapRFSAuditLogParserBolt();
+ }
+
+ @Override
+ public String getSinkStreamName() {
+ return "mapr_audit_log_stream";
+ }
+
+ public static void main(String[] args){
+ Config config = ConfigFactory.load();
+ MapRFSAuditLogApplication app = new MapRFSAuditLogApplication();
+ app.run(config);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogParserBolt.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogParserBolt.java b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogParserBolt.java
new file mode 100644
index 0000000..37e55c6
--- /dev/null
+++ b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogParserBolt.java
@@ -0,0 +1,76 @@
+/*
+ *
+ * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * contributor license agreements. See the NOTICE file distributed with
+ * * this work for additional information regarding copyright ownership.
+ * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * (the "License"); you may not use this file except in compliance with
+ * * the License. You may obtain a copy of the License at
+ * * <p/>
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * * <p/>
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import org.apache.eagle.security.hdfs.MAPRFSAuditLogObject;
+import org.apache.eagle.security.hdfs.MAPRFSAuditLogParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Since 8/11/16.
+ */
+public class MapRFSAuditLogParserBolt extends BaseRichBolt {
+ private static Logger LOG = LoggerFactory.getLogger(HdfsAuditLogParserBolt.class);
+ private OutputCollector collector;
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ this.collector = collector;
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ String logLine = input.getString(0);
+
+ MAPRFSAuditLogParser parser = new MAPRFSAuditLogParser();
+ MAPRFSAuditLogObject entity = null;
+ try{
+ entity = parser.parse(logLine);
+ Map<String, Object> map = new TreeMap<String, Object>();
+ map.put("src", entity.src);
+ map.put("dst", entity.dst);
+ map.put("host", entity.host);
+ map.put("timestamp", entity.timestamp);
+ map.put("status", entity.status);
+ map.put("user", entity.user);
+ map.put("cmd", entity.cmd);
+ map.put("volume", entity.volume);
+ collector.emit(Arrays.asList(map));
+ }catch(Exception ex) {
+ LOG.error("Failing parse audit log message", ex);
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("f1"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogProcessorMain.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogProcessorMain.java b/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogProcessorMain.java
deleted file mode 100644
index b824e8a..0000000
--- a/eagle-security/eagle-security-maprfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/MapRFSAuditLogProcessorMain.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- *
- * * Licensed to the Apache Software Foundation (ASF) under one or more
- * * contributor license agreements. See the NOTICE file distributed with
- * * this work for additional information regarding copyright ownership.
- * * The ASF licenses this file to You under the Apache License, Version 2.0
- * * (the "License"); you may not use this file except in compliance with
- * * the License. You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- *
- */
-package org.apache.eagle.security.auditlog;
-
-import backtype.storm.spout.SchemeAsMultiScheme;
-import com.typesafe.config.Config;
-import org.apache.commons.lang3.time.DateUtils;
-import org.apache.eagle.common.config.EagleConfigConstants;
-import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutProvider;
-import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutScheme;
-import org.apache.eagle.datastream.ExecutionEnvironments;
-import org.apache.eagle.datastream.core.StreamProducer;
-import org.apache.eagle.datastream.storm.StormExecutionEnvironment;
-import org.apache.eagle.partition.DataDistributionDao;
-import org.apache.eagle.partition.PartitionAlgorithm;
-import org.apache.eagle.partition.PartitionStrategy;
-import org.apache.eagle.partition.PartitionStrategyImpl;
-import org.apache.eagle.security.partition.DataDistributionDaoImpl;
-import org.apache.eagle.security.partition.GreedyPartitionAlgorithm;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-public class MapRFSAuditLogProcessorMain {
-
- public static PartitionStrategy createStrategy(Config config) {
- // TODO: Refactor configuration structure to avoid repeated config processing configure ~ hao
- String host = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST);
- Integer port = config.getInt(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT);
- String username = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME);
- String password = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD);
- String topic = config.getString("dataSourceConfig.topic");
- DataDistributionDao dao = new DataDistributionDaoImpl(host, port, username, password, topic);
- PartitionAlgorithm algorithm = new GreedyPartitionAlgorithm();
- String key1 = EagleConfigConstants.EAGLE_PROPS + ".partitionRefreshIntervalInMin";
- Integer partitionRefreshIntervalInMin = config.hasPath(key1) ? config.getInt(key1) : 60;
- String key2 = EagleConfigConstants.EAGLE_PROPS + ".kafkaStatisticRangeInMin";
- Integer kafkaStatisticRangeInMin = config.hasPath(key2) ? config.getInt(key2) : 60;
- PartitionStrategy strategy = new PartitionStrategyImpl(dao, algorithm, partitionRefreshIntervalInMin * DateUtils.MILLIS_PER_MINUTE, kafkaStatisticRangeInMin * DateUtils.MILLIS_PER_MINUTE);
- return strategy;
- }
-
- public static KafkaSourcedSpoutProvider createProvider(Config config) {
- String deserClsName = config.getString("dataSourceConfig.deserializerClass");
- final KafkaSourcedSpoutScheme scheme = new KafkaSourcedSpoutScheme(deserClsName, config) {
- @Override
- public List<Object> deserialize(byte[] ser) {
- Object tmp = deserializer.deserialize(ser);
- Map<String, Object> map = (Map<String, Object>)tmp;
- if(tmp == null) return null;
- return Arrays.asList(map.get("user"), tmp);
- }
- };
-
- KafkaSourcedSpoutProvider provider = new KafkaSourcedSpoutProvider() {
- @Override
- public SchemeAsMultiScheme getStreamScheme(String deserClsName, Config context) {
- return new SchemeAsMultiScheme(scheme);
- }
- };
- return provider;
- }
-
- @SuppressWarnings("unchecked")
- public static void execWithDefaultPartition(StormExecutionEnvironment env, KafkaSourcedSpoutProvider provider) {
- StreamProducer source = env.fromSpout(provider).withOutputFields(2).nameAs("kafkaMsgConsumer").groupBy(Arrays.asList(0));
- //StreamProducer reassembler = source.flatMap(new HdfsUserCommandReassembler()).groupBy(Arrays.asList(0));
- //source.streamUnion(reassembler)
- source.flatMap(new FileSensitivityDataJoinExecutor()).groupBy(Arrays.asList(0))
- .flatMap(new IPZoneDataJoinExecutor())
- .alertWithConsumer("maprFSAuditLogEventStream", "maprFSAuditLogAlertExecutor");
- env.execute();
- }
-
- @SuppressWarnings("unchecked")
- public static void execWithBalancedPartition(StormExecutionEnvironment env, KafkaSourcedSpoutProvider provider) {
- PartitionStrategy strategy = createStrategy(env.getConfig());
- StreamProducer source = env.fromSpout(provider).withOutputFields(2).nameAs("kafkaMsgConsumer").groupBy(strategy);
- //StreamProducer reassembler = source.flatMap(new HdfsUserCommandReassembler()).groupBy(Arrays.asList(0));
- //source.streamUnion(reassembler)
- source.flatMap(new FileSensitivityDataJoinExecutor()).groupBy(Arrays.asList(0))
- .flatMap(new IPZoneDataJoinExecutor())
- .alertWithConsumer("maprFSAuditLogEventStream", "maprFSAuditLogAlertExecutor");
- env.execute();
- }
-
- public static void main(String[] args) throws Exception{
- StormExecutionEnvironment env = ExecutionEnvironments.getStorm(args);
- Config config = env.getConfig();
- KafkaSourcedSpoutProvider provider = createProvider(env.getConfig());
- Boolean balancePartition = config.hasPath("eagleProps.balancePartitionEnabled") && config.getBoolean("eagleProps.balancePartitionEnabled");
- if (balancePartition) {
- execWithBalancedPartition(env, provider);
- } else {
- execWithDefaultPartition(env, provider);
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-maprfs-auditlog/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-maprfs-auditlog/src/main/resources/log4j.properties b/eagle-security/eagle-security-maprfs-auditlog/src/main/resources/log4j.properties
index 07f8402..12f215c 100644
--- a/eagle-security/eagle-security-maprfs-auditlog/src/main/resources/log4j.properties
+++ b/eagle-security/eagle-security-maprfs-auditlog/src/main/resources/log4j.properties
@@ -20,7 +20,7 @@ eagle.log.file=eagle.log
#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinExecutor=DEBUG
-#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinExecutor=DEBUG
+#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinBolt=DEBUG
log4j.logger.org.apache.eagle.security.auditlog.HdfsUserCommandReassembler=DEBUG
#log4j.logger.org.apache.eagle.executor.AlertExecutor=DEBUG
# standard output
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-server-assembly/src/main/conf/configuration.yml
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/src/main/conf/configuration.yml b/eagle-server-assembly/src/main/conf/configuration.yml
deleted file mode 100644
index c671ade..0000000
--- a/eagle-server-assembly/src/main/conf/configuration.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-server:
- applicationConnectors:
- - type: http
- port: 9090
- adminConnectors:
- - type: http
- port: 9091
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-server-assembly/src/main/conf/configuration.yml~HEAD
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/src/main/conf/configuration.yml~HEAD b/eagle-server-assembly/src/main/conf/configuration.yml~HEAD
new file mode 100644
index 0000000..c671ade
--- /dev/null
+++ b/eagle-server-assembly/src/main/conf/configuration.yml~HEAD
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+server:
+ applicationConnectors:
+ - type: http
+ port: 9090
+ adminConnectors:
+ - type: http
+ port: 9091
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-server-assembly/src/main/conf/configuration.yml~upstream_develop
----------------------------------------------------------------------
diff --git a/eagle-server-assembly/src/main/conf/configuration.yml~upstream_develop b/eagle-server-assembly/src/main/conf/configuration.yml~upstream_develop
new file mode 100644
index 0000000..c671ade
--- /dev/null
+++ b/eagle-server-assembly/src/main/conf/configuration.yml~upstream_develop
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+server:
+ applicationConnectors:
+ - type: http
+ port: 9090
+ adminConnectors:
+ - type: http
+ port: 9091
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-server/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-server/pom.xml b/eagle-server/pom.xml
index 20819ff..924cd6d 100644
--- a/eagle-server/pom.xml
+++ b/eagle-server/pom.xml
@@ -145,6 +145,18 @@
<artifactId>eagle-security-hbase-web</artifactId>
<version>${project.version}</version>
</dependency>
+
+ <!-- hdfs audit log monitoring -->
+ <dependency>
+ <groupId>org.apache.eagle</groupId>
+ <artifactId>eagle-security-hdfs-auditlog</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.eagle</groupId>
+ <artifactId>eagle-security-hdfs-web</artifactId>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
<build>
<resources>
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-server/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-server/src/main/resources/application.conf b/eagle-server/src/main/resources/application.conf
index 111b7ea..9363445 100644
--- a/eagle-server/src/main/resources/application.conf
+++ b/eagle-server/src/main/resources/application.conf
@@ -43,7 +43,7 @@
"store": "org.apache.eagle.metadata.service.memory.MemoryMetadataStore"
"jdbc":{
"username": "root"
- "password": null
+ "password": ""
"driverClassName":"com.mysql.jdbc.Driver"
"url":"jdbc:mysql://server.eagle.apache.org:3306/eagle"
}
[2/3] incubator-eagle git commit: hdfs, hbase,
mapr app conversion Author: Yong Zhang
Close: #334
Posted by yo...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/AbstractHdfsAuditLogApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/AbstractHdfsAuditLogApplication.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/AbstractHdfsAuditLogApplication.java
new file mode 100644
index 0000000..ccbce98
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/AbstractHdfsAuditLogApplication.java
@@ -0,0 +1,117 @@
+/*
+ *
+ * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * contributor license agreements. See the NOTICE file distributed with
+ * * this work for additional information regarding copyright ownership.
+ * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * (the "License"); you may not use this file except in compliance with
+ * * the License. You may obtain a copy of the License at
+ * * <p/>
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * * <p/>
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.generated.StormTopology;
+import backtype.storm.topology.BoltDeclarer;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+import org.apache.commons.lang3.time.DateUtils;
+import org.apache.eagle.app.StormApplication;
+import org.apache.eagle.app.environment.impl.StormEnvironment;
+import org.apache.eagle.app.sink.StormStreamSink;
+import org.apache.eagle.common.config.EagleConfigConstants;
+import org.apache.eagle.dataproc.impl.storm.partition.CustomPartitionGrouping;
+import org.apache.eagle.partition.DataDistributionDao;
+import org.apache.eagle.partition.PartitionAlgorithm;
+import org.apache.eagle.partition.PartitionStrategy;
+import org.apache.eagle.partition.PartitionStrategyImpl;
+import org.apache.eagle.security.partition.DataDistributionDaoImpl;
+import org.apache.eagle.security.partition.GreedyPartitionAlgorithm;
+import org.apache.eagle.security.topo.NewKafkaSourcedSpoutProvider;
+import storm.kafka.StringScheme;
+
+/**
+ * Since 8/10/16.
+ */
+public abstract class AbstractHdfsAuditLogApplication extends StormApplication {
+ public final static String SPOUT_TASK_NUM = "topology.numOfSpoutTasks";
+ public final static String PARSER_TASK_NUM = "topology.numOfParserTasks";
+ public final static String SENSITIVITY_JOIN_TASK_NUM = "topology.numOfSensitivityJoinTasks";
+ public final static String IPZONE_JOIN_TASK_NUM = "topology.numOfIPZoneJoinTasks";
+ public final static String SINK_TASK_NUM = "topology.numOfSinkTasks";
+
+ @Override
+ public StormTopology execute(Config config, StormEnvironment environment) {
+ TopologyBuilder builder = new TopologyBuilder();
+ NewKafkaSourcedSpoutProvider provider = new NewKafkaSourcedSpoutProvider();
+ IRichSpout spout = provider.getSpout(config);
+
+ int numOfSpoutTasks = config.getInt(SPOUT_TASK_NUM);
+ int numOfParserTasks = config.getInt(PARSER_TASK_NUM);
+ int numOfSensitivityJoinTasks = config.getInt(SENSITIVITY_JOIN_TASK_NUM);
+ int numOfIPZoneJoinTasks = config.getInt(IPZONE_JOIN_TASK_NUM);
+ int numOfSinkTasks = config.getInt(SINK_TASK_NUM);
+
+ builder.setSpout("ingest", spout, numOfSpoutTasks);
+
+
+ HdfsAuditLogParserBolt parserBolt = new HdfsAuditLogParserBolt();
+ BoltDeclarer boltDeclarer = builder.setBolt("parserBolt", parserBolt, numOfParserTasks);
+
+ Boolean useDefaultPartition = !config.hasPath("eagleProps.useDefaultPartition") || config.getBoolean("eagleProps.useDefaultPartition");
+ if(useDefaultPartition){
+ boltDeclarer.fieldsGrouping("ingest", new Fields(StringScheme.STRING_SCHEME_KEY));
+ }else{
+ boltDeclarer.customGrouping("ingest", new CustomPartitionGrouping(createStrategy(config)));
+ }
+
+ FileSensitivityDataJoinBolt sensitivityDataJoinBolt = new FileSensitivityDataJoinBolt(config);
+ BoltDeclarer sensitivityDataJoinBoltDeclarer = builder.setBolt("sensitivityJoin", sensitivityDataJoinBolt, numOfSensitivityJoinTasks);
+ sensitivityDataJoinBoltDeclarer.fieldsGrouping("parserBolt", new Fields("f1"));
+
+ IPZoneDataJoinBolt ipZoneDataJoinBolt = new IPZoneDataJoinBolt(config);
+ BoltDeclarer ipZoneDataJoinBoltDeclarer = builder.setBolt("ipZoneJoin", ipZoneDataJoinBolt, numOfIPZoneJoinTasks);
+ ipZoneDataJoinBoltDeclarer.fieldsGrouping("sensitivityJoin", new Fields("user"));
+
+ StormStreamSink sinkBolt = environment.getStreamSink("hdfs_audit_log_stream",config);
+ BoltDeclarer kafkaBoltDeclarer = builder.setBolt("kafkaSink", sinkBolt, numOfSinkTasks);
+ kafkaBoltDeclarer.fieldsGrouping("ipZoneJoin", new Fields("user"));
+ return builder.createTopology();
+
+
+ }
+
+ public abstract BaseRichBolt getParserBolt();
+ public abstract String getSinkStreamName();
+
+ public static PartitionStrategy createStrategy(Config config) {
+ // TODO: Refactor configuration structure to avoid repeated config processing configure ~ hao
+ String host = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST);
+ Integer port = config.getInt(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT);
+ String username = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME);
+ String password = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD);
+ String topic = config.getString("dataSourceConfig.topic");
+ DataDistributionDao dao = new DataDistributionDaoImpl(host, port, username, password, topic);
+ PartitionAlgorithm algorithm = new GreedyPartitionAlgorithm();
+ String key1 = EagleConfigConstants.EAGLE_PROPS + ".partitionRefreshIntervalInMin";
+ Integer partitionRefreshIntervalInMin = config.hasPath(key1) ? config.getInt(key1) : 60;
+ String key2 = EagleConfigConstants.EAGLE_PROPS + ".kafkaStatisticRangeInMin";
+ Integer kafkaStatisticRangeInMin = config.hasPath(key2) ? config.getInt(key2) : 60;
+ PartitionStrategy strategy = new PartitionStrategyImpl(dao, algorithm, partitionRefreshIntervalInMin * DateUtils.MILLIS_PER_MINUTE, kafkaStatisticRangeInMin * DateUtils.MILLIS_PER_MINUTE);
+ return strategy;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinBolt.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinBolt.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinBolt.java
new file mode 100644
index 0000000..6cbbde6
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinBolt.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import com.typesafe.config.Config;
+import org.apache.eagle.datastream.Collector;
+import org.apache.eagle.datastream.JavaStormStreamExecutor2;
+import org.apache.eagle.security.auditlog.timer.FileSensitivityPollingJob;
+import org.apache.eagle.security.auditlog.util.SimplifyPath;
+import org.apache.eagle.security.entity.FileSensitivityAPIEntity;
+import org.apache.eagle.security.util.ExternalDataCache;
+import org.apache.eagle.security.util.ExternalDataJoiner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.Tuple2;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class FileSensitivityDataJoinBolt extends BaseRichBolt {
+ private static Logger LOG = LoggerFactory.getLogger(FileSensitivityDataJoinBolt.class);
+ private Config config;
+ private OutputCollector collector;
+
+ public FileSensitivityDataJoinBolt(Config config){
+ this.config = config;
+ }
+
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ this.collector = collector;
+ // start hdfs sensitivity data polling
+ try{
+ ExternalDataJoiner joiner = new ExternalDataJoiner(
+ FileSensitivityPollingJob.class, config, context.getThisComponentId() + "." + context.getThisTaskIndex());
+ joiner.start();
+ }catch(Exception ex){
+ LOG.error("Fail bringing up quartz scheduler", ex);
+ throw new IllegalStateException(ex);
+ }
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ try {
+ Map<String, Object> toBeCopied = (Map<String, Object>) input.getValue(0);
+ Map<String, Object> event = new TreeMap<String, Object>(toBeCopied);
+ Map<String, FileSensitivityAPIEntity> map =
+ (Map<String, FileSensitivityAPIEntity>) ExternalDataCache.getInstance().
+ getJobResult(FileSensitivityPollingJob.class);
+ FileSensitivityAPIEntity e = null;
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Receive map: " + map + "event: " + event);
+ }
+
+ String src = (String) event.get("src");
+ if (map != null && src != null) {
+ String simplifiedPath = new SimplifyPath().build(src);
+ for (String fileDir : map.keySet()) {
+ Pattern pattern = Pattern.compile(simplifiedPath, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(fileDir);
+ boolean isMatched = matcher.matches();
+ if (isMatched) {
+ e = map.get(fileDir);
+ break;
+ }
+ }
+ }
+ event.put("sensitivityType", e == null ? "NA" : e.getSensitivityType());
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("After file sensitivity lookup: " + event);
+ }
+ // LOG.info(">>>> After file sensitivity lookup: " + event);
+ collector.emit(Arrays.asList(event.get("user"), event));
+ }catch(Exception ex){
+ LOG.error("error joining data, ignore it", ex);
+ }finally {
+ collector.ack(input);
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("user", "message"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinExecutor.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinExecutor.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinExecutor.java
deleted file mode 100644
index 33d29d0..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/FileSensitivityDataJoinExecutor.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.security.auditlog;
-
-import com.typesafe.config.Config;
-import org.apache.eagle.datastream.Collector;
-import org.apache.eagle.datastream.JavaStormStreamExecutor2;
-import org.apache.eagle.security.auditlog.timer.FileSensitivityPollingJob;
-import org.apache.eagle.security.auditlog.util.SimplifyPath;
-import org.apache.eagle.security.entity.FileSensitivityAPIEntity;
-import org.apache.eagle.security.util.ExternalDataCache;
-import org.apache.eagle.security.util.ExternalDataJoiner;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.Tuple2;
-
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class FileSensitivityDataJoinExecutor extends JavaStormStreamExecutor2<String, Map> {
- private static final Logger LOG = LoggerFactory.getLogger(FileSensitivityDataJoinExecutor.class);
- private Config config;
-
- @Override
- public void prepareConfig(Config config) {
- this.config = config;
- }
-
- @Override
- public void init() {
- // start IPZone data polling
- try{
- ExternalDataJoiner joiner = new ExternalDataJoiner(FileSensitivityPollingJob.class, config, "1");
- joiner.start();
- }catch(Exception ex){
- LOG.error("Fail bring up quartz scheduler", ex);
- throw new IllegalStateException(ex);
- }
- }
-
- @Override
- public void flatMap(java.util.List<Object> input, Collector<Tuple2<String, Map>> outputCollector){
- Map<String, Object> toBeCopied = (Map<String, Object>)input.get(1);
- Map<String, Object> event = new TreeMap<String, Object>(toBeCopied);
- Map<String, FileSensitivityAPIEntity> map = (Map<String, FileSensitivityAPIEntity>) ExternalDataCache.getInstance().getJobResult(FileSensitivityPollingJob.class);
- FileSensitivityAPIEntity e = null;
- if (LOG.isDebugEnabled()) {
- LOG.debug("Receive map: " + map + "event: " + event);
- }
-
- String src = (String)event.get("src");
- if(map != null && src != null) {
- String simplifiedPath = new SimplifyPath().build(src);
- for (String fileDir : map.keySet()) {
- Pattern pattern = Pattern.compile(simplifiedPath,Pattern.CASE_INSENSITIVE);
- Matcher matcher = pattern.matcher(fileDir);
- boolean isMatched = matcher.matches();
- if (isMatched) {
- e = map.get(fileDir);
- break;
- }
- }
- }
- event.put("sensitivityType", e == null ? "NA" : e.getSensitivityType());
- if(LOG.isDebugEnabled()) {
- LOG.debug("After file sensitivity lookup: " + event);
- }
- // LOG.info(">>>> After file sensitivity lookup: " + event);
- outputCollector.collect(new Tuple2(event.get("user"), event));
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HDFSAuditLogAppProvider.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HDFSAuditLogAppProvider.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HDFSAuditLogAppProvider.java
new file mode 100644
index 0000000..fcf9d4f
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HDFSAuditLogAppProvider.java
@@ -0,0 +1,34 @@
+/*
+ *
+ * *
+ * * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * * contributor license agreements. See the NOTICE file distributed with
+ * * * this work for additional information regarding copyright ownership.
+ * * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * * (the "License"); you may not use this file except in compliance with
+ * * * the License. You may obtain a copy of the License at
+ * * * <p/>
+ * * * http://www.apache.org/licenses/LICENSE-2.0
+ * * * <p/>
+ * * * Unless required by applicable law or agreed to in writing, software
+ * * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * * See the License for the specific language governing permissions and
+ * * * limitations under the License.
+ * *
+ *
+ */
+
+package org.apache.eagle.security.auditlog;
+
+import org.apache.eagle.app.spi.AbstractApplicationProvider;
+
/**
 * Application-provider SPI hook that exposes {@link HdfsAuditLogApplication}
 * to the Eagle application framework.
 *
 * NOTE(review): this diff creates the class in a file named
 * HDFSAuditLogAppProvider.java, but a public class named HdfsAuditLogAppProvider
 * must live in HdfsAuditLogAppProvider.java to compile — confirm the on-disk
 * file name matches the class name.
 *
 * Since 8/11/16.
 */
public class HdfsAuditLogAppProvider extends AbstractApplicationProvider<HdfsAuditLogApplication> {
    // Factory method: a fresh application instance per call.
    @Override
    public HdfsAuditLogApplication getApplication() {
        return new HdfsAuditLogApplication();
    }
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogApplication.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogApplication.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogApplication.java
new file mode 100644
index 0000000..791572b
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogApplication.java
@@ -0,0 +1,47 @@
+/*
+ *
+ * *
+ * * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * * contributor license agreements. See the NOTICE file distributed with
+ * * * this work for additional information regarding copyright ownership.
+ * * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * * (the "License"); you may not use this file except in compliance with
+ * * * the License. You may obtain a copy of the License at
+ * * * <p/>
+ * * * http://www.apache.org/licenses/LICENSE-2.0
+ * * * <p/>
+ * * * Unless required by applicable law or agreed to in writing, software
+ * * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * * See the License for the specific language governing permissions and
+ * * * limitations under the License.
+ * *
+ *
+ */
+
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.topology.base.BaseRichBolt;
+import com.typesafe.config.Config;
+import com.typesafe.config.ConfigFactory;
+
+/**
+ * Since 8/11/16.
+ */
+public class HdfsAuditLogApplication extends AbstractHdfsAuditLogApplication {
+ @Override
+ public BaseRichBolt getParserBolt() {
+ return new HdfsAuditLogParserBolt();
+ }
+
+ @Override
+ public String getSinkStreamName() {
+ return "hdfs_audit_log_stream";
+ }
+
+ public static void main(String[] args){
+ Config config = ConfigFactory.load();
+ HdfsAuditLogApplication app = new HdfsAuditLogApplication();
+ app.run(config);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogKafkaDeserializer.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogKafkaDeserializer.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogKafkaDeserializer.java
deleted file mode 100644
index 08ab993..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogKafkaDeserializer.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.security.auditlog;
-
-import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
-
-import org.apache.eagle.security.hdfs.HDFSAuditLogObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.eagle.dataproc.impl.storm.kafka.SpoutKafkaMessageDeserializer;
-import org.apache.eagle.security.hdfs.HDFSAuditLogParser;
-import org.apache.eagle.security.hdfs.HDFSAuditLogObject;
-
-public class HdfsAuditLogKafkaDeserializer implements SpoutKafkaMessageDeserializer{
- private static Logger LOG = LoggerFactory.getLogger(HdfsAuditLogKafkaDeserializer.class);
- private Properties props;
-
- public HdfsAuditLogKafkaDeserializer(Properties props){
- this.props = props;
- }
-
- /**
- * the steps for deserializing message from kafka
- * 1. convert byte array to string
- * 2. parse string to eagle entity
- */
- @Override
- public Object deserialize(byte[] arg0) {
- String logLine = new String(arg0);
-
- HDFSAuditLogParser parser = new HDFSAuditLogParser();
- HDFSAuditLogObject entity = null;
- try{
- entity = parser.parse(logLine);
- }catch(Exception ex){
- LOG.error("Failing parse audit log message", ex);
- }
- if(entity == null){
- LOG.warn("Event ignored as it can't be correctly parsed, the log is ", logLine);
- return null;
- }
- Map<String, Object> map = new TreeMap<String, Object>();
- map.put("src", entity.src);
- map.put("dst", entity.dst);
- map.put("host", entity.host);
- map.put("timestamp", entity.timestamp);
- map.put("allowed", entity.allowed);
- map.put("user", entity.user);
- map.put("cmd", entity.cmd);
-
- return map;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogMonitoringTopology.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogMonitoringTopology.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogMonitoringTopology.java
deleted file mode 100644
index a7f207e..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogMonitoringTopology.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.eagle.security.auditlog;
-
-
-import com.typesafe.config.Config;
-import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutProvider;
-import org.apache.eagle.datastream.ExecutionEnvironments;
-import org.apache.eagle.datastream.storm.StormExecutionEnvironment;
-import org.apache.eagle.stream.application.TopologyExecutable;
-
-public class HdfsAuditLogMonitoringTopology implements TopologyExecutable {
- @Override
- public void submit(String topology, Config config) {
- StormExecutionEnvironment env = ExecutionEnvironments.getStorm(config);
- KafkaSourcedSpoutProvider provider = HdfsAuditLogProcessorMain.createProvider(env.getConfig());
- Boolean balancePartition = config.hasPath("eagleProps.balancePartitionEnabled") && config.getBoolean("eagleProps.balancePartitionEnabled");
- if (balancePartition) {
- HdfsAuditLogProcessorMain.execWithBalancedPartition(env, provider);
- } else {
- HdfsAuditLogProcessorMain.execWithDefaultPartition(env, provider);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogParserBolt.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogParserBolt.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogParserBolt.java
new file mode 100644
index 0000000..5ea5950
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogParserBolt.java
@@ -0,0 +1,77 @@
+/*
+ *
+ * * Licensed to the Apache Software Foundation (ASF) under one or more
+ * * contributor license agreements. See the NOTICE file distributed with
+ * * this work for additional information regarding copyright ownership.
+ * * The ASF licenses this file to You under the Apache License, Version 2.0
+ * * (the "License"); you may not use this file except in compliance with
+ * * the License. You may obtain a copy of the License at
+ * * <p/>
+ * * http://www.apache.org/licenses/LICENSE-2.0
+ * * <p/>
+ * * Unless required by applicable law or agreed to in writing, software
+ * * distributed under the License is distributed on an "AS IS" BASIS,
+ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * * See the License for the specific language governing permissions and
+ * * limitations under the License.
+ *
+ */
+
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import org.apache.eagle.security.hdfs.HDFSAuditLogObject;
+import org.apache.eagle.security.hdfs.HDFSAuditLogParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Since 8/10/16.
+ */
+public class HdfsAuditLogParserBolt extends BaseRichBolt {
+ private static Logger LOG = LoggerFactory.getLogger(HdfsAuditLogParserBolt.class);
+ private OutputCollector collector;
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ this.collector = collector;
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ String logLine = input.getString(0);
+
+ HDFSAuditLogParser parser = new HDFSAuditLogParser();
+ HDFSAuditLogObject entity = null;
+ try{
+ entity = parser.parse(logLine);
+ Map<String, Object> map = new TreeMap<String, Object>();
+ map.put("src", entity.src);
+ map.put("dst", entity.dst);
+ map.put("host", entity.host);
+ map.put("timestamp", entity.timestamp);
+ map.put("allowed", entity.allowed);
+ map.put("user", entity.user);
+ map.put("cmd", entity.cmd);
+ collector.emit(Arrays.asList(map));
+ }catch(Exception ex){
+ LOG.error("Failing parse audit log message", ex);
+ }finally {
+ collector.ack(input);
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("f1"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogProcessorMain.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogProcessorMain.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogProcessorMain.java
deleted file mode 100644
index 60b0e36..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/HdfsAuditLogProcessorMain.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- *
- * * Licensed to the Apache Software Foundation (ASF) under one or more
- * * contributor license agreements. See the NOTICE file distributed with
- * * this work for additional information regarding copyright ownership.
- * * The ASF licenses this file to You under the Apache License, Version 2.0
- * * (the "License"); you may not use this file except in compliance with
- * * the License. You may obtain a copy of the License at
- * *
- * * http://www.apache.org/licenses/LICENSE-2.0
- * *
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- *
- */
-package org.apache.eagle.security.auditlog;
-
-import backtype.storm.spout.SchemeAsMultiScheme;
-import com.typesafe.config.Config;
-import org.apache.commons.lang3.time.DateUtils;
-import org.apache.eagle.common.config.EagleConfigConstants;
-import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutProvider;
-import org.apache.eagle.dataproc.impl.storm.kafka.KafkaSourcedSpoutScheme;
-import org.apache.eagle.datastream.ExecutionEnvironments;
-import org.apache.eagle.datastream.core.StreamProducer;
-import org.apache.eagle.datastream.storm.StormExecutionEnvironment;
-import org.apache.eagle.partition.DataDistributionDao;
-import org.apache.eagle.partition.PartitionAlgorithm;
-import org.apache.eagle.partition.PartitionStrategy;
-import org.apache.eagle.partition.PartitionStrategyImpl;
-import org.apache.eagle.security.partition.DataDistributionDaoImpl;
-import org.apache.eagle.security.partition.GreedyPartitionAlgorithm;
-
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-public class HdfsAuditLogProcessorMain {
- public static PartitionStrategy createStrategy(Config config) {
- // TODO: Refactor configuration structure to avoid repeated config processing configure ~ hao
- String host = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.HOST);
- Integer port = config.getInt(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PORT);
- String username = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.USERNAME);
- String password = config.getString(EagleConfigConstants.EAGLE_PROPS + "." + EagleConfigConstants.EAGLE_SERVICE + "." + EagleConfigConstants.PASSWORD);
- String topic = config.getString("dataSourceConfig.topic");
- DataDistributionDao dao = new DataDistributionDaoImpl(host, port, username, password, topic);
- PartitionAlgorithm algorithm = new GreedyPartitionAlgorithm();
- String key1 = EagleConfigConstants.EAGLE_PROPS + ".partitionRefreshIntervalInMin";
- Integer partitionRefreshIntervalInMin = config.hasPath(key1) ? config.getInt(key1) : 60;
- String key2 = EagleConfigConstants.EAGLE_PROPS + ".kafkaStatisticRangeInMin";
- Integer kafkaStatisticRangeInMin = config.hasPath(key2) ? config.getInt(key2) : 60;
- PartitionStrategy strategy = new PartitionStrategyImpl(dao, algorithm, partitionRefreshIntervalInMin * DateUtils.MILLIS_PER_MINUTE, kafkaStatisticRangeInMin * DateUtils.MILLIS_PER_MINUTE);
- return strategy;
- }
-
- public static KafkaSourcedSpoutProvider createProvider(Config config) {
- String deserClsName = config.getString("dataSourceConfig.deserializerClass");
- final KafkaSourcedSpoutScheme scheme = new KafkaSourcedSpoutScheme(deserClsName, config) {
- @Override
- public List<Object> deserialize(byte[] ser) {
- Object tmp = deserializer.deserialize(ser);
- Map<String, Object> map = (Map<String, Object>)tmp;
- if(tmp == null) return null;
- return Arrays.asList(map.get("user"), tmp);
- }
- };
-
- KafkaSourcedSpoutProvider provider = new KafkaSourcedSpoutProvider() {
- @Override
- public SchemeAsMultiScheme getStreamScheme(String deserClsName, Config context) {
- return new SchemeAsMultiScheme(scheme);
- }
- };
- return provider;
- }
-
- @SuppressWarnings("unchecked")
- public static void execWithDefaultPartition(StormExecutionEnvironment env, KafkaSourcedSpoutProvider provider) {
- StreamProducer source = env.fromSpout(provider).withOutputFields(2).nameAs("kafkaMsgConsumer").groupBy(Arrays.asList(0));
- //StreamProducer reassembler = source.flatMap(new HdfsUserCommandReassembler()).groupBy(Arrays.asList(0));
- //source.streamUnion(reassembler)
- source.flatMap(new FileSensitivityDataJoinExecutor()).groupBy(Arrays.asList(0))
- .flatMap(new IPZoneDataJoinExecutor())
- .alertWithConsumer("hdfsAuditLogEventStream", "hdfsAuditLogAlertExecutor");
- env.execute();
- }
-
- @SuppressWarnings("unchecked")
- public static void execWithBalancedPartition(StormExecutionEnvironment env, KafkaSourcedSpoutProvider provider) {
- PartitionStrategy strategy = createStrategy(env.getConfig());
- StreamProducer source = env.fromSpout(provider).withOutputFields(2).nameAs("kafkaMsgConsumer").groupBy(strategy);
- //StreamProducer reassembler = source.flatMap(new HdfsUserCommandReassembler()).groupBy(Arrays.asList(0));
- //source.streamUnion(reassembler)
- source.flatMap(new FileSensitivityDataJoinExecutor()).groupBy(Arrays.asList(0))
- .flatMap(new IPZoneDataJoinExecutor())
- .alertWithConsumer("hdfsAuditLogEventStream", "hdfsAuditLogAlertExecutor");
- env.execute();
- }
-
- public static void main(String[] args) throws Exception{
- StormExecutionEnvironment env = ExecutionEnvironments.getStorm(args);
- Config config = env.getConfig();
- KafkaSourcedSpoutProvider provider = createProvider(env.getConfig());
- Boolean balancePartition = config.hasPath("eagleProps.balancePartitionEnabled") && config.getBoolean("eagleProps.balancePartitionEnabled");
- if (balancePartition) {
- execWithBalancedPartition(env, provider);
- } else {
- execWithDefaultPartition(env, provider);
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinBolt.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinBolt.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinBolt.java
new file mode 100644
index 0000000..d02f959
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinBolt.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.security.auditlog;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import com.typesafe.config.Config;
+import org.apache.eagle.datastream.Collector;
+import org.apache.eagle.datastream.JavaStormStreamExecutor2;
+import org.apache.eagle.security.auditlog.timer.FileSensitivityPollingJob;
+import org.apache.eagle.security.auditlog.timer.IPZonePollingJob;
+import org.apache.eagle.security.entity.IPZoneEntity;
+import org.apache.eagle.security.util.ExternalDataCache;
+import org.apache.eagle.security.util.ExternalDataJoiner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import scala.Tuple2;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class IPZoneDataJoinBolt extends BaseRichBolt {
+ private static final Logger LOG = LoggerFactory.getLogger(IPZoneDataJoinBolt.class);
+ private Config config;
+ private OutputCollector collector;
+
+ public IPZoneDataJoinBolt(Config config){
+ this.config = config;
+ }
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ this.collector = collector;
+ // start ipzone data polling
+ try{
+ ExternalDataJoiner joiner = new ExternalDataJoiner(IPZonePollingJob.class, config, context.getThisComponentId() + "." + context.getThisTaskIndex());
+ joiner.start();
+ }catch(Exception ex){
+ LOG.error("Fail bring up quartz scheduler", ex);
+ throw new IllegalStateException(ex);
+ }
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ try {
+ Map<String, Object> toBeCopied = (Map<String, Object>) input.getValue(1);
+ Map<String, Object> event = new TreeMap<String, Object>(toBeCopied); // shallow copy
+ Map<String, IPZoneEntity> map = (Map<String, IPZoneEntity>) ExternalDataCache.getInstance().getJobResult(IPZonePollingJob.class);
+ IPZoneEntity e = null;
+ if (map != null) {
+ e = map.get(event.get("host"));
+ }
+ event.put("securityZone", e == null ? "NA" : e.getSecurityZone());
+ if (LOG.isDebugEnabled()) LOG.debug("After IP zone lookup: " + event);
+ collector.emit(Arrays.asList(event.get("user"), event));
+ }catch(Exception ex){
+ LOG.error("error joining data, ignore it", ex);
+ }finally {
+ collector.ack(input);
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("user", "message"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinExecutor.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinExecutor.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinExecutor.java
deleted file mode 100644
index d633dcd..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/IPZoneDataJoinExecutor.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.security.auditlog;
-
-import com.typesafe.config.Config;
-import org.apache.eagle.datastream.Collector;
-import org.apache.eagle.datastream.JavaStormStreamExecutor2;
-import org.apache.eagle.security.auditlog.timer.IPZonePollingJob;
-import org.apache.eagle.security.entity.IPZoneEntity;
-import org.apache.eagle.security.util.ExternalDataCache;
-import org.apache.eagle.security.util.ExternalDataJoiner;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import scala.Tuple2;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-public class IPZoneDataJoinExecutor extends JavaStormStreamExecutor2<String, Map> {
- private static final Logger LOG = LoggerFactory.getLogger(IPZoneDataJoinExecutor.class);
- private Config config;
-
- @Override
- public void prepareConfig(Config config) {
- this.config = config;
- }
-
- @Override
- public void init() {
- // start IPZone data polling
- try{
- ExternalDataJoiner joiner = new ExternalDataJoiner(IPZonePollingJob.class, config, "1");
- joiner.start();
- }catch(Exception ex){
- LOG.error("Fail bring up quartz scheduler", ex);
- throw new IllegalStateException(ex);
- }
- }
-
- @Override
- public void flatMap(java.util.List<Object> input, Collector<Tuple2<String, Map>> outputCollector){
- Map<String, Object> toBeCopied = (Map<String, Object>)input.get(1);
- Map<String, Object> event = new TreeMap<String, Object>(toBeCopied); // shallow copy
- Map<String, IPZoneEntity> map = (Map<String, IPZoneEntity>) ExternalDataCache.getInstance().getJobResult(IPZonePollingJob.class);
- IPZoneEntity e = null;
- if(map != null){
- e = map.get(event.get("host"));
- }
- event.put("securityZone", e == null ? "NA" : e.getSecurityZone());
- if(LOG.isDebugEnabled()) LOG.debug("After IP zone lookup: " + event);
- outputCollector.collect(new Tuple2(event.get("user"), event));
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
index a4fed79..375edc7 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/FileSensitivityPollingJob.java
@@ -16,10 +16,14 @@
*/
package org.apache.eagle.security.auditlog.timer;
+import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.eagle.common.config.EagleConfigConstants;
+import org.apache.eagle.security.service.HdfsSensitivityEntity;
+import org.apache.eagle.security.service.IMetadataServiceClient;
+import org.apache.eagle.security.service.MetadataServiceClientImpl;
import org.apache.eagle.security.util.ExternalDataCache;
import org.apache.eagle.security.entity.FileSensitivityAPIEntity;
import org.quartz.Job;
@@ -29,9 +33,6 @@ import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
-import org.apache.eagle.service.client.IEagleServiceClient;
-import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
import com.google.common.base.Function;
import com.google.common.collect.Maps;
@@ -43,15 +44,15 @@ public class FileSensitivityPollingJob implements Job{
throws JobExecutionException {
JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
try{
- List<FileSensitivityAPIEntity> ipZones = load(jobDataMap);
- if(ipZones == null){
+ Collection<HdfsSensitivityEntity> sensitivityAPIEntities = load(jobDataMap);
+ if(sensitivityAPIEntities == null){
LOG.warn("File sensitivity information is empty");
return;
}
- Map<String, FileSensitivityAPIEntity> map = Maps.uniqueIndex(ipZones, new Function<FileSensitivityAPIEntity, String>(){
+ Map<String, HdfsSensitivityEntity> map = Maps.uniqueIndex(sensitivityAPIEntities, new Function<HdfsSensitivityEntity, String>(){
@Override
- public String apply(FileSensitivityAPIEntity input) {
- return input.getTags().get("filedir");
+ public String apply(HdfsSensitivityEntity input) {
+ return input.getFiledir();
}
});
ExternalDataCache.getInstance().setJobResult(getClass(), map);
@@ -60,7 +61,7 @@ public class FileSensitivityPollingJob implements Job{
}
}
- private List<FileSensitivityAPIEntity> load(JobDataMap jobDataMap) throws Exception{
+ private Collection<HdfsSensitivityEntity> load(JobDataMap jobDataMap) throws Exception{
Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
@@ -68,15 +69,7 @@ public class FileSensitivityPollingJob implements Job{
String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
// load from eagle database
LOG.info("Load file sensitivity information from eagle service " + eagleServiceHost + ":" + eagleServicePort);
- IEagleServiceClient client = new EagleServiceClientImpl(eagleServiceHost, eagleServicePort, username, password);
- String query = "FileSensitivityService[]{*}";
- GenericServiceAPIResponseEntity<FileSensitivityAPIEntity> response = client.search()
- .pageSize(Integer.MAX_VALUE)
- .query(query)
- .send();
- client.close();
- if(response.getException() != null)
- throw new IllegalStateException(response.getException());
- return response.getObj();
+ IMetadataServiceClient client = new MetadataServiceClientImpl(eagleServiceHost, eagleServicePort, "/rest");
+ return client.listHdfsSensitivities();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
index 2f7efc8..dc80eb9 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/java/org/apache/eagle/security/auditlog/timer/IPZonePollingJob.java
@@ -16,10 +16,13 @@
*/
package org.apache.eagle.security.auditlog.timer;
+import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.eagle.common.config.EagleConfigConstants;
+import org.apache.eagle.security.service.IMetadataServiceClient;
+import org.apache.eagle.security.service.MetadataServiceClientImpl;
import org.apache.eagle.security.util.ExternalDataCache;
import org.quartz.Job;
import org.quartz.JobDataMap;
@@ -29,7 +32,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
-import org.apache.eagle.security.entity.IPZoneEntity;
+import org.apache.eagle.security.service.IPZoneEntity;
import org.apache.eagle.service.client.IEagleServiceClient;
import org.apache.eagle.service.client.impl.EagleServiceClientImpl;
import com.google.common.base.Function;
@@ -44,7 +47,7 @@ public class IPZonePollingJob implements Job{
throws JobExecutionException {
JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
try{
- List<IPZoneEntity> ipZones = load(jobDataMap);
+ Collection<IPZoneEntity> ipZones = load(jobDataMap);
if(ipZones == null){
LOG.warn("Ipzone information is empty");
return;
@@ -52,7 +55,7 @@ public class IPZonePollingJob implements Job{
Map<String, IPZoneEntity> map = Maps.uniqueIndex(ipZones, new Function<IPZoneEntity, String>(){
@Override
public String apply(IPZoneEntity input) {
- return input.getTags().get("iphost");
+ return input.getIphost();
}
});
ExternalDataCache.getInstance().setJobResult(getClass(), map);
@@ -61,7 +64,7 @@ public class IPZonePollingJob implements Job{
}
}
- private List<IPZoneEntity> load(JobDataMap jobDataMap) throws Exception{
+ private Collection<IPZoneEntity> load(JobDataMap jobDataMap) throws Exception{
Map<String, Object> map = (Map<String,Object>)jobDataMap.get(EagleConfigConstants.EAGLE_SERVICE);
String eagleServiceHost = (String)map.get(EagleConfigConstants.HOST);
Integer eagleServicePort = Integer.parseInt(map.get(EagleConfigConstants.PORT).toString());
@@ -69,15 +72,7 @@ public class IPZonePollingJob implements Job{
String password = map.containsKey(EagleConfigConstants.PASSWORD) ? (String)map.get(EagleConfigConstants.PASSWORD) : null;
// load from eagle database
LOG.info("Load ip zone information from eagle service " + eagleServiceHost + ":" + eagleServicePort);
- IEagleServiceClient client = new EagleServiceClientImpl(eagleServiceHost, eagleServicePort, username, password);
- String query = "IPZoneService[]{*}";
- GenericServiceAPIResponseEntity<IPZoneEntity> response = client.search()
- .pageSize(Integer.MAX_VALUE)
- .query(query)
- .send();
- client.close();
- if(response.getException() != null)
- throw new IllegalStateException(response.getException());
- return response.getObj();
+ IMetadataServiceClient client = new MetadataServiceClientImpl(eagleServiceHost, eagleServicePort, "/rest");
+ return client.listIPZones();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
new file mode 100644
index 0000000..dadab98
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
@@ -0,0 +1,247 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ ~ /*
+ ~ *
+ ~ * * Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ * * contributor license agreements. See the NOTICE file distributed with
+ ~ * * this work for additional information regarding copyright ownership.
+ ~ * * The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ * * (the "License"); you may not use this file except in compliance with
+ ~ * * the License. You may obtain a copy of the License at
+ ~ * * <p/>
+ ~ * * http://www.apache.org/licenses/LICENSE-2.0
+ ~ * * <p/>
+ ~ * * Unless required by applicable law or agreed to in writing, software
+ ~ * * distributed under the License is distributed on an "AS IS" BASIS,
+ ~ * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ * * See the License for the specific language governing permissions and
+ ~ * * limitations under the License.
+ ~ *
+ ~ */
+ -->
+
+<application>
+ <type>HdfsAuditLogApplication</type>
+ <name>Hdfs Audit Log Monitoring Application</name>
+ <version>0.5.0-incubating</version>
+ <appClass>org.apache.eagle.security.auditlog.HdfsAuditLogApplication</appClass>
+ <viewPath>/apps/example</viewPath>
+ <configuration>
+ <property>
+ <name>dataSourceConfig.topic</name>
+ <displayName>dataSourceConfig.topic</displayName>
+ <value>hdfs_audit_log</value>
+ <description>data source topic</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.zkConnection</name>
+ <displayName>dataSourceConfig.zkConnection</displayName>
+ <value>server.eagle.apache.org</value>
+ <description>zk connection</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.zkConnectionTimeoutMS</name>
+ <displayName>dataSourceConfig.zkConnectionTimeoutMS</displayName>
+ <value>15000</value>
+ <description>zk connection timeout in milliseconds</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.fetchSize</name>
+ <displayName>dataSourceConfig.fetchSize</displayName>
+ <value>1048586</value>
+ <description>kafka fetch size</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionZKServers</name>
+ <displayName>dataSourceConfig.transactionZKServers</displayName>
+ <value>server.eagle.apache.org</value>
+ <description>zookeeper server for offset transaction</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionZKPort</name>
+ <displayName>dataSourceConfig.transactionZKPort</displayName>
+ <value>2181</value>
+ <description>zookeeper server port for offset transaction</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionZKRoot</name>
+ <displayName>dataSourceConfig.transactionZKRoot</displayName>
+ <value>/consumers</value>
+ <description>offset transaction root</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.consumerGroupId</name>
+ <displayName>dataSourceConfig.consumerGroupId</displayName>
+ <value>eagle.hdfsaudit.consumer</value>
+ <description>kafka consumer group Id</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionStateUpdateMS</name>
+ <displayName>dataSourceConfig.transactionStateUpdateMS</displayName>
+ <value>2000</value>
+ <description>zk upate</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.schemeCls</name>
+ <displayName>dataSourceConfig.schemeCls</displayName>
+ <value>storm.kafka.StringScheme</value>
+ <description>scheme class</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionZKPort</name>
+ <displayName>dataSourceConfig.transactionZKPort</displayName>
+ <value>2181</value>
+ <description>zookeeper server port for offset transaction</description>
+ </property>
+ <property>
+ <name>dataSourceConfig.transactionZKPort</name>
+ <displayName>dataSourceConfig.transactionZKPort</displayName>
+ <value>2181</value>
+ <description>zookeeper server port for offset transaction</description>
+ </property>
+ <property>
+ <name>topology.numOfSpoutTasks</name>
+ <displayName>topology.numOfSpoutTasks</displayName>
+ <value>2</value>
+ <description>number of spout tasks</description>
+ </property>
+ <property>
+ <name>topology.numOfParserTasks</name>
+ <displayName>topology.numOfParserTasks</displayName>
+ <value>2</value>
+ <description>number of parser tasks</description>
+ </property>
+ <property>
+ <name>topology.numOfJoinTasks</name>
+ <displayName>topology.numOfJoinTasks</displayName>
+ <value>2</value>
+ <description>number of external join tasks</description>
+ </property>
+ <property>
+ <name>topology.numOfSinkTasks</name>
+ <displayName>topology.numOfSinkTasks</displayName>
+ <value>2</value>
+ <description>number of sink tasks</description>
+ </property>
+ <property>
+ <name>eagleProps.dataJoinPollIntervalSec</name>
+ <displayName>eagleProps.dataJoinPollIntervalSec</displayName>
+ <value>30</value>
+ <description>interval in seconds for polling</description>
+ </property>
+ <property>
+ <name>eagleProps.eagleService.host</name>
+ <displayName>eagleProps.eagleService.host</displayName>
+ <value>localhost</value>
+ <description>eagle service host</description>
+ </property>
+ <property>
+ <name>eagleProps.eagleService.port</name>
+ <displayName>eagleProps.eagleService.port</displayName>
+ <value>8080</value>
+ <description>eagle service port</description>
+ </property>
+ <property>
+ <name>eagleProps.eagleService.username</name>
+ <displayName>eagleProps.eagleService.username</displayName>
+ <value>admin</value>
+ <description>eagle service username</description>
+ </property>
+ <property>
+ <name>eagleProps.eagleService.password</name>
+ <displayName>eagleProps.eagleService.password</displayName>
+ <value>secret</value>
+ <description>eagle service password</description>
+ </property>
+ <property>
+ <name>dataSinkConfig.topic</name>
+ <displayName>dataSinkConfig.topic</displayName>
+ <value>hdfs_audit_log_parsed</value>
+ <description>topic for kafka data sink</description>
+ </property>
+ <property>
+ <name>dataSinkConfig.brokerList</name>
+ <displayName>dataSinkConfig.brokerList</displayName>
+ <value>sandbox.hortonworks.com:6667</value>
+ <description>kafka broker list</description>
+ </property>
+ <property>
+ <name>dataSinkConfig.serializerClass</name>
+ <displayName>dataSinkConfig.serializerClass</displayName>
+ <value>kafka.serializer.StringEncoder</value>
+ <description>serializer class for Kafka message value</description>
+ </property>
+ <property>
+ <name>dataSinkConfig.keySerializerClass</name>
+ <displayName>dataSinkConfig.keySerializerClass</displayName>
+ <value>kafka.serializer.StringEncoder</value>
+ <description>serializer class for Kafka message key</description>
+ </property>
+
+ <!-- properties for hdfs file system access and attribute resolver-->
+ <property>
+ <name>fs.defaultFS</name>
+ <displayName>fs.defaultFS</displayName>
+ <value>hdfs://server.eagle.apache.org:8020</value>
+ <description>hdfs endpoint</description>
+ </property>
+ </configuration>
+ <streams>
+ <stream>
+ <streamId>hdfs_audit_log_stream</streamId>
+ <description>Hdfs Audit Log Stream</description>
+ <validate>true</validate>
+ <timeseries>true</timeseries>
+ <columns>
+ <column>
+ <name>action</name>
+ <type>string</type>
+ </column>
+ <column>
+ <name>host</name>
+ <type>string</type>
+ </column>
+ <column>
+ <name>status</name>
+ <type>string</type>
+ </column>
+ <column>
+ <name>timestamp</name>
+ <type>long</type>
+ </column>
+ </columns>
+ </stream>
+ </streams>
+ <docs>
+ <install>
+# Step 1: Create source kafka topic named "${site}_example_source_topic"
+
+./bin/kafka-topics.sh --create --topic example_source_topic --replication-factor 1 --partitions 1
+
+# Step 2: Set up data collector to flow data into kafka topic in
+
+./bin/logstash -f log_collector.conf
+
+## `log_collector.conf` sample as following:
+
+input {
+
+}
+filter {
+
+}
+output{
+
+}
+
+# Step 3: start application
+
+# Step 4: monitor with featured portal or alert with policies
+ </install>
+ <uninstall>
+# Step 1: stop and uninstall application
+# Step 2: delete kafka topic named "${site}_example_source_topic"
+# Step 3: stop logstash
+ </uninstall>
+ </docs>
+</application>
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
new file mode 100644
index 0000000..42cf62b
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/services/org.apache.eagle.app.spi.ApplicationProvider
@@ -0,0 +1,37 @@
+#
+# /*
+# *
+# * * Licensed to the Apache Software Foundation (ASF) under one or more
+# * * contributor license agreements. See the NOTICE file distributed with
+# * * this work for additional information regarding copyright ownership.
+# * * The ASF licenses this file to You under the Apache License, Version 2.0
+# * * (the "License"); you may not use this file except in compliance with
+# * * the License. You may obtain a copy of the License at
+# * * <p/>
+# * * http://www.apache.org/licenses/LICENSE-2.0
+# * * <p/>
+# * * Unless required by applicable law or agreed to in writing, software
+# * * distributed under the License is distributed on an "AS IS" BASIS,
+# * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * * See the License for the specific language governing permissions and
+# * * limitations under the License.
+# *
+# */
+#
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
index 3c3572e..efa6467 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/application.conf
@@ -14,56 +14,42 @@
# limitations under the License.
{
- "envContextConfig" : {
- "env" : "storm",
- "mode" : "local",
- "topologyName" : "auditLogProcessTopology",
- "stormConfigFile" : "security-auditlog-storm.yaml",
- "parallelismConfig" : {
- "kafkaMsgConsumer" : 1,
- "hdfsAuditLogAlertExecutor*" : 1
- }
+ "appId" : "HdfsAuditLogApp",
+ "mode" : "LOCAL",
+ "siteId" : "testsite",
+ "topology" : {
+ "numOfTotalWorkers" : 2,
+ "numOfSpoutTasks" : 2,
+ "numOfParserTasks" : 2,
+ "numOfSensitivityJoinTasks" : 2,
+ "numOfIPZoneJoinTasks" : 2,
+ "numOfSinkTasks" : 2
},
"dataSourceConfig": {
- "topic" : "sandbox_hdfs_audit_log",
- "zkConnection" : "sandbox.hortonworks.com:2181",
+ "topic" : "hdfs_audit_log",
+ "zkConnection" : "server.eagle.apache.org:2181",
"zkConnectionTimeoutMS" : 15000,
"consumerGroupId" : "EagleConsumer",
"fetchSize" : 1048586,
- "deserializerClass" : "org.apache.eagle.security.auditlog.HdfsAuditLogKafkaDeserializer",
- "transactionZKServers" : "sandbox.hortonworks.com",
+ "transactionZKServers" : "server.eagle.apache.org",
"transactionZKPort" : 2181,
"transactionZKRoot" : "/consumers",
- "transactionStateUpdateMS" : 2000
- },
- "alertExecutorConfigs" : {
- "hdfsAuditLogAlertExecutor" : {
- "parallelism" : 1,
- "partitioner" : "org.apache.eagle.policy.DefaultPolicyPartitioner"
- "needValidation" : "true"
- }
+ "transactionStateUpdateMS" : 2000,
+ "schemeCls" : "storm.kafka.StringScheme"
},
"eagleProps" : {
- "site" : "sandbox",
- "application": "hdfsAuditLog",
"dataJoinPollIntervalSec" : 30,
- "mailHost" : "mailHost.com",
- "mailSmtpPort":"25",
- "mailDebug" : "true",
- "balancePartitionEnabled" : true,
- #"partitionRefreshIntervalInMin" : 60,
- #"kafkaStatisticRangeInMin" : 60,
"eagleService": {
"host": "localhost",
- "port": 38080,
+ "port": 9090,
"username": "admin",
"password": "secret"
- },
- "readHdfsUserCommandPatternFrom" : "file"
+ }
},
- "dynamicConfigSource" : {
- "enabled" : true,
- "initDelayMillis" : 0,
- "delayMillis" : 30000
+ "dataSinkConfig": {
+ "topic" : "hdfs_audit_log_parsed",
+ "brokerList" : "server.eagle.apache.org:6667",
+ "serializerClass" : "kafka.serializer.StringEncoder",
+ "keySerializerClass" : "kafka.serializer.StringEncoder"
}
-}
\ No newline at end of file
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/log4j.properties b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/log4j.properties
index 4a22987..e442c46 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/log4j.properties
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/log4j.properties
@@ -13,17 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=DEBUG, stdout, DRFA
+log4j.rootLogger=INFO, stdout, DRFA
eagle.log.dir=./logs
eagle.log.file=eagle.log
-#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinExecutor=DEBUG
-#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinExecutor=DEBUG
-log4j.logger.org.apache.eagle.security.auditlog.HdfsUserCommandReassembler=DEBUG
-#log4j.logger.org.apache.eagle.executor.AlertExecutor=DEBUG
-# standard output
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %p [%t] %c{2}[%L]: %m%n
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/security-auditlog-storm.yaml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/security-auditlog-storm.yaml b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/security-auditlog-storm.yaml
deleted file mode 100644
index a68a323..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/security-auditlog-storm.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-topology.workers: 1
-topology.acker.executors: 1
-topology.tasks: 1
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/test/java/org/apache/eagle/security/auditlog/TestUserCommandReassembler.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/test/java/org/apache/eagle/security/auditlog/TestUserCommandReassembler.java b/eagle-security/eagle-security-hdfs-auditlog/src/test/java/org/apache/eagle/security/auditlog/TestUserCommandReassembler.java
index a19e9b6..753eb41 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/test/java/org/apache/eagle/security/auditlog/TestUserCommandReassembler.java
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/test/java/org/apache/eagle/security/auditlog/TestUserCommandReassembler.java
@@ -22,6 +22,8 @@ package org.apache.eagle.security.auditlog;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.eagle.datastream.Collector;
+import org.apache.eagle.security.hdfs.HDFSAuditLogObject;
+import org.apache.eagle.security.hdfs.HDFSAuditLogParser;
import org.junit.Assert;
import org.junit.Test;
import scala.Tuple2;
@@ -32,9 +34,18 @@ import java.util.*;
* Created by yonzhang on 11/24/15.
*/
public class TestUserCommandReassembler {
- private Map<String, Object> parseEvent(String log){
- HdfsAuditLogKafkaDeserializer deserializer = new HdfsAuditLogKafkaDeserializer(null);
- return (Map<String, Object>)deserializer.deserialize(log.getBytes());
+ private Map parseEvent(String log) throws Exception{
+ HDFSAuditLogParser deserializer = new HDFSAuditLogParser();
+ HDFSAuditLogObject entity = deserializer.parse(log);
+ Map<String, Object> map = new TreeMap<String, Object>();
+ map.put("src", entity.src);
+ map.put("dst", entity.dst);
+ map.put("host", entity.host);
+ map.put("timestamp", entity.timestamp);
+ map.put("allowed", entity.allowed);
+ map.put("user", entity.user);
+ map.put("cmd", entity.cmd);
+ return map;
}
/**
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/test/resources/securityAuditLog
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/test/resources/securityAuditLog b/eagle-security/eagle-security-hdfs-auditlog/src/test/resources/securityAuditLog
new file mode 100644
index 0000000..361304d
--- /dev/null
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/test/resources/securityAuditLog
@@ -0,0 +1,17 @@
+2015-04-24 12:49:16,145 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/tmp dst=null perm=null proto=rpc
+2015-04-24 12:49:16,192 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/user/ambari-qa dst=null perm=null proto=rpc
+2015-04-24 12:49:20,518 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/tmp dst=null perm=null proto=rpc
+2015-04-24 12:49:20,570 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/user/ambari-qa dst=null perm=null proto=rpc
+2015-04-24 12:49:20,587 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/ dst=null perm=null proto=rpc
+2015-04-24 12:49:20,664 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=mkdirs src=/tmp dst=null perm=hdfs:hdfs:rwxr-xr-x proto=rpc
+2015-04-24 12:49:20,677 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/user dst=null perm=null proto=rpc
+2015-04-24 12:49:20,686 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=mkdirs src=/user/ambari-qa dst=null perm=hdfs:hdfs:rwxr-xr-x proto=rpc
+2015-04-24 12:49:24,828 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/tmp dst=null perm=null proto=rpc
+2015-04-24 12:49:24,915 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=setPermission src=/tmp dst=null perm=hdfs:hdfs:rwxrwxrwx proto=rpc
+2015-04-24 12:49:29,375 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/user/ambari-qa dst=null perm=null proto=rpc
+2015-04-24 12:49:29,453 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=setPermission src=/user/ambari-qa dst=null perm=hdfs:hdfs:rwxrwx--- proto=rpc
+2015-04-24 12:49:33,542 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/tmp dst=null perm=null proto=rpc
+2015-04-24 12:49:37,844 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/user/ambari-qa dst=null perm=null proto=rpc
+2015-04-24 12:49:37,929 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=setOwner src=/user/ambari-qa dst=null perm=ambari-qa:hdfs:rwxrwx--- proto=rpc
+2015-04-24 12:51:31,798 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/apps/hbase/data dst=null perm=null proto=rpc
+2015-04-24 12:51:31,863 INFO FSNamesystem.audit: allowed=true ugi=hdfs (auth:SIMPLE) ip=/10.0.2.15 cmd=getfileinfo src=/apps/hbase/staging dst=null perm=null proto=rpc
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSCommandResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSCommandResolver.java b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSCommandResolver.java
index b2a2671..a0a230a 100644
--- a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSCommandResolver.java
+++ b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSCommandResolver.java
@@ -16,6 +16,8 @@
*/
package org.apache.eagle.service.security.hdfs.resolver;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.service.alert.resolver.AttributeResolvable;
import org.apache.eagle.service.alert.resolver.AttributeResolveException;
import org.apache.eagle.service.alert.resolver.BadAttributeResolveRequestException;
@@ -33,6 +35,10 @@ import java.util.regex.Pattern;
public class HDFSCommandResolver implements AttributeResolvable<GenericAttributeResolveRequest,String> {
private final static Logger LOG = LoggerFactory.getLogger(HDFSCommandResolver.class);
+ public HDFSCommandResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+
+ }
+
private final static String [] cmdStrs = {"open", "create", "append", "delete", "listfileinfo", "rename",
"mkdirs", "listStatus", "setReplication", "setOwner", "setPermission", "setTimes", "setXAttr", "removeXAttr", "getXAttrs",
"contentSummary", "createEncryptionZone", "checkAccess"};
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSResourceResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSResourceResolver.java b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSResourceResolver.java
index 4326c93..370d9a3 100644
--- a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSResourceResolver.java
+++ b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSResourceResolver.java
@@ -18,16 +18,19 @@ package org.apache.eagle.service.security.hdfs.resolver;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.typesafe.config.Config;
-import org.apache.eagle.security.resolver.MetadataAccessConfigRepo;
+import org.apache.eagle.metadata.model.ApplicationEntity;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.service.alert.resolver.AttributeResolvable;
import org.apache.eagle.service.alert.resolver.AttributeResolveException;
import org.apache.eagle.service.alert.resolver.BadAttributeResolveRequestException;
import org.apache.eagle.service.alert.resolver.GenericAttributeResolveRequest;
+import org.apache.eagle.service.security.hdfs.rest.HDFSResourceWebResource;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.slf4j.Logger;
@@ -45,6 +48,12 @@ import org.apache.eagle.service.security.hdfs.HDFSResourceConstants;
*/
public class HDFSResourceResolver implements AttributeResolvable<GenericAttributeResolveRequest,String> {
private final static Logger LOG = LoggerFactory.getLogger(HDFSResourceResolver.class);
+ private ApplicationEntityService entityService;
+
+ public HDFSResourceResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+ this.entityService = entityService;
+ }
+
/**
* HDFS Resource Resolve API
*
@@ -54,10 +63,9 @@ public class HDFSResourceResolver implements AttributeResolvable<GenericAttribu
public List<String> resolve(GenericAttributeResolveRequest request)
throws AttributeResolveException {
List<String> result = new ArrayList<>();
- MetadataAccessConfigRepo repo = new MetadataAccessConfigRepo();
try {
- Config config = repo.getConfig(HDFSResourceConstants.HDFS_APPLICATION, request.getSite().trim());
- Configuration conf = repo.convert(config);
+ Map<String, Object> config = getAppConfig(request.getSite(), HDFSResourceWebResource.HDFS_APPLICATION);
+ Configuration conf = convert(config);
HDFSFileSystem fileSystem = new HDFSFileSystem(conf);
String query = request.getQuery().trim();
List<FileStatus> fileStatuses = null;
@@ -86,6 +94,19 @@ public class HDFSResourceResolver implements AttributeResolvable<GenericAttribu
}
}
+ private Map<String, Object> getAppConfig(String site, String appType){
+ ApplicationEntity entity = entityService.getBySiteIdAndAppType(site, appType);
+ return entity.getConfiguration();
+ }
+
+ private Configuration convert(Map<String, Object> originalConfig) throws Exception {
+ Configuration config = new Configuration();
+ for (Map.Entry<String, Object> entry : originalConfig.entrySet()) {
+ config.set(entry.getKey().toString(), entry.getValue().toString());
+ }
+ return config;
+ }
+
/**
* Validate the Passed Request Object
* It should have Site Id and File Path
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSSensitivityTypeResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSSensitivityTypeResolver.java b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSSensitivityTypeResolver.java
index f1d8808..5f3ec54 100644
--- a/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSSensitivityTypeResolver.java
+++ b/eagle-security/eagle-security-hdfs-web/src/main/java/org/apache/eagle/service/security/hdfs/resolver/HDFSSensitivityTypeResolver.java
@@ -16,6 +16,13 @@
*/
package org.apache.eagle.service.security.hdfs.resolver;
+import com.google.inject.Inject;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
+import org.apache.eagle.security.service.HBaseSensitivityEntity;
+import org.apache.eagle.security.service.HdfsSensitivityEntity;
+import org.apache.eagle.security.service.ISecurityMetadataDAO;
+import org.apache.eagle.security.service.MetadataDaoFactory;
import org.apache.eagle.service.alert.resolver.AttributeResolvable;
import org.apache.eagle.service.alert.resolver.AttributeResolveException;
import org.apache.eagle.service.alert.resolver.BadAttributeResolveRequestException;
@@ -24,16 +31,17 @@ import org.apache.eagle.service.security.hdfs.HDFSResourceSensitivityService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import java.util.regex.Pattern;
public class HDFSSensitivityTypeResolver implements AttributeResolvable<GenericAttributeResolveRequest,String> {
private final static Logger LOG = LoggerFactory.getLogger(HDFSSensitivityTypeResolver.class);
- private HDFSResourceSensitivityService dao = new HDFSResourceSensitivityService();
- private Map<String, Map<String, String>> maps = dao.getAllFileSensitivityMap();
+ private ISecurityMetadataDAO dao;
+ @Inject
+ public HDFSSensitivityTypeResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+ dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
+ }
private final static String SENSITIVETYPE_ATTRIBUTE_RESOLVE_FORMAT_HINT = "Sensitive type should be composed of a-z, A-Z, 0-9 or -";
@@ -41,6 +49,7 @@ public class HDFSSensitivityTypeResolver implements AttributeResolvable<GenericA
String query = request.getQuery().trim();
String site = request.getSite().trim();
List<String> res = new ArrayList<>();
+ Map<String, Map<String, String>> maps = getAllSensitivities();
Map<String, String> map = maps.get(site);
if(map == null) {
@@ -72,4 +81,16 @@ public class HDFSSensitivityTypeResolver implements AttributeResolvable<GenericA
public Class<GenericAttributeResolveRequest> getRequestClass() {
return GenericAttributeResolveRequest.class;
}
+
+ private Map<String, Map<String, String>> getAllSensitivities(){
+ Map<String, Map<String, String>> all = new HashMap<>();
+ Collection<HdfsSensitivityEntity> entities = dao.listHdfsSensitivities();
+ for(HdfsSensitivityEntity entity : entities){
+ if(!all.containsKey(entity.getSite())){
+ all.put(entity.getSite(), new HashMap<>());
+ }
+ all.get(entity.getSite()).put(entity.getFiledir(), entity.getSensitivityType());
+ }
+ return all;
+ }
}
[3/3] incubator-eagle git commit: hdfs, hbase,
mapr app conversion. Author: Yong Zhang.
Closes: #334
Posted by yo...@apache.org.
hdfs, hbase, mapr app conversion
Author: Yong Zhang <yo...@gmail.com>
Closes: #334
Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/27513f7b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/27513f7b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/27513f7b
Branch: refs/heads/develop
Commit: 27513f7b7cf4d3c34523f811ecf4454611012368
Parents: 43d229e
Author: yonzhang <yo...@gmail.com>
Authored: Thu Aug 11 20:44:18 2016 -0700
Committer: yonzhang <yo...@gmail.com>
Committed: Thu Aug 11 20:44:18 2016 -0700
----------------------------------------------------------------------
.../eagle-alert-service/pom.xml | 5 +
.../resolver/AttributeResolveResource.java | 17 +-
.../resolver/AttributeResolverFactory.java | 20 +-
.../src/main/resources/log4j.properties | 4 +-
.../src/main/resources/log4j.properties | 4 +-
.../src/main/resources/log4j.properties | 4 +-
.../security/service/HdfsSensitivityEntity.java | 50 ++++
.../service/IMetadataServiceClient.java | 6 +-
.../eagle/security/service/IPZoneEntity.java | 41 +++
.../security/service/ISecurityMetadataDAO.java | 4 +
.../security/service/InMemMetadataDaoImpl.java | 30 +++
.../service/JDBCSecurityMetadataDAO.java | 196 +++++++++++----
.../security/service/MetadataDaoFactory.java | 46 ++++
.../service/MetadataServiceClientImpl.java | 38 ++-
.../SecurityExternalMetadataResource.java | 82 ++++++
.../hbase/HBaseAuditLogAppProvider.java | 17 --
.../hbase/HbaseAuditLogKafkaDeserializer.java | 62 -----
.../HbaseResourceSensitivityDataJoinBolt.java | 4 +-
.../HbaseResourceSensitivityPollingJob.java | 2 +-
....security.hbase.HBaseAuditLogAppProvider.xml | 18 +-
.../src/main/resources/application.conf | 13 +-
.../hbase/HbaseMetadataBrowseWebResource.java | 10 +-
.../hbase/HbaseSensitivityResourceService.java | 80 ------
.../hbase/SensitivityMetadataResource.java | 58 -----
.../hbase/dao/HbaseMetadataDAOImpl.java | 1 -
.../hbase/resolver/HbaseActionResolver.java | 5 +
.../hbase/resolver/HbaseMetadataResolver.java | 28 ++-
.../hbase/resolver/HbaseRequestResolver.java | 7 +
.../resolver/HbaseSensitivityTypeResolver.java | 26 +-
.../eagle-security-hdfs-auditlog/pom.xml | 5 +
.../run_auditlog_topology.sh | 17 --
.../run_hostname_lookkup.sh | 20 --
.../run_message_producer.sh | 19 --
.../run_message_producer_in_assembly.sh | 16 --
.../assembly/eagle-dam-auditlog-assembly.xml | 63 -----
.../AbstractHdfsAuditLogApplication.java | 117 +++++++++
.../auditlog/FileSensitivityDataJoinBolt.java | 111 +++++++++
.../FileSensitivityDataJoinExecutor.java | 87 -------
.../auditlog/HDFSAuditLogAppProvider.java | 34 +++
.../auditlog/HdfsAuditLogApplication.java | 47 ++++
.../auditlog/HdfsAuditLogKafkaDeserializer.java | 70 ------
.../HdfsAuditLogMonitoringTopology.java | 40 ---
.../auditlog/HdfsAuditLogParserBolt.java | 77 ++++++
.../auditlog/HdfsAuditLogProcessorMain.java | 114 ---------
.../security/auditlog/IPZoneDataJoinBolt.java | 87 +++++++
.../auditlog/IPZoneDataJoinExecutor.java | 67 -----
.../timer/FileSensitivityPollingJob.java | 31 +--
.../auditlog/timer/IPZonePollingJob.java | 23 +-
...ecurity.auditlog.HdfsAuditLogAppProvider.xml | 247 +++++++++++++++++++
...org.apache.eagle.app.spi.ApplicationProvider | 37 +++
.../src/main/resources/application.conf | 60 ++---
.../src/main/resources/log4j.properties | 7 +-
.../main/resources/security-auditlog-storm.yaml | 18 --
.../auditlog/TestUserCommandReassembler.java | 17 +-
.../src/test/resources/securityAuditLog | 17 ++
.../hdfs/resolver/HDFSCommandResolver.java | 6 +
.../hdfs/resolver/HDFSResourceResolver.java | 29 ++-
.../resolver/HDFSSensitivityTypeResolver.java | 31 ++-
.../hdfs/rest/HDFSResourceWebResource.java | 36 ++-
.../auditlog/MapRFSAuditLogApplication.java | 44 ++++
.../auditlog/MapRFSAuditLogParserBolt.java | 76 ++++++
.../auditlog/MapRFSAuditLogProcessorMain.java | 115 ---------
.../src/main/resources/log4j.properties | 2 +-
.../src/main/conf/configuration.yml | 21 --
.../src/main/conf/configuration.yml~HEAD | 21 ++
.../conf/configuration.yml~upstream_develop | 21 ++
eagle-server/pom.xml | 12 +
.../src/main/resources/application.conf | 2 +-
68 files changed, 1657 insertions(+), 1085 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml b/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
index 2f27723..e2a9222 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
+++ b/eagle-core/eagle-alert-parent/eagle-alert-service/pom.xml
@@ -54,6 +54,11 @@
<groupId>org.wso2.siddhi</groupId>
<artifactId>siddhi-extension-string</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.eagle</groupId>
+ <artifactId>eagle-metadata-base</artifactId>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolveResource.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolveResource.java b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolveResource.java
index bcffec1..20207ab 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolveResource.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolveResource.java
@@ -16,9 +16,11 @@
*/
package org.apache.eagle.service.alert.resolver;
+import com.google.inject.Inject;
+import com.typesafe.config.Config;
import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
-import org.apache.eagle.service.common.EagleExceptionWrapper;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import javax.ws.rs.*;
import java.io.InputStream;
@@ -29,6 +31,15 @@ import java.util.List;
*/
@Path("/stream")
public class AttributeResolveResource {
+ private ApplicationEntityService entityService;
+ private Config eagleServerConfig;
+
+ @Inject
+ public AttributeResolveResource(ApplicationEntityService entityService, Config eagleServerConfig){
+ this.entityService = entityService;
+ this.eagleServerConfig = eagleServerConfig;
+ }
+
@POST
@Path("attributeresolve")
@Consumes({"application/json"})
@@ -38,7 +49,7 @@ public class AttributeResolveResource {
GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
try {
if(resolver == null) throw new AttributeResolveException("resolver is null");
- AttributeResolvable resolvable = AttributeResolverFactory.getAttributeResolver(resolver);
+ AttributeResolvable resolvable = AttributeResolverFactory.getAttributeResolver(resolver, entityService, eagleServerConfig);
ObjectMapper objectMapper = new ObjectMapper();
Class<?> resolveRequestClass = resolvable.getRequestClass();
if(resolveRequestClass == null) throw new AttributeResolveException("Request class is null for resolver "+resolver);
@@ -63,7 +74,7 @@ public class AttributeResolveResource {
GenericServiceAPIResponseEntity response = new GenericServiceAPIResponseEntity();
try {
if(resolver == null) throw new AttributeResolveException("resolver is null");
- AttributeResolvable resolvable = AttributeResolverFactory.getAttributeResolver(resolver);
+ AttributeResolvable resolvable = AttributeResolverFactory.getAttributeResolver(resolver, entityService, eagleServerConfig);
Class<?> resolveRequestClass = resolvable.getRequestClass();
if(resolveRequestClass == null) throw new AttributeResolveException("Request class is null for resolver "+resolver);
GenericAttributeResolveRequest resolveRequest = new GenericAttributeResolveRequest(query,site);
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolverFactory.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolverFactory.java b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolverFactory.java
index 4015fb7..a184aa0 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolverFactory.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert-service/src/main/java/org/apache/eagle/service/alert/resolver/AttributeResolverFactory.java
@@ -16,6 +16,9 @@
*/
package org.apache.eagle.service.alert.resolver;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
+
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -25,26 +28,27 @@ import java.util.Map;
* @since 6/16/15
*/
public final class AttributeResolverFactory {
- private final static Map<String,AttributeResolvable> fieldResolvableCache = Collections.synchronizedMap(new HashMap<String,AttributeResolvable>());
- public static AttributeResolvable getAttributeResolver(String fieldResolverName) throws AttributeResolveException {
+ private final static Map<String,AttributeResolvable> fieldResolvableCache = Collections.synchronizedMap(new HashMap<>());
+ public static AttributeResolvable getAttributeResolver(String fieldResolverName,
+ ApplicationEntityService entityService,
+ Config eagleServerConfig) throws AttributeResolveException {
AttributeResolvable instance;
if(fieldResolvableCache.containsKey(fieldResolverName)){
instance = fieldResolvableCache.get(fieldResolverName);
} else {
try {
- instance = (AttributeResolvable) Class.forName(fieldResolverName).newInstance();
+ instance = (AttributeResolvable) Class.forName(fieldResolverName).
+ getConstructor(ApplicationEntityService.class, Config.class).
+ newInstance(entityService, eagleServerConfig);
fieldResolvableCache.put(fieldResolverName, instance);
} catch (ClassNotFoundException e) {
throw new AttributeResolveException("Attribute Resolver in type of "+fieldResolverName+" is not found",e);
} catch (InstantiationException | IllegalAccessException e) {
throw new AttributeResolveException(e);
+ } catch (Exception ex){
+ throw new AttributeResolveException(ex);
}
}
return instance;
}
-
- public static List resolve(String resolver, GenericAttributeResolveRequest request) throws AttributeResolveException {
- AttributeResolvable fieldResolver = getAttributeResolver(resolver);
- return fieldResolver.resolve(request);
- }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/resources/log4j.properties b/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/resources/log4j.properties
index ae58e6b..80e0aba 100644
--- a/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/resources/log4j.properties
+++ b/eagle-core/eagle-data-process/eagle-stream-process-api/src/main/resources/log4j.properties
@@ -19,8 +19,8 @@ eagle.log.dir=./logs
eagle.log.file=eagle.log
-#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinExecutor=DEBUG
-#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinExecutor=DEBUG
+#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinBolt=DEBUG
+#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinBolt=DEBUG
log4j.logger.org.apache.eagle.security.auditlog.HdfsUserCommandReassembler=DEBUG
#log4j.logger.org.apache.eagle.executor.AlertExecutor=DEBUG
# standard output
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-examples/eagle-topology-example/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-examples/eagle-topology-example/src/main/resources/log4j.properties b/eagle-examples/eagle-topology-example/src/main/resources/log4j.properties
index 07f8402..ff98cd3 100644
--- a/eagle-examples/eagle-topology-example/src/main/resources/log4j.properties
+++ b/eagle-examples/eagle-topology-example/src/main/resources/log4j.properties
@@ -19,8 +19,8 @@ eagle.log.dir=./logs
eagle.log.file=eagle.log
-#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinExecutor=DEBUG
-#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinExecutor=DEBUG
+#log4j.logger.org.apache.eagle.security.auditlog.IPZoneDataJoinBolt=DEBUG
+#log4j.logger.org.apache.eagle.security.auditlog.FileSensitivityDataJoinBolt=DEBUG
log4j.logger.org.apache.eagle.security.auditlog.HdfsUserCommandReassembler=DEBUG
#log4j.logger.org.apache.eagle.executor.AlertExecutor=DEBUG
# standard output
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-metric-collection/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-metric-collection/src/main/resources/log4j.properties b/eagle-security/eagle-metric-collection/src/main/resources/log4j.properties
index 8a0919a..6c31ebc 100644
--- a/eagle-security/eagle-metric-collection/src/main/resources/log4j.properties
+++ b/eagle-security/eagle-metric-collection/src/main/resources/log4j.properties
@@ -19,8 +19,8 @@ eagle.log.dir=./logs
eagle.log.file=eagle.log
-#log4j.logger.eagle.security.auditlog.IPZoneDataJoinExecutor=DEBUG
-#log4j.logger.eagle.security.auditlog.FileSensitivityDataJoinExecutor=DEBUG
+#log4j.logger.eagle.security.auditlog.IPZoneDataJoinBolt=DEBUG
+#log4j.logger.eagle.security.auditlog.FileSensitivityDataJoinBolt=DEBUG
#log4j.logger.eagle.executor.AlertExecutor=DEBUG
# standard output
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/HdfsSensitivityEntity.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/HdfsSensitivityEntity.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/HdfsSensitivityEntity.java
new file mode 100644
index 0000000..fe82fd4
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/HdfsSensitivityEntity.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.security.service;
+
+/**
+ * Since 8/10/16.
+ */
+public class HdfsSensitivityEntity {
+ String site;
+ String filedir;
+ String sensitivityType;
+
+ public String getSite() {
+ return site;
+ }
+
+ public void setSite(String site) {
+ this.site = site;
+ }
+
+ public String getFiledir() {
+ return filedir;
+ }
+
+ public void setFiledir(String filedir) {
+ this.filedir = filedir;
+ }
+
+ public String getSensitivityType() {
+ return sensitivityType;
+ }
+
+ public void setSensitivityType(String sensitivityType) {
+ this.sensitivityType = sensitivityType;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IMetadataServiceClient.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IMetadataServiceClient.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IMetadataServiceClient.java
index e3c9f95..79db47e 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IMetadataServiceClient.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IMetadataServiceClient.java
@@ -27,6 +27,10 @@ import java.util.Collection;
* service stub to get metadata from remote metadata service
*/
public interface IMetadataServiceClient extends Closeable, Serializable {
- Collection<HBaseSensitivityEntity> listHBaseSensitivies();
+ Collection<HBaseSensitivityEntity> listHBaseSensitivities();
OpResult addHBaseSensitivity(Collection<HBaseSensitivityEntity> h);
+ Collection<HdfsSensitivityEntity> listHdfsSensitivities();
+ OpResult addHdfsSensitivity(Collection<HdfsSensitivityEntity> h);
+ Collection<IPZoneEntity> listIPZones();
+ OpResult addIPZone(Collection<IPZoneEntity> h);
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IPZoneEntity.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IPZoneEntity.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IPZoneEntity.java
new file mode 100644
index 0000000..1c53e14
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/IPZoneEntity.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.security.service;
+
+/**
+ * Since 8/10/16.
+ */
+public class IPZoneEntity {
+ String iphost;
+ String securityZone;
+
+ public String getIphost() {
+ return iphost;
+ }
+
+ public void setIphost(String iphost) {
+ this.iphost = iphost;
+ }
+
+ public String getSecurityZone() {
+ return securityZone;
+ }
+
+ public void setSecurityZone(String securityZone) {
+ this.securityZone = securityZone;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/ISecurityMetadataDAO.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/ISecurityMetadataDAO.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/ISecurityMetadataDAO.java
index 534fb38..6158bac 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/ISecurityMetadataDAO.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/ISecurityMetadataDAO.java
@@ -25,4 +25,8 @@ import java.util.Collection;
public interface ISecurityMetadataDAO {
Collection<HBaseSensitivityEntity> listHBaseSensitivies();
OpResult addHBaseSensitivity(Collection<HBaseSensitivityEntity> h);
+ Collection<HdfsSensitivityEntity> listHdfsSensitivities();
+ OpResult addHdfsSensitivity(Collection<HdfsSensitivityEntity> h);
+ Collection<IPZoneEntity> listIPZones();
+ OpResult addIPZone(Collection<IPZoneEntity> h);
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/InMemMetadataDaoImpl.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/InMemMetadataDaoImpl.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/InMemMetadataDaoImpl.java
index 27aeb57..1869538 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/InMemMetadataDaoImpl.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/InMemMetadataDaoImpl.java
@@ -37,6 +37,9 @@ public class InMemMetadataDaoImpl implements ISecurityMetadataDAO {
private static final Logger LOG = LoggerFactory.getLogger(InMemMetadataDaoImpl.class);
private Map<Pair<String, String>, HBaseSensitivityEntity> hBaseSensitivityEntities = new HashMap<>();
+ private Map<Pair<String, String>, HdfsSensitivityEntity> hdfsSensitivityEntities = new HashMap<>();
+ private Map<String, IPZoneEntity> ipZones = new HashMap<>();
+
@Inject
public InMemMetadataDaoImpl() {
@@ -55,4 +58,31 @@ public class InMemMetadataDaoImpl implements ISecurityMetadataDAO {
}
return new OpResult();
}
+
+ @Override
+ public Collection<HdfsSensitivityEntity> listHdfsSensitivities() {
+ return hdfsSensitivityEntities.values();
+ }
+
+ @Override
+ public OpResult addHdfsSensitivity(Collection<HdfsSensitivityEntity> h) {
+ for(HdfsSensitivityEntity e : h){
+ Pair p = new ImmutablePair<>(e.getSite(), e.getFiledir());
+ hdfsSensitivityEntities.put(p, e);
+ }
+ return new OpResult();
+ }
+
+ @Override
+ public Collection<IPZoneEntity> listIPZones() {
+ return ipZones.values();
+ }
+
+ @Override
+ public OpResult addIPZone(Collection<IPZoneEntity> h) {
+ for(IPZoneEntity e : h){
+ ipZones.put(e.getIphost(), e);
+ }
+ return new OpResult();
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/JDBCSecurityMetadataDAO.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/JDBCSecurityMetadataDAO.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/JDBCSecurityMetadataDAO.java
index 3dcf50e..679f3d0 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/JDBCSecurityMetadataDAO.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/JDBCSecurityMetadataDAO.java
@@ -19,17 +19,15 @@
package org.apache.eagle.security.service;
-import com.google.inject.Inject;
-import org.apache.eagle.metadata.store.jdbc.JDBCMetadataQueryService;
+import com.typesafe.config.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.sql.DataSource;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
+import java.sql.*;
+import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
+import java.util.function.BiFunction;
+import java.util.function.Function;
/**
* Since 8/8/16.
@@ -37,67 +35,71 @@ import java.util.Collections;
public class JDBCSecurityMetadataDAO implements ISecurityMetadataDAO {
private static final Logger LOG = LoggerFactory.getLogger(JDBCSecurityMetadataDAO.class);
+ private Config config;
/**
* composite primary key: site and hbase_resource
*/
- private final String TABLE_DDL_STATEMENT = "create table if not exists hbase_sensitivity_entity (site varchar(20), hbase_resource varchar(100), sensitivity_type varchar(20), primary key (site, hbase_resource));";
- private final String QUERY_ALL_STATEMENT = "SELECT site, hbase_resource, sensitivity_type FROM hbase_sensitivity_entity";
- private final String INSERT_STATEMENT = "INSERT INTO hbase_sensitivity_entity (site, hbase_resource, sensitivity_type) VALUES (?, ?, ?)";
+ private final String HBASE_QUERY_ALL_STATEMENT = "SELECT site, hbase_resource, sensitivity_type FROM hbase_sensitivity_entity";
+ private final String HBASE_INSERT_STATEMENT = "INSERT INTO hbase_sensitivity_entity (site, hbase_resource, sensitivity_type) VALUES (?, ?, ?)";
- private DataSource dataSource;
- private JDBCMetadataQueryService queryService;
+ private final String HDFS_QUERY_ALL_STATEMENT = "SELECT site, filedir, sensitivity_type FROM hdfs_sensitivity_entity";
+ private final String HDFS_INSERT_STATEMENT = "INSERT INTO hdfs_sensitivity_entity (site, filedir, sensitivity_type) VALUES (?, ?, ?)";
- /**
- * Inject datasource
- *
- * @param dataSource
- */
- @Inject
- public JDBCSecurityMetadataDAO(DataSource dataSource, JDBCMetadataQueryService queryService) {
- this.dataSource = dataSource;
- this.queryService = queryService;
- try {
- queryService.execute(TABLE_DDL_STATEMENT);
- } catch (SQLException e) {
- LOG.error("Unable to create table hbase_sensitivity_entity",e);
- throw new IllegalStateException("Unable to create table hbase_sensitivity_entity",e);
- }
+ private final String IPZONE_QUERY_ALL_STATEMENT = "SELECT iphost, security_zone FROM ip_securityzone";
+ private final String IPZONE_INSERT_STATEMENT = "INSERT INTO ip_securityzone (iphost, security_zone) VALUES (?, ?)";
+
+ private final String HIVE_QUERY_ALL_STATEMENT = "SELECT site, hive_resource, sensitivity_type FROM hive_sensitivity_entity";
+ private final String HIVE_INSERT_STATEMENT = "INSERT INTO hive_sensitivity_entity (site, hive_resource, sensitivity_type) VALUES (?, ?, ?)";
+
+ // get connection url from config
+ public JDBCSecurityMetadataDAO(Config config){
+ this.config = config;
}
- @Override
- public Collection<HBaseSensitivityEntity> listHBaseSensitivies() {
+ private Collection listEntities(String query, Function<ResultSet, Object> selectFun){
+ Connection connection = null;
+ PreparedStatement statement = null;
+ Collection ret = new ArrayList<>();
+ ResultSet rs = null;
try {
- return queryService.query(QUERY_ALL_STATEMENT,(rs -> {
- HBaseSensitivityEntity entity = new HBaseSensitivityEntity();
- entity.setSite(rs.getString(1));
- entity.setHbaseResource(rs.getString(2));
- entity.setSensitivityType(rs.getString(3));
- return entity;
- }));
- } catch (SQLException e) {
- LOG.error("Error in querying all from hbase_sensitivity_entity table", e);
+ connection = getJdbcConnection();
+ statement = connection.prepareStatement(query);
+ rs = statement.executeQuery();
+ while (rs.next()) {
+ ret.add(selectFun.apply(rs));
+ }
+ }catch(Exception e) {
+ LOG.error("error in querying table with query {}", query, e);
+ }finally{
+ try{
+ if(rs != null)
+ rs.close();
+ if(statement != null)
+ statement.close();
+ if(connection != null)
+ connection.close();
+ }catch(Exception ex){
+ LOG.error("error in closing database resources", ex);
+ }
}
- return Collections.emptyList();
+ return ret;
}
- @Override
- public OpResult addHBaseSensitivity(Collection<HBaseSensitivityEntity> h) {
+ private OpResult addEntities(String query, Collection h, BiFunction<Object, PreparedStatement, PreparedStatement> setFunc){
Connection connection = null;
PreparedStatement statement = null;
try{
- connection = dataSource.getConnection();
- statement = connection.prepareStatement(INSERT_STATEMENT);
+ connection = getJdbcConnection();
+ statement = connection.prepareStatement(query);
connection.setAutoCommit(false);
- for(HBaseSensitivityEntity entity : h){
- statement.setString(1, entity.getSite());
- statement.setString(2, entity.getHbaseResource());
- statement.setString(3, entity.getSensitivityType());
+ for(Object entity : h){
+ setFunc.apply(entity, statement);
statement.addBatch();
}
statement.executeBatch();
connection.commit();
}catch(Exception ex){
- LOG.error("error in querying hbase_sensitivity_entity table", ex);
+ LOG.error("error in executing insert statement {}", query, ex);
}finally {
try {
if (statement != null)
@@ -108,6 +110,102 @@ public class JDBCSecurityMetadataDAO implements ISecurityMetadataDAO {
LOG.error("error in closing database resources", ex);
}
}
- return null;
+ return new OpResult();
+ }
+
+ @Override
+ public Collection<HBaseSensitivityEntity> listHBaseSensitivies() {
+ return listEntities(HBASE_QUERY_ALL_STATEMENT, rs -> {
+ try {
+ HBaseSensitivityEntity entity = new HBaseSensitivityEntity();
+ entity.setSite(rs.getString(1));
+ entity.setHbaseResource(rs.getString(2));
+ entity.setSensitivityType(rs.getString(3));
+ return entity;
+ }catch(Exception ex){ throw new IllegalStateException(ex);}
+ });
+ }
+
+
+ @Override
+ public OpResult addHBaseSensitivity(Collection<HBaseSensitivityEntity> h) {
+ return addEntities(HBASE_INSERT_STATEMENT, h, (entity, statement) -> {
+ HBaseSensitivityEntity e = (HBaseSensitivityEntity)entity;
+ try {
+ statement.setString(1, e.getSite());
+ statement.setString(2, e.getHbaseResource());
+ statement.setString(3, e.getSensitivityType());
+ }catch(Exception ex){
+ throw new IllegalStateException(ex);
+ }
+ return statement;
+ });
+ }
+
+ @Override
+ public Collection<HdfsSensitivityEntity> listHdfsSensitivities() {
+ return listEntities(HDFS_QUERY_ALL_STATEMENT, rs -> {
+ try {
+ HdfsSensitivityEntity entity = new HdfsSensitivityEntity();
+ entity.setSite(rs.getString(1));
+ entity.setFiledir(rs.getString(2));
+ entity.setSensitivityType(rs.getString(3));
+ return entity;
+ }catch(Exception ex){ throw new IllegalStateException(ex);}
+ });
+ }
+
+ @Override
+ public OpResult addHdfsSensitivity(Collection<HdfsSensitivityEntity> h) {
+ return addEntities(HDFS_INSERT_STATEMENT, h, (entity, statement) -> {
+ HdfsSensitivityEntity e = (HdfsSensitivityEntity)entity;
+ try {
+ statement.setString(1, e.getSite());
+ statement.setString(2, e.getFiledir());
+ statement.setString(3, e.getSensitivityType());
+ }catch(Exception ex){
+ throw new IllegalStateException(ex);
+ }
+ return statement;
+ });
+ }
+
+ @Override
+ public Collection<IPZoneEntity> listIPZones() {
+ return listEntities(IPZONE_QUERY_ALL_STATEMENT, rs -> {
+ try {
+ IPZoneEntity entity = new IPZoneEntity();
+ entity.setIphost(rs.getString(1));
+ entity.setSecurityZone(rs.getString(2));
+ return entity;
+ }catch(Exception ex){ throw new IllegalStateException(ex);}
+ });
+ }
+
+ @Override
+ public OpResult addIPZone(Collection<IPZoneEntity> h) {
+ return addEntities(IPZONE_INSERT_STATEMENT, h, (entity, statement) -> {
+ IPZoneEntity e = (IPZoneEntity)entity;
+ try {
+ statement.setString(1, e.getIphost());
+ statement.setString(2, e.getSecurityZone());
+ }catch(Exception ex){
+ throw new IllegalStateException(ex);
+ }
+ return statement;
+ });
+ }
+
+ private Connection getJdbcConnection() throws Exception {
+ Connection connection;
+ try {
+ connection = DriverManager.getConnection(config.getString("metadata.jdbc.url"),
+ config.getString("metadata.jdbc.username"),
+ config.getString("metadata.jdbc.password"));
+ } catch (Exception e) {
+ LOG.error("error get connection for {}", config.getString("metadata.jdbc.url"), e);
+ throw e;
+ }
+ return connection;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataDaoFactory.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataDaoFactory.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataDaoFactory.java
new file mode 100644
index 0000000..91240a4
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataDaoFactory.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.eagle.security.service;
+
+import com.typesafe.config.Config;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @since Apr 12, 2016
+ *
+ */
+public class MetadataDaoFactory {
+ private static final Logger LOG = LoggerFactory.getLogger(MetadataDaoFactory.class);
+
+ public static ISecurityMetadataDAO getMetadataDAO(Config eagleServerConfig) {
+ String storeCls = eagleServerConfig.getString("metadata.store");
+
+ ISecurityMetadataDAO dao = null;
+ if (storeCls.equalsIgnoreCase("org.apache.eagle.metadata.service.memory.MemoryMetadataStore")) {
+ dao = new InMemMetadataDaoImpl();
+ } else {
+ dao = new JDBCSecurityMetadataDAO(eagleServerConfig);
+ }
+ return dao;
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataServiceClientImpl.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataServiceClientImpl.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataServiceClientImpl.java
index 676bde5..cac4630 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataServiceClientImpl.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/MetadataServiceClientImpl.java
@@ -40,10 +40,14 @@ public class MetadataServiceClientImpl implements IMetadataServiceClient {
private static final Logger LOG = LoggerFactory.getLogger(MetadataServiceClientImpl.class);
- private static final String METADATA_LIST_HBASE_SENSITIVITY_PATH = "/metadata/sensitivity/hbase";
- private static final String METADATA_ADD_HBASE_SENSITIVITY_PATH = "/metadata/sensitivity/hbase";
+ private static final String METADATA_LIST_HBASE_SENSITIVITY_PATH = "/metadata/security/hbaseSensitivity";
+ private static final String METADATA_ADD_HBASE_SENSITIVITY_PATH = "/metadata/security/hbaseSensitivity";
- private static final String METADATA_CLEAR_PATH = "/metadata/clear";
+ private static final String METADATA_LIST_HDFS_SENSITIVITY_PATH = "/metadata/security/hdfsSensitivity";
+ private static final String METADATA_ADD_HDFS_SENSITIVITY_PATH = "/metadata/security/hdfsSensitivity";
+
+ private static final String METADATA_LIST_IPZONE_PATH = "/metadata/security/ipzone";
+ private static final String METADATA_ADD_IPZONE_PATH = "/metadata/security/ipzone";
private static final String EAGLE_CORRELATION_CONTEXT = "metadataService.context";
private static final String EAGLE_CORRELATION_SERVICE_PORT = "metadataService.port";
@@ -100,7 +104,7 @@ public class MetadataServiceClientImpl implements IMetadataServiceClient {
}
@Override
- public Collection<HBaseSensitivityEntity> listHBaseSensitivies() {
+ public Collection<HBaseSensitivityEntity> listHBaseSensitivities() {
return list(METADATA_LIST_HBASE_SENSITIVITY_PATH, new GenericType<List<HBaseSensitivityEntity>>() {
});
}
@@ -111,4 +115,30 @@ public class MetadataServiceClientImpl implements IMetadataServiceClient {
r.accept(MediaType.APPLICATION_JSON_TYPE).type(MediaType.APPLICATION_JSON).post(h);
return new OpResult();
}
+
+ @Override
+ public Collection<HdfsSensitivityEntity> listHdfsSensitivities() {
+ return list(METADATA_LIST_HDFS_SENSITIVITY_PATH, new GenericType<List<HdfsSensitivityEntity>>() {
+ });
+ }
+
+ @Override
+ public OpResult addHdfsSensitivity(Collection<HdfsSensitivityEntity> h) {
+ WebResource r = client.resource(basePath + METADATA_ADD_HDFS_SENSITIVITY_PATH);
+ r.accept(MediaType.APPLICATION_JSON_TYPE).type(MediaType.APPLICATION_JSON).post(h);
+ return new OpResult();
+ }
+
+ @Override
+ public Collection<IPZoneEntity> listIPZones(){
+ return list(METADATA_LIST_IPZONE_PATH, new GenericType<List<IPZoneEntity>>() {
+ });
+ }
+
+ @Override
+ public OpResult addIPZone(Collection<IPZoneEntity> h){
+ WebResource r = client.resource(basePath + METADATA_ADD_IPZONE_PATH);
+ r.accept(MediaType.APPLICATION_JSON_TYPE).type(MediaType.APPLICATION_JSON).post(h);
+ return new OpResult();
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
new file mode 100644
index 0000000..404c10d
--- /dev/null
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/service/SecurityExternalMetadataResource.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.eagle.security.service;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
+
+import javax.ws.rs.*;
+import java.util.Collection;
+
+/**
+ * Since 6/10/16.
+ */
+@Path("/metadata/security")
+@Singleton
+public class SecurityExternalMetadataResource {
+ private ApplicationEntityService entityService;
+ private ISecurityMetadataDAO dao;
+ @Inject
+ public SecurityExternalMetadataResource(ApplicationEntityService entityService, Config eagleServerConfig){
+ this.entityService = entityService;
+ dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
+ }
+
+ @Path("/hbaseSensitivity")
+ @GET
+ @Produces("application/json")
+ public Collection<HBaseSensitivityEntity> getHBaseSensitivites(@QueryParam("site") String site){
+ return dao.listHBaseSensitivies();
+ }
+
+ @Path("/hbaseSensitivity")
+ @POST
+ @Consumes("application/json")
+ public void addHBaseSensitivities(Collection<HBaseSensitivityEntity> list){
+ dao.addHBaseSensitivity(list);
+ }
+
+ @Path("/hdfsSensitivity")
+ @GET
+ @Produces("application/json")
+ public Collection<HdfsSensitivityEntity> getHdfsSensitivities(@QueryParam("site") String site){
+ return dao.listHdfsSensitivities();
+ }
+
+ @Path("/hdfsSensitivity")
+ @POST
+ @Consumes("application/json")
+ public void addHdfsSensitivities(Collection<HdfsSensitivityEntity> list){
+ dao.addHdfsSensitivity(list);
+ }
+
+ @Path("/ipzone")
+ @GET
+ @Produces("application/json")
+ public Collection<IPZoneEntity> getIPZones(){
+ return dao.listIPZones();
+ }
+
+ @Path("/ipzone")
+ @POST
+ @Consumes("application/json")
+ public void addIPZones(Collection<IPZoneEntity> list){
+ dao.addIPZone(list);
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogAppProvider.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogAppProvider.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogAppProvider.java
index dd672a5..2cfee10 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogAppProvider.java
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HBaseAuditLogAppProvider.java
@@ -36,21 +36,4 @@ public class HBaseAuditLogAppProvider extends AbstractApplicationProvider<HBaseA
public HBaseAuditLogApplication getApplication() {
return new HBaseAuditLogApplication();
}
-
- @Override
- public void register(ModuleRegistry registry) {
- registry.register(MemoryMetadataStore.class, new AbstractModule() {
- @Override
- protected void configure() {
- bind(ISecurityMetadataDAO.class).to(InMemMetadataDaoImpl.class);
- }
- });
-
- registry.register(JDBCMetadataStore.class, new AbstractModule() {
- @Override
- protected void configure() {
- bind(ISecurityMetadataDAO.class).to(JDBCSecurityMetadataDAO.class);
- }
- });
- }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseAuditLogKafkaDeserializer.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseAuditLogKafkaDeserializer.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseAuditLogKafkaDeserializer.java
deleted file mode 100644
index 5b37519..0000000
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseAuditLogKafkaDeserializer.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-package org.apache.eagle.security.hbase;
-
-
-import org.apache.eagle.dataproc.impl.storm.kafka.SpoutKafkaMessageDeserializer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
-
-
-public class HbaseAuditLogKafkaDeserializer implements SpoutKafkaMessageDeserializer {
- private static Logger LOG = LoggerFactory.getLogger(HbaseAuditLogKafkaDeserializer.class);
- private Properties props;
-
- public HbaseAuditLogKafkaDeserializer(Properties props){
- this.props = props;
- }
-
- @Override
- public Object deserialize(byte[] arg0) {
- String logLine = new String(arg0);
-
- HbaseAuditLogParser parser = new HbaseAuditLogParser();
- try{
- HbaseAuditLogObject entity = parser.parse(logLine);
- if(entity == null) return null;
-
- Map<String, Object> map = new TreeMap<String, Object>();
- map.put("action", entity.action);
- map.put("host", entity.host);
- map.put("status", entity.status);
- map.put("request", entity.request);
- map.put("scope", entity.scope);
- map.put("user", entity.user);
- map.put("timestamp", entity.timestamp);
- return map;
- }catch(Exception ex){
- LOG.error("Failing parse audit log:" + logLine, ex);
- return null;
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityDataJoinBolt.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityDataJoinBolt.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityDataJoinBolt.java
index d8d9d6b..cb2fa38 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityDataJoinBolt.java
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityDataJoinBolt.java
@@ -47,13 +47,13 @@ public class HbaseResourceSensitivityDataJoinBolt extends BaseRichBolt {
@Override
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
- // start hive resource data polling
+ // start hbase sensitivity data polling
try {
ExternalDataJoiner joiner = new ExternalDataJoiner(
HbaseResourceSensitivityPollingJob.class, config, context.getThisComponentId() + "." + context.getThisTaskIndex());
joiner.start();
} catch(Exception ex){
- LOG.error("Fail to bring up quartz scheduler.", ex);
+ LOG.error("Failed to bring up quartz scheduler.", ex);
throw new IllegalStateException(ex);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
index 603ed5a..cd479f3 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/java/org/apache/eagle/security/hbase/HbaseResourceSensitivityPollingJob.java
@@ -68,6 +68,6 @@ public class HbaseResourceSensitivityPollingJob extends AbstractResourceSensitiv
+ eagleServiceHost + ":" + eagleServicePort);
IMetadataServiceClient client = new MetadataServiceClientImpl(eagleServiceHost, eagleServicePort, "/rest");
- return client.listHBaseSensitivies();
+ return client.listHBaseSensitivities();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
index 04c3925..0225a50 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
@@ -19,8 +19,8 @@
-->
<application>
- <type>HBaseAuditLogApplication</type>
- <name>HBase Audit Log Monitoring Application</name>
+ <type>HBaseAuditLogApplication</type>
+ <name>HBase Audit Log Monitoring Application</name>
<version>0.5.0-incubating</version>
<appClass>org.apache.eagle.security.hbase.HBaseAuditLogApplication</appClass>
<viewPath>/apps/example</viewPath>
@@ -28,7 +28,7 @@
<property>
<name>dataSourceConfig.topic</name>
<displayName>dataSourceConfig.topic</displayName>
- <value>sandbox_hbase_audit_log</value>
+ <value>hbase_audit_log</value>
<description>data source topic</description>
</property>
<property>
@@ -154,7 +154,7 @@
<property>
<name>dataSinkConfig.topic</name>
<displayName>dataSinkConfig.topic</displayName>
- <value>sandbox_hbase_audit_log_parsed</value>
+ <value>hbase_audit_log_parsed</value>
<description>topic for kafka data sink</description>
</property>
<property>
@@ -175,12 +175,8 @@
<value>kafka.serializer.StringEncoder</value>
<description>serializer class Kafka message key</description>
</property>
- <property>
- <name>metadata.store</name>
- <displayName>metadata.store</displayName>
- <value>org.apache.eagle.security.service.InMemMetadataDaoImpl</value>
- <description>implementation class for metadata store</description>
- </property>
+
+ <!-- properties for attribute resolver-->
<property>
<name>hbase.zookeeper.quorum</name>
<displayName>hbase.zookeeper.quorum</displayName>
@@ -203,7 +199,7 @@
<streams>
<stream>
<streamId>hbase_audit_log_stream</streamId>
- <description>HBase Audit Log Stream</description>
+ <description>HBase Audit Log Stream</description>
<validate>true</validate>
<timeseries>true</timeseries>
<columns>
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
index 5990450..3947cba 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/application.conf
@@ -22,15 +22,14 @@
"numOfSpoutTasks" : 2,
"numOfParserTasks" : 2,
"numOfJoinTasks" : 2,
- "numOfSinkTasks" : 2,
- "name" : "sandbox-hbaseAuditLog-topology",
+ "numOfSinkTasks" : 2
},
"dataSourceConfig": {
- "topic" : "sandbox_hbase_audit_log",
- "zkConnection" : "sandbox.hortonworks.com:2181",
+ "topic" : "hbase_audit_log",
+ "zkConnection" : "server.eagle.apache.org:2181",
"zkConnectionTimeoutMS" : 15000,
"fetchSize" : 1048586,
- "transactionZKServers" : "sandbox.hortonworks.com",
+ "transactionZKServers" : "server.eagle.apache.org",
"transactionZKPort" : 2181,
"transactionZKRoot" : "/consumers",
"consumerGroupId" : "eagle.hbaseaudit.consumer",
@@ -47,8 +46,8 @@
}
},
"dataSinkConfig": {
- "topic" : "sandbox_hbase_audit_log_parsed",
- "brokerList" : "sandbox.hortonworks.com:6667",
+ "topic" : "hbase_audit_log_parsed",
+ "brokerList" : "server.eagle.apache.org:6667",
"serializerClass" : "kafka.serializer.StringEncoder",
"keySerializerClass" : "kafka.serializer.StringEncoder"
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseMetadataBrowseWebResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseMetadataBrowseWebResource.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseMetadataBrowseWebResource.java
index 441af8f..9b98649 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseMetadataBrowseWebResource.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseMetadataBrowseWebResource.java
@@ -26,6 +26,7 @@ import org.apache.eagle.security.entity.HbaseResourceEntity;
import org.apache.eagle.security.resolver.MetadataAccessConfigRepo;
import org.apache.eagle.security.service.HBaseSensitivityEntity;
import org.apache.eagle.security.service.ISecurityMetadataDAO;
+import org.apache.eagle.security.service.MetadataDaoFactory;
import org.apache.eagle.service.common.EagleExceptionWrapper;
import org.apache.eagle.service.security.hbase.dao.HbaseMetadataDAOImpl;
import org.apache.hadoop.conf.Configuration;
@@ -40,16 +41,15 @@ import java.util.regex.Pattern;
@Path("/hbaseResource")
public class HbaseMetadataBrowseWebResource {
private static Logger LOG = LoggerFactory.getLogger(HbaseMetadataBrowseWebResource.class);
- private MetadataAccessConfigRepo repo = new MetadataAccessConfigRepo();
final public static String HBASE_APPLICATION = "HBaseAuditLogApplication";
private ApplicationEntityService entityService;
private ISecurityMetadataDAO dao;
@Inject
- public HbaseMetadataBrowseWebResource(ApplicationEntityService entityService, ISecurityMetadataDAO metadataDAO){
+ public HbaseMetadataBrowseWebResource(ApplicationEntityService entityService, Config eagleServerConfig){
this.entityService = entityService;
- this.dao = metadataDAO;
+ this.dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
}
private Map<String, Map<String, String>> getAllSensitivities(){
@@ -147,8 +147,8 @@ public class HbaseMetadataBrowseWebResource {
List<String> columns = null;
List<HbaseResourceEntity> values = new ArrayList<>();
try {
- Config config = repo.getConfig(HBASE_APPLICATION, site);
- Configuration conf = repo.convert(config);
+ Map<String, Object> config = getAppConfig(site, HBASE_APPLICATION);
+ Configuration conf = convert(config);
HbaseMetadataDAOImpl dao = new HbaseMetadataDAOImpl(conf);
String tableName = String.format("%s:%s", namespace, table);
columns = dao.getColumnFamilies(tableName);
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseSensitivityResourceService.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseSensitivityResourceService.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseSensitivityResourceService.java
deleted file mode 100644
index bc4d42b..0000000
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/HbaseSensitivityResourceService.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.eagle.service.security.hbase;
-
-
-import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;
-import org.apache.eagle.security.entity.HbaseResourceSensitivityAPIEntity;
-import org.apache.eagle.service.generic.GenericEntityServiceResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class HbaseSensitivityResourceService {
- private static Logger LOG = LoggerFactory.getLogger(HbaseSensitivityResourceService.class);
-
- public Map<String, Map<String, String>> getAllHbaseSensitivityMap(){
- GenericEntityServiceResource resource = new GenericEntityServiceResource();
- /* parameters are: query, startTime, endTime, pageSzie, startRowkey, treeAgg, timeSeries, intervalmin, top, filterIfMissing,
- * parallel, metricName*/
- GenericServiceAPIResponseEntity ret = resource.search("HbaseResourceSensitivityService[]{*}", null, null, Integer.MAX_VALUE, null, false, false, 0L, 0, false,
- 0, null, false);
- List<HbaseResourceSensitivityAPIEntity> list = (List<HbaseResourceSensitivityAPIEntity>) ret.getObj();
- if( list == null )
- return Collections.emptyMap();
- Map<String, Map<String, String>> res = new HashMap<String, Map<String, String>>();
-
- for(HbaseResourceSensitivityAPIEntity entity : list){
- String site = entity.getTags().get("site");
- if(entity.getTags().containsKey("hbaseResource")) {
- if(res.get(site) == null){
- res.put(site, new HashMap<String, String>());
- }
- Map<String, String> resSensitivityMap = res.get(site);
- resSensitivityMap.put(entity.getTags().get("hbaseResource"), entity.getSensitivityType());
- }
- else {
- if(LOG.isDebugEnabled()) {
- LOG.debug("An invalid sensitivity entity is detected" + entity);
- }
- }
- }
- return res;
- }
-
- public Map<String, String> getHbaseSensitivityMap(String site){
- GenericEntityServiceResource resource = new GenericEntityServiceResource();
- String queryFormat = "HbaseResourceSensitivityService[@site=\"%s\"]{*}";
- GenericServiceAPIResponseEntity ret = resource.search(String.format(queryFormat, site), null, null, Integer.MAX_VALUE, null, false, false, 0L, 0, false,
- 0, null, false);
- List<HbaseResourceSensitivityAPIEntity> list = (List<HbaseResourceSensitivityAPIEntity>) ret.getObj();
- if( list == null )
- return Collections.emptyMap();
- Map<String, String> resSensitivityMap = new HashMap<String, String>();
- for(HbaseResourceSensitivityAPIEntity entity : list){
- if(entity.getTags().containsKey("hbaseResource")) {
- resSensitivityMap.put(entity.getTags().get("hbaseResource"), entity.getSensitivityType());
- }
- }
- return resSensitivityMap;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/SensitivityMetadataResource.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/SensitivityMetadataResource.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/SensitivityMetadataResource.java
deleted file mode 100644
index ccbff29..0000000
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/SensitivityMetadataResource.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- *
- * * Licensed to the Apache Software Foundation (ASF) under one or more
- * * contributor license agreements. See the NOTICE file distributed with
- * * this work for additional information regarding copyright ownership.
- * * The ASF licenses this file to You under the Apache License, Version 2.0
- * * (the "License"); you may not use this file except in compliance with
- * * the License. You may obtain a copy of the License at
- * * <p/>
- * * http://www.apache.org/licenses/LICENSE-2.0
- * * <p/>
- * * Unless required by applicable law or agreed to in writing, software
- * * distributed under the License is distributed on an "AS IS" BASIS,
- * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * * See the License for the specific language governing permissions and
- * * limitations under the License.
- *
- */
-package org.apache.eagle.service.security.hbase;
-
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import org.apache.eagle.metadata.service.ApplicationEntityService;
-import org.apache.eagle.security.service.HBaseSensitivityEntity;
-import org.apache.eagle.security.service.ISecurityMetadataDAO;
-
-import javax.ws.rs.*;
-import java.util.Collection;
-
-/**
- * Since 6/10/16.
- */
-@Path("/metadata/sensitivity")
-@Singleton
-public class SensitivityMetadataResource {
- private ApplicationEntityService entityService;
- private ISecurityMetadataDAO dao;
-
- @Inject
- public SensitivityMetadataResource(ApplicationEntityService entityService, ISecurityMetadataDAO metadataDAO){
- this.entityService = entityService;
- this.dao = metadataDAO;
- }
-
- @Path("/hbase")
- @GET
- @Produces("application/json")
- public Collection<HBaseSensitivityEntity> getHBaseSensitivites(@QueryParam("site") String site){
- return dao.listHBaseSensitivies();
- }
-
- @Path("/hbase")
- @POST
- @Consumes("application/json")
- public void addHBaseSensitivities(Collection<HBaseSensitivityEntity> list){
- dao.addHBaseSensitivity(list);
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/dao/HbaseMetadataDAOImpl.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/dao/HbaseMetadataDAOImpl.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/dao/HbaseMetadataDAOImpl.java
index 5d42ba8..ed52bd7 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/dao/HbaseMetadataDAOImpl.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/dao/HbaseMetadataDAOImpl.java
@@ -35,7 +35,6 @@ public class HbaseMetadataDAOImpl {
private Configuration hBaseConfiguration;
-
public HbaseMetadataDAOImpl(Configuration config) {
this.hBaseConfiguration = HBaseConfiguration.create();
this.hBaseConfiguration.addResource(config);
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseActionResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseActionResolver.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseActionResolver.java
index bd6991a..3baa1da 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseActionResolver.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseActionResolver.java
@@ -18,6 +18,8 @@
package org.apache.eagle.service.security.hbase.resolver;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.security.resolver.AbstractCommandResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -26,6 +28,9 @@ import java.util.Arrays;
public class HbaseActionResolver extends AbstractCommandResolver {
+ public HbaseActionResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+
+ }
private final static Logger LOG = LoggerFactory.getLogger(HbaseActionResolver.class);
private final String [] cmdStrs = {"ADMIN", "READ", "WRITE", "CREATE", "EXECUTE"};
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseMetadataResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseMetadataResolver.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseMetadataResolver.java
index 7b13926..6dd82e4 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseMetadataResolver.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseMetadataResolver.java
@@ -18,9 +18,9 @@
package org.apache.eagle.service.security.hbase.resolver;
-
import com.typesafe.config.Config;
-import org.apache.eagle.security.resolver.MetadataAccessConfigRepo;
+import org.apache.eagle.metadata.model.ApplicationEntity;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.service.alert.resolver.AttributeResolvable;
import org.apache.eagle.service.alert.resolver.AttributeResolveException;
import org.apache.eagle.service.alert.resolver.BadAttributeResolveRequestException;
@@ -32,10 +32,29 @@ import org.apache.hadoop.conf.Configuration;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
import java.util.regex.Pattern;
public class HbaseMetadataResolver implements AttributeResolvable<GenericAttributeResolveRequest,String> {
+ private ApplicationEntityService entityService;
+ public HbaseMetadataResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+ this.entityService = entityService;
+ }
+
+ private Map<String, Object> getAppConfig(String site, String appType){
+ ApplicationEntity entity = entityService.getBySiteIdAndAppType(site, appType);
+ return entity.getConfiguration();
+ }
+
+ private Configuration convert(Map<String, Object> originalConfig) throws Exception {
+ Configuration config = new Configuration();
+ for (Map.Entry<String, Object> entry : originalConfig.entrySet()) {
+ config.set(entry.getKey().toString(), entry.getValue().toString());
+ }
+ return config;
+ }
+
@Override
public List<String> resolve(GenericAttributeResolveRequest request) throws AttributeResolveException {
String query = request.getQuery().trim();
@@ -43,9 +62,8 @@ public class HbaseMetadataResolver implements AttributeResolvable<GenericAttribu
String[] subResources = query.split(":");
try {
- MetadataAccessConfigRepo repo = new MetadataAccessConfigRepo();
- Config config = repo.getConfig(HbaseMetadataBrowseWebResource.HBASE_APPLICATION, site);
- Configuration conf = repo.convert(config);
+ Map<String, Object> config = getAppConfig(site, HbaseMetadataBrowseWebResource.HBASE_APPLICATION);
+ Configuration conf = convert(config);
HbaseMetadataDAOImpl dao = new HbaseMetadataDAOImpl(conf);
switch (subResources.length) {
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseRequestResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseRequestResolver.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseRequestResolver.java
index 35cf10c..8467f40 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseRequestResolver.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseRequestResolver.java
@@ -18,6 +18,8 @@
package org.apache.eagle.service.security.hbase.resolver;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.security.resolver.AbstractCommandResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -29,6 +31,11 @@ import java.util.List;
public class HbaseRequestResolver extends AbstractCommandResolver {
private final static Logger LOG = LoggerFactory.getLogger(HbaseRequestResolver.class);
+
+ public HbaseRequestResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+
+ }
+
private final String [] master = {"createTable", "modifyTable", "deleteTable", "truncateTable", "addColumn", "modifyColumn",
"deleteColumn", "enableTable", "disableTable", "disableAclTable", "move", "assign", "unassign",
"regionOffline", "balance", "balanceSwitch", "shutdown", "stopMaster", "snapshot",
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseSensitivityTypeResolver.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseSensitivityTypeResolver.java b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseSensitivityTypeResolver.java
index f53b2f2..9df322b 100644
--- a/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseSensitivityTypeResolver.java
+++ b/eagle-security/eagle-security-hbase-web/src/main/java/org/apache/eagle/service/security/hbase/resolver/HbaseSensitivityTypeResolver.java
@@ -17,22 +17,42 @@
package org.apache.eagle.service.security.hbase.resolver;
+import com.typesafe.config.Config;
+import org.apache.eagle.metadata.service.ApplicationEntityService;
import org.apache.eagle.security.resolver.AbstractSensitivityTypeResolver;
-import org.apache.eagle.service.security.hbase.HbaseSensitivityResourceService;
+import org.apache.eagle.security.service.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.Collection;
+import java.util.HashMap;
import java.util.Map;
public class HbaseSensitivityTypeResolver extends AbstractSensitivityTypeResolver {
private final static Logger LOG = LoggerFactory.getLogger(HbaseSensitivityTypeResolver.class);
+ private ISecurityMetadataDAO dao;
+
+ public HbaseSensitivityTypeResolver(ApplicationEntityService entityService, Config eagleServerConfig){
+ // todo I know this is ugly, but push abstraction to later
+ dao = MetadataDaoFactory.getMetadataDAO(eagleServerConfig);
+ }
@Override
public void init() {
- HbaseSensitivityResourceService dao = new HbaseSensitivityResourceService();
- Map<String, Map<String, String>> maps = dao.getAllHbaseSensitivityMap();
+ Map<String, Map<String, String>> maps = getAllSensitivities();
this.setSensitivityMaps(maps);
}
+ private Map<String, Map<String, String>> getAllSensitivities(){
+ Map<String, Map<String, String>> all = new HashMap<>();
+ Collection<HBaseSensitivityEntity> entities = dao.listHBaseSensitivies();
+ for(HBaseSensitivityEntity entity : entities){
+ if(!all.containsKey(entity.getSite())){
+ all.put(entity.getSite(), new HashMap<>());
+ }
+ all.get(entity.getSite()).put(entity.getHbaseResource(), entity.getSensitivityType());
+ }
+ return all;
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/pom.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/pom.xml b/eagle-security/eagle-security-hdfs-auditlog/pom.xml
index bd5e788..18c4908 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/pom.xml
+++ b/eagle-security/eagle-security-hdfs-auditlog/pom.xml
@@ -64,5 +64,10 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.eagle</groupId>
+ <artifactId>eagle-app-base</artifactId>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/run_auditlog_topology.sh
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/run_auditlog_topology.sh b/eagle-security/eagle-security-hdfs-auditlog/run_auditlog_topology.sh
deleted file mode 100755
index 38d8ce9..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/run_auditlog_topology.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-export JAVA_HOME=$(/usr/libexec/java_home -v 1.7)
-mvn -X exec:java -Dexec.mainClass="eagle.security.auditlog.HdfsAuditLogProcessorMain"
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/run_hostname_lookkup.sh
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/run_hostname_lookkup.sh b/eagle-security/eagle-security-hdfs-auditlog/run_hostname_lookkup.sh
deleted file mode 100755
index fffd8c0..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/run_hostname_lookkup.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-### refer to http://mojo.codehaus.org/exec-maven-plugin/usage.html
-### java goal is to execute program within maven JVM
-export JAVA_HOME=$(/usr/libexec/java_home -v 1.7)
-###mvn -X exec:java -Dexec.args="-input /Users/user1/Downloads/hdfs-audit.log.2015-01-22-22 -maxNum 10000000" -PhostIdentifier
-mvn -X exec:java -Dexec.mainClass="eagle.app.security.dataproc.util.AuditLogIP2HostIdentifier" -Dexec.args="-input /Users/user1/Downloads/hdfs-audit.log.2015-01-22-22 -maxNum 10000000"
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/run_message_producer.sh
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/run_message_producer.sh b/eagle-security/eagle-security-hdfs-auditlog/run_message_producer.sh
deleted file mode 100755
index dbe50f3..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/run_message_producer.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-export JAVA_HOME=$(/usr/libexec/java_home -v 1.7)
-### mvn -X exec:java -Dexec.args="-input /Users/user1/Downloads/hdfs-audit.log.2015-01-22-22 -maxNum 10000000 -topic hdfs_audit_log" -Pproducer
-mvn exec:java -Dexec.mainClass="eagle.security.auditlog.util.AuditLogKafkaProducer" -Dexec.args="-input /Users/user1/Downloads/hdfs-audit.log.2015-01-22-22 -maxNum 20000 -topic hdfs_audit_log"
-
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/run_message_producer_in_assembly.sh
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/run_message_producer_in_assembly.sh b/eagle-security/eagle-security-hdfs-auditlog/run_message_producer_in_assembly.sh
deleted file mode 100644
index 128926d..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/run_message_producer_in_assembly.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-java -cp target/eagle-app-security-activity-monitoring-dataproc-1.0.0-jar-with-dependencies.jar:/Users/user1/.m2/repository/org/slf4j/slf4j-api/1.4.3/slf4j-api-1.4.3.jar:/Users/user1/.m2/repository/org/slf4j/slf4j-log4j12/1.4.3/slf4j-log4j12-1.4.3.jar eagle.app.security.dataproc.util.AuditLogKafkaProducer -input /Users/user1/projects/eagle-app-security-activity-monitoring/eagle-app-security-activity-monitoring-dataproc/src/main/resources/auditlog/auditlog.1 --topic hdfs_audit_log
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/27513f7b/eagle-security/eagle-security-hdfs-auditlog/src/assembly/eagle-dam-auditlog-assembly.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/assembly/eagle-dam-auditlog-assembly.xml b/eagle-security/eagle-security-hdfs-auditlog/src/assembly/eagle-dam-auditlog-assembly.xml
deleted file mode 100644
index 0acf619..0000000
--- a/eagle-security/eagle-security-hdfs-auditlog/src/assembly/eagle-dam-auditlog-assembly.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
- <id>assembly</id>
- <formats>
- <format>jar</format>
- </formats>
- <includeBaseDirectory>false</includeBaseDirectory>
- <dependencySets>
- <dependencySet>
- <outputDirectory>/</outputDirectory>
- <useProjectArtifact>false</useProjectArtifact>
- <unpack>true</unpack>
- <scope>runtime</scope>
- <unpackOptions>
- <excludes>
- <exclude>**/application.conf</exclude>
- <exclude>**/defaults.yaml</exclude>
- <exclude>**/storm.yaml</exclude>
- <exclude>**/storm.yaml.1</exclude>
- <exclude>**/log4j.properties</exclude>
- </excludes>
- </unpackOptions>
- <excludes>
- <exclude>org.apache.storm:storm-core</exclude>
- <exclude>org.slf4j:slf4j-api</exclude>
- <exclude>org.slf4j:log4j-over-slf4j</exclude>
- <exclude>org.slf4j:slf4j-log4j12</exclude>
- <exclude>log4j:log4j</exclude>
- <exclude>asm:asm</exclude>
- <exclude>org.apache.log4j.wso2:log4j</exclude>
- </excludes>
- </dependencySet>
- </dependencySets>
- <fileSets>
- <fileSet>
- <directory>${project.build.outputDirectory}</directory>
- <outputDirectory>/</outputDirectory>
- <excludes>
- <exclude>application.conf</exclude>
- <exclude>log4j.properties</exclude>
- <exclude>**/storm.yaml.1</exclude>
- </excludes>
- </fileSet>
- </fileSets>
-</assembly>