Posted to commits@eagle.apache.org by qi...@apache.org on 2016/10/30 02:28:12 UTC

incubator-eagle git commit: [EAGLE-696] Update HdfsAuditLogApp Configuration

Repository: incubator-eagle
Updated Branches:
  refs/heads/master 4ff963b47 -> 3eacc8283


[EAGLE-696] Update HdfsAuditLogApp Configuration

https://issues.apache.org/jira/browse/EAGLE-696

Author: Zhao, Qingwen <qi...@apache.org>

Closes #581 from qingwen220/EAGLE-696.


Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/3eacc828
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/3eacc828
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/3eacc828

Branch: refs/heads/master
Commit: 3eacc828313a76afed017d8f3c846bd767ac20d8
Parents: 4ff963b
Author: Zhao, Qingwen <qi...@apache.org>
Authored: Sun Oct 30 10:28:02 2016 +0800
Committer: Zhao, Qingwen <qi...@apache.org>
Committed: Sun Oct 30 10:28:02 2016 +0800

----------------------------------------------------------------------
 ...e.alert.app.AlertUnitTopologyAppProvider.xml |  2 +-
 .../metadata/resource/MetadataResource.java     |  4 +-
 .../metadata/impl/JdbcDatabaseHandler.java      | 34 +++++++--
 .../metadata/impl/JdbcMetadataDaoImpl.java      |  6 +-
 .../impl/storm/kafka/KafkaSpoutProvider.java    | 14 ++--
 .../security/enrich/AbstractDataEnrichLCM.java  |  4 +-
 .../eagle/security/enrich/DataEnrichJob.java    |  2 +-
 .../security/enrich/ExternalDataJoiner.java     |  8 +-
 ....security.hbase.HBaseAuditLogAppProvider.xml | 75 ++++++-------------
 ...ecurity.auditlog.HdfsAuditLogAppProvider.xml | 77 ++++++--------------
 ....auditlog.HiveQueryMonitoringAppProvider.xml | 28 +------
 11 files changed, 101 insertions(+), 153 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/META-INF/providers/org.apache.eagle.alert.app.AlertUnitTopologyAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/META-INF/providers/org.apache.eagle.alert.app.AlertUnitTopologyAppProvider.xml b/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/META-INF/providers/org.apache.eagle.alert.app.AlertUnitTopologyAppProvider.xml
index 7b12917..06ccaea 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/META-INF/providers/org.apache.eagle.alert.app.AlertUnitTopologyAppProvider.xml
+++ b/eagle-core/eagle-alert-parent/eagle-alert-app/src/main/resources/META-INF/providers/org.apache.eagle.alert.app.AlertUnitTopologyAppProvider.xml
@@ -17,7 +17,7 @@
   -->
 
 <application>
-    <type>AlertUnitTopologyApp</type>
+    <type>ALERT_UNIT_TOPOLOGY_APP</type>
     <name>Alert Engine</name>
     <description>Real-time Alert Engine</description>
     <version>0.5.0-incubating</version>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata-service/src/main/java/org/apache/eagle/service/metadata/resource/MetadataResource.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata-service/src/main/java/org/apache/eagle/service/metadata/resource/MetadataResource.java b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata-service/src/main/java/org/apache/eagle/service/metadata/resource/MetadataResource.java
index 5110766..49a9611 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata-service/src/main/java/org/apache/eagle/service/metadata/resource/MetadataResource.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata-service/src/main/java/org/apache/eagle/service/metadata/resource/MetadataResource.java
@@ -368,9 +368,9 @@ public class MetadataResource {
         return results;
     }
 
-    @Path("/publishments/{pubId}")
+    @Path("/publishments/{name}")
     @DELETE
-    public OpResult removePublishment(@PathParam("pubId") String pubId) {
+    public OpResult removePublishment(@PathParam("name") String pubId) {
         return dao.removePublishment(pubId);
     }
 

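With the path parameter renamed from "pubId" to "name", clients now delete a publishment by its name. A minimal JAX-RS 2.0 client sketch; the base URL and the "/rest/metadata" prefix are assumptions for illustration, not taken from this commit:

    import javax.ws.rs.client.Client;
    import javax.ws.rs.client.ClientBuilder;
    import javax.ws.rs.core.Response;

    public class RemovePublishmentExample {
        public static void main(String[] args) {
            // Assumed base URL of the Eagle metadata service; adjust to your deployment.
            Client client = ClientBuilder.newClient();
            Response response = client.target("http://localhost:9090/rest/metadata")
                    .path("publishments")
                    .path("sample-publishment-name") // publishment name, formerly addressed as pubId
                    .request()
                    .delete();
            System.out.println("HTTP status: " + response.getStatus());
            client.close();
        }
    }
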
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcDatabaseHandler.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcDatabaseHandler.java b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcDatabaseHandler.java
index d9580f1..571cdda 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcDatabaseHandler.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcDatabaseHandler.java
@@ -42,7 +42,7 @@ public class JdbcDatabaseHandler {
     private static final String DELETE_STATEMENT = "DELETE FROM %s WHERE id=?";
     private static final String UPDATE_STATEMENT = "UPDATE %s set value=? WHERE id=?";
     private static final String QUERY_ALL_STATEMENT = "SELECT value FROM %s";
-    private static final String QUERY_CONDITION_STATEMENT = "SELECT value FROM %s WHERE id=%s";
+    private static final String QUERY_CONDITION_STATEMENT = "SELECT value FROM %s WHERE id=?";
     private static final String QUERY_ORDERBY_STATEMENT = "SELECT value FROM %s ORDER BY id %s";
     private static final String QUERY_ALL_STATEMENT_WITH_SIZE = "SELECT value FROM %s limit %s";
 
@@ -164,9 +164,9 @@ public class JdbcDatabaseHandler {
         return executeSelectStatement(clz, query);
     }
 
-    public <T> T listWithFilter(String key, Class<T> clz) {
+    public <T> T listTop(Class<T> clz, String sortType) {
         String tb = getTableName(clz.getSimpleName());
-        String query = String.format(QUERY_CONDITION_STATEMENT, tb, key);
+        String query = String.format(QUERY_ORDERBY_STATEMENT, tb, sortType);
         List<T> result = executeSelectStatement(clz, query);
         if (result.isEmpty()) {
             return null;
@@ -175,10 +175,32 @@ public class JdbcDatabaseHandler {
         }
     }
 
-    public <T> T listTop(Class<T> clz, String sortType) {
+    public <T> T listWithFilter(String key, Class<T> clz) {
+        return executeSelectByIdStatement(clz, key);
+    }
+
+    public <T> T executeSelectByIdStatement(Class<T> clz, String id) {
         String tb = getTableName(clz.getSimpleName());
-        String query = String.format(QUERY_ORDERBY_STATEMENT, tb, sortType);
-        List<T> result = executeSelectStatement(clz, query);
+        List<T> result = new LinkedList<>();
+        try {
+            PreparedStatement statement = connection.prepareStatement(String.format(QUERY_CONDITION_STATEMENT, tb));
+            statement.setString(1, id);
+            ResultSet rs = statement.executeQuery();
+            while (rs.next()) {
+                //String key = rs.getString(1);
+                String json = rs.getString(1);
+                try {
+                    T obj = mapper.readValue(json, clz);
+                    result.add(obj);
+                } catch (IOException e) {
+                    LOG.error("deserialize config item failed!", e);
+                }
+            }
+            rs.close();
+            statement.close();
+        } catch (SQLException e) {
+            e.printStackTrace();
+        }
         if (result.isEmpty()) {
             return null;
         } else {

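The new executeSelectByIdStatement binds the id through a PreparedStatement placeholder instead of formatting it into the SQL text, which avoids quoting and injection issues. A self-contained sketch of the same pattern, assuming a table with id and value columns (table name and connection handling are illustrative):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class SelectByIdExample {
        private static final String QUERY_CONDITION_STATEMENT = "SELECT value FROM %s WHERE id=?";

        // Returns the JSON value stored under the given id, or null if no row matches.
        static String selectValueById(Connection connection, String table, String id) throws SQLException {
            // The table name still goes through String.format because JDBC placeholders
            // cannot stand in for identifiers; the id itself is bound as a parameter.
            String sql = String.format(QUERY_CONDITION_STATEMENT, table);
            try (PreparedStatement statement = connection.prepareStatement(sql)) {
                statement.setString(1, id);
                try (ResultSet rs = statement.executeQuery()) {
                    return rs.next() ? rs.getString(1) : null;
                }
            }
        }
    }
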
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcMetadataDaoImpl.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcMetadataDaoImpl.java b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcMetadataDaoImpl.java
index adf26d8..af5da51 100644
--- a/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcMetadataDaoImpl.java
+++ b/eagle-core/eagle-alert-parent/eagle-alert/alert-metadata-parent/alert-metadata/src/main/java/org/apache/eagle/alert/metadata/impl/JdbcMetadataDaoImpl.java
@@ -74,7 +74,11 @@ public class JdbcMetadataDaoImpl implements IMetadataDao {
 
     @Override
     public List<AlertPublishEvent> listAlertPublishEvent(int size) {
-        return handler.listSubset(AlertPublishEvent.class, size);
+        if (size <= 0) {
+            return handler.list(AlertPublishEvent.class);
+        } else {
+            return handler.listSubset(AlertPublishEvent.class, size);
+        }
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-core/eagle-data-process/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-data-process/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java b/eagle-core/eagle-data-process/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
index e8c40ae..26440e8 100644
--- a/eagle-core/eagle-data-process/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
+++ b/eagle-core/eagle-data-process/src/main/java/org/apache/eagle/dataproc/impl/storm/kafka/KafkaSpoutProvider.java
@@ -87,12 +87,14 @@ public class KafkaSpoutProvider implements StormSpoutProvider {
                 zkRoot + "/" + topic,
                 groupId);
 
-        // transaction zkServers
-        String[] txZkServers = context.hasPath("txZkServers") ? context.getString("txZkServers").split(",") : new String[]{"localhost:2181"};
-        spoutConfig.zkServers = Arrays.asList(txZkServers).stream().map(server -> server.split(":")[0]).collect(Collectors.toList());
-        // transaction zkPort
-        spoutConfig.zkPort = Integer.parseInt(txZkServers[0].split(":")[1]);
-        LOG.info("txZkServers:" + spoutConfig.zkServers + ", zkPort:" + spoutConfig.zkPort);
+        // transaction zkServers to store kafka consumer offset. Default to use storm zookeeper
+        if (context.hasPath("txZkServers")) {
+            String[] txZkServers = context.getString("txZkServers").split(",");
+            spoutConfig.zkServers = Arrays.asList(txZkServers).stream().map(server -> server.split(":")[0]).collect(Collectors.toList());
+            spoutConfig.zkPort = Integer.parseInt(txZkServers[0].split(":")[1]);
+            LOG.info("txZkServers:" + spoutConfig.zkServers + ", zkPort:" + spoutConfig.zkPort);
+        }
+
         // transaction update interval
         spoutConfig.stateUpdateIntervalMs = context.hasPath("transactionStateUpdateMS") ? context.getLong("transactionStateUpdateMS") : 2000;
         // Kafka fetch size

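With this change the transaction zookeeper is only configured when txZkServers is present; otherwise the spout keeps the storm zookeeper defaults, as the new comment notes. A small sketch of the same parsing logic using the Typesafe Config API (the sample value is illustrative):

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class TxZkServersExample {
        public static void main(String[] args) {
            // Illustrative value; leave txZkServers unset to fall back to the storm zookeeper.
            Config context = ConfigFactory.parseString(
                    "txZkServers = \"zk1.example.com:2181,zk2.example.com:2181\"");

            if (context.hasPath("txZkServers")) {
                String[] txZkServers = context.getString("txZkServers").split(",");
                List<String> zkHosts = Arrays.stream(txZkServers)
                        .map(server -> server.split(":")[0])
                        .collect(Collectors.toList());
                int zkPort = Integer.parseInt(txZkServers[0].split(":")[1]);
                System.out.println("txZkServers:" + zkHosts + ", zkPort:" + zkPort);
            } else {
                System.out.println("txZkServers not set; using the storm zookeeper");
            }
        }
    }
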
http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/AbstractDataEnrichLCM.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/AbstractDataEnrichLCM.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/AbstractDataEnrichLCM.java
index 002bf82..3316829 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/AbstractDataEnrichLCM.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/AbstractDataEnrichLCM.java
@@ -37,8 +37,8 @@ public abstract class AbstractDataEnrichLCM<T, K> implements DataEnrichLCM<T, K>
 
     @Override
     public Collection<T> loadExternal() {
-        String eagleServiceHost = config.getString("eagleService.host");
-        Integer eagleServicePort = config.getInt("eagleService.port");
+        String eagleServiceHost = config.getString("service.host");
+        Integer eagleServicePort = config.getInt("service.port");
 
         // load from eagle database
         LOG.info("Load sensitivity information from eagle service " + eagleServiceHost + ":" + eagleServicePort);

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/DataEnrichJob.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/DataEnrichJob.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/DataEnrichJob.java
index 13777bf..aa9b878 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/DataEnrichJob.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/DataEnrichJob.java
@@ -49,7 +49,7 @@ public class DataEnrichJob implements Job {
                 );
             ExternalDataCache.getInstance().setJobResult(lcm.getClass(), map);
         } catch(Exception ex) {
-            LOG.error("Fail to load hbase resource sensitivity data", ex);
+            LOG.error("Fail to load sensitivity data", ex);
         }
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/ExternalDataJoiner.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/ExternalDataJoiner.java b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/ExternalDataJoiner.java
index 1f593a8..8859dd3 100644
--- a/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/ExternalDataJoiner.java
+++ b/eagle-security/eagle-security-common/src/main/java/org/apache/eagle/security/enrich/ExternalDataJoiner.java
@@ -38,7 +38,7 @@ public class ExternalDataJoiner {
 	private String taskId;
 	private Config config;
 
-	private static final String DATA_JOIN_POLL_INTERVALSEC = "dataJoinPollIntervalSec";
+	private static final String DATA_JOIN_POLL_INTERVALSEC = "dataEnrich.dataJoinPollIntervalSec";
 	private static final String QUARTZ_GROUP_NAME = "dataJoiner";
 
 	private final int defaultIntervalSeconds = 60;
@@ -77,8 +77,10 @@ public class ExternalDataJoiner {
 		     .build();
 
 		// for trigger
-		Object interval = jobDataMap.get(DATA_JOIN_POLL_INTERVALSEC);
-        int dataJoinPollIntervalSec = (interval == null ? defaultIntervalSeconds : Integer.parseInt(interval.toString()));
+        int dataJoinPollIntervalSec = defaultIntervalSeconds;
+        if (this.config.hasPath(DATA_JOIN_POLL_INTERVALSEC)) {
+            dataJoinPollIntervalSec = this.config.getInt(DATA_JOIN_POLL_INTERVALSEC);
+        }
 		Trigger trigger = TriggerBuilder.newTrigger()
 			  .withIdentity(jobId + ".trigger", group)
 		      .startNow()

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
index 1cfd068..414765d 100644
--- a/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
+++ b/eagle-security/eagle-security-hbase-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.hbase.HBaseAuditLogAppProvider.xml
@@ -32,7 +32,8 @@
             <name>dataSourceConfig.zkConnection</name>
             <displayName>dataSourceConfig.zkConnection</displayName>
             <value>server.eagle.apache.org</value>
-            <description>zk connection</description>
+            <description>kafka broker zookeeper</description>
+            <required>true</required>
         </property>
         <property>
             <name>dataSourceConfig.zkConnectionTimeoutMS</name>
@@ -47,22 +48,10 @@
             <description>kafka fetch size</description>
         </property>
         <property>
-            <name>dataSourceConfig.transactionZKServers</name>
-            <displayName>dataSourceConfig.transactionZKServers</displayName>
-            <value>server.eagle.apache.org</value>
-            <description>zookeeper server for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
             <name>dataSourceConfig.transactionZKRoot</name>
             <displayName>dataSourceConfig.transactionZKRoot</displayName>
             <value>/consumers</value>
-            <description>offset transaction root</description>
+            <description>spout offset transaction root</description>
         </property>
         <property>
             <name>dataSourceConfig.consumerGroupId</name>
@@ -83,18 +72,6 @@
             <description>scheme class</description>
         </property>
         <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.transactionZKPort</name>
-            <displayName>dataSourceConfig.transactionZKPort</displayName>
-            <value>2181</value>
-            <description>zookeeper server port for offset transaction</description>
-        </property>
-        <property>
             <name>topology.numOfSpoutTasks</name>
             <displayName>topology.numOfSpoutTasks</displayName>
             <value>2</value>
@@ -119,35 +96,12 @@
             <description>number of sink tasks</description>
         </property>
         <property>
-            <name>eagleProps.dataJoinPollIntervalSec</name>
-            <displayName>eagleProps.dataJoinPollIntervalSec</displayName>
+            <name>dataEnrich.dataJoinPollIntervalSec</name>
+            <displayName>Data Join Poll Interval Sec</displayName>
             <value>30</value>
             <description>interval in seconds for polling</description>
         </property>
-        <property>
-            <name>eagleProps.eagleService.host</name>
-            <displayName>eagleProps.eagleService.host</displayName>
-            <value>localhost</value>
-            <description>eagle service host</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.port</name>
-            <displayName>eagleProps.eagleService.port</displayName>
-            <value>8080</value>
-            <description>eagle service port</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.username</name>
-            <displayName>eagleProps.eagleService.username</displayName>
-            <value>admin</value>
-            <description>eagle service username</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.password</name>
-            <displayName>eagleProps.eagleService.password</displayName>
-            <value>secret</value>
-            <description>eagle service password</description>
-        </property>
+
         <property>
             <name>dataSinkConfig.topic</name>
             <displayName>dataSinkConfig.topic</displayName>
@@ -159,6 +113,7 @@
             <displayName>dataSinkConfig.brokerList</displayName>
             <value>sandbox.hortonworks.com:6667</value>
             <description>kafka broker list</description>
+            <required>true</required>
         </property>
         <property>
             <name>dataSinkConfig.serializerClass</name>
@@ -201,6 +156,22 @@
             <timeseries>true</timeseries>
             <columns>
                 <column>
+                    <name>sensitivityType</name>
+                    <type>string</type>
+                </column>
+                <column>
+                    <name>scope</name>
+                    <type>string</type>
+                </column>
+                <column>
+                    <name>user</name>
+                    <type>string</type>
+                </column>
+                <column>
+                    <name>request</name>
+                    <type>string</type>
+                </column>
+                <column>
                     <name>action</name>
                     <type>string</type>
                 </column>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
index bb302c1..8df96df 100644
--- a/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
+++ b/eagle-security/eagle-security-hdfs-auditlog/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HdfsAuditLogAppProvider.xml
@@ -21,44 +21,44 @@
   -->
 
 <application>
-    <type>HdfsAuditLogApplication</type>
+    <type>HDFS_AUDIT_LOG_MONITOR_APP</type>
     <name>Hdfs Audit Log Monitor</name>
     <version>0.5.0-incubating</version>
     <configuration>
         <!-- topology related configurations -->
         <property>
             <name>workers</name>
-            <displayName>workers</displayName>
+            <displayName>storm workers</displayName>
             <value>1</value>
             <description>number of topology workers</description>
         </property>
         <property>
             <name>topology.numOfSpoutTasks</name>
-            <displayName>topology.numOfSpoutTasks</displayName>
+            <displayName>Topology Spout Tasks</displayName>
             <value>2</value>
             <description>number of spout tasks</description>
         </property>
         <property>
             <name>topology.numOfParserTasks</name>
-            <displayName>topology.numOfParserTasks</displayName>
+            <displayName>Topology Parser Tasks</displayName>
             <value>2</value>
             <description>number of parser tasks</description>
         </property>
         <property>
             <name>topology.numOfSensitivityJoinTasks</name>
-            <displayName>topology.numOfSensitivityJoinTasks</displayName>
+            <displayName>Topology Sensitivity JoinTasks</displayName>
             <value>2</value>
             <description>number of sensitivity join tasks</description>
         </property>
         <property>
             <name>topology.numOfIPZoneJoinTasks</name>
-            <displayName>topology.numOfIPZoneJoinTasks</displayName>
+            <displayName>Topology IPZone JoinTasks</displayName>
             <value>2</value>
             <description>number of ip zone join tasks</description>
         </property>
         <property>
             <name>topology.numOfSinkTasks</name>
-            <displayName>topology.numOfSinkTasks</displayName>
+            <displayName>Topology Sink Tasks</displayName>
             <value>2</value>
             <description>number of sink tasks</description>
         </property>
@@ -66,110 +66,81 @@
         <!-- data source configurations -->
         <property>
             <name>dataSourceConfig.topic</name>
-            <displayName>dataSourceConfig.topic</displayName>
+            <displayName>Kafka Topic for Data Consumption</displayName>
             <value>hdfs_audit_log</value>
-            <description>data source topic</description>
+            <description>kafka topic for data consumption</description>
         </property>
         <property>
             <name>dataSourceConfig.zkConnection</name>
-            <displayName>dataSourceConfig.zkConnection</displayName>
-            <value>localhost</value>
-            <description>zk connection</description>
-        </property>
-        <property>
-            <name>dataSourceConfig.txZkServers</name>
-            <displayName>dataSourceConfig.txZkServers</displayName>
+            <displayName>Kafka Zookeeper Connection</displayName>
             <value>localhost:2181</value>
-            <description>zookeeper server for offset transaction</description>
+            <description>kafka broker zk connection</description>
+            <required>true</required>
         </property>
         <property>
             <name>dataSourceConfig.schemeCls</name>
-            <displayName>dataSourceConfig.schemeCls</displayName>
+            <displayName>Kafka Consumer SchemeCls</displayName>
             <value>storm.kafka.StringScheme</value>
             <description>scheme class</description>
+            <required>true</required>
         </property>
 
         <!-- data enrich configurations -->
         <property>
             <name>dataEnrich.dataJoinPollIntervalSec</name>
-            <displayName>eagleProps.dataJoinPollIntervalSec</displayName>
+            <displayName>Data Join Poll Interval Sec</displayName>
             <value>30</value>
             <description>interval in seconds for polling</description>
         </property>
 
-        <!-- eagle service configurations -->
-        <property>
-            <name>eagleService.host</name>
-            <displayName>eagleService.host</displayName>
-            <value>localhost</value>
-            <description>eagle service host</description>
-        </property>
-        <property>
-            <name>eagleService.port</name>
-            <displayName>eagleService.port</displayName>
-            <value>9090</value>
-            <description>eagle service port</description>
-        </property>
-        <property>
-            <name>eagleService.username</name>
-            <displayName>eagleService.username</displayName>
-            <value>admin</value>
-            <description>eagle service username</description>
-        </property>
-        <property>
-            <name>eagleService.password</name>
-            <displayName>eagleService.password</displayName>
-            <value>secret</value>
-            <description>eagle service password</description>
-        </property>
-
         <!-- data sink configurations -->
         <property>
             <name>dataSinkConfig.topic</name>
-            <displayName>dataSinkConfig.topic</displayName>
+            <displayName>Kafka Topic for Parsed Data Sink</displayName>
             <value>hdfs_audit_log_enriched</value>
             <description>topic for kafka data sink</description>
         </property>
         <property>
             <name>dataSinkConfig.brokerList</name>
-            <displayName>dataSinkConfig.brokerList</displayName>
+            <displayName>Kafka BrokerList for Data Sink</displayName>
             <value>localhost:6667</value>
             <description>kafka broker list</description>
+            <required>true</required>
         </property>
         <property>
             <name>dataSinkConfig.serializerClass</name>
-            <displayName>dataSinkConfig.serializerClass</displayName>
+            <displayName>Kafka Producer SerializerClass</displayName>
             <value>kafka.serializer.StringEncoder</value>
             <description>serializer class Kafka message value</description>
         </property>
         <property>
             <name>dataSinkConfig.keySerializerClass</name>
-            <displayName>dataSinkConfig.keySerializerClass</displayName>
+            <displayName>Kafka Producer keySerializerClass</displayName>
             <value>kafka.serializer.StringEncoder</value>
             <description>serializer class Kafka message key</description>
         </property>
 
         <property>
             <name>dataSinkConfig.producerType</name>
-            <displayName>dataSinkConfig.keySerializerClass</displayName>
+            <displayName>Kafka Producer Type</displayName>
             <value>async</value>
             <description>whether the messages are sent asynchronously in a background thread</description>
         </property>
         <property>
             <name>dataSinkConfig.numBatchMessages</name>
-            <displayName>dataSinkConfig.numBatchMessages</displayName>
+            <displayName>Kafka Producer NumBatchMessages</displayName>
             <value>4096</value>
             <description>number of messages to send in one batch when using async mode</description>
         </property>
         <property>
             <name>dataSinkConfig.maxQueueBufferMs</name>
-            <displayName>dataSinkConfig.maxQueueBufferMs</displayName>
+            <displayName>Kafka Producer MaxQueueBufferMs</displayName>
             <value>5000</value>
             <description>maximum time to buffer data when using async mode</description>
         </property>
         <property>
             <name>dataSinkConfig.requestRequiredAcks</name>
-            <displayName>dataSinkConfig.requestRequiredAcks</displayName>
+            <displayName>Kafka Producer RequestRequiredAcks</displayName>
             <value>0</value>
             <description>value controls when a produce request is considered completed</description>
         </property>

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/3eacc828/eagle-security/eagle-security-hive/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HiveQueryMonitoringAppProvider.xml
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hive/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HiveQueryMonitoringAppProvider.xml b/eagle-security/eagle-security-hive/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HiveQueryMonitoringAppProvider.xml
index 931bee6..1f61881 100644
--- a/eagle-security/eagle-security-hive/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HiveQueryMonitoringAppProvider.xml
+++ b/eagle-security/eagle-security-hive/src/main/resources/META-INF/providers/org.apache.eagle.security.auditlog.HiveQueryMonitoringAppProvider.xml
@@ -95,36 +95,12 @@
             <description>number of sink tasks</description>
         </property>
         <property>
-            <name>eagleProps.dataJoinPollIntervalSec</name>
-            <displayName>eagleProps.dataJoinPollIntervalSec</displayName>
+            <name>dataEnrich.dataJoinPollIntervalSec</name>
+            <displayName>Data Join Poll Interval Sec</displayName>
             <value>30</value>
             <description>interval in seconds for polling</description>
         </property>
         <property>
-            <name>eagleProps.eagleService.host</name>
-            <displayName>eagleProps.eagleService.host</displayName>
-            <value>localhost</value>
-            <description>eagle service host</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.port</name>
-            <displayName>eagleProps.eagleService.port</displayName>
-            <value>8080</value>
-            <description>eagle service port</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.username</name>
-            <displayName>eagleProps.eagleService.username</displayName>
-            <value>admin</value>
-            <description>eagle service username</description>
-        </property>
-        <property>
-            <name>eagleProps.eagleService.password</name>
-            <displayName>eagleProps.eagleService.password</displayName>
-            <value>secret</value>
-            <description>eagle service password</description>
-        </property>
-        <property>
             <name>dataSinkConfig.topic</name>
             <displayName>dataSinkConfig.topic</displayName>
             <value>hive_query_parsed</value>