You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metron.apache.org by ce...@apache.org on 2015/12/17 21:46:03 UTC

[01/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Repository: incubator-metron
Updated Branches:
  refs/heads/master 34faa3004 -> 70e84c031


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/hbase-site.xml
new file mode 100644
index 0000000..8d812a9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/hbase-site.xml
@@ -0,0 +1,131 @@
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>dfs.domain.socket.path</name>
+    <value>/var/run/hdfs/dn_socket</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/pom.xml b/opensoc-streaming/pom.xml
index 8f48583..bbd4e2e 100644
--- a/opensoc-streaming/pom.xml
+++ b/opensoc-streaming/pom.xml
@@ -14,7 +14,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>com.opensoc</groupId>
 	<artifactId>OpenSOC-Streaming</artifactId>
-	<version>0.3BETA-SNAPSHOT</version>
+	<version>0.6BETA</version>
 	<packaging>pom</packaging>
 	<name>OpenSOC-Streaming</name>
 	<description>Stream analytics for OpenSOC</description>
@@ -30,6 +30,7 @@
 		<global_junit_version>4.4</global_junit_version>
 		<global_guava_version>18.0</global_guava_version>
 		<global_json_schema_validator_version>2.2.5</global_json_schema_validator_version>
+		<global_slf4j_version>1.7.7</global_slf4j_version>
 	</properties>
 	<licenses>
 		<license>
@@ -49,7 +50,7 @@
 			</properties>
 		</developer>
 	</developers>
-	
+
 
 	<modules>
 		<module>OpenSOC-Common</module>
@@ -59,6 +60,7 @@
 		<module>OpenSOC-Alerts</module>
 		<module>OpenSOC-DataLoads</module>
 		<module>OpenSOC-Topologies</module>
+		<module>OpenSOC-Pcap_Service</module>
 	</modules>
 	<dependencies>
 		<dependency>
@@ -75,6 +77,7 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.18</version>
 				<configuration>
 					<systemProperties>
 						<property>
@@ -98,6 +101,7 @@
 			<plugin>
 				<groupId>org.apache.maven.plugins</groupId>
 				<artifactId>maven-pmd-plugin</artifactId>
+				<version>3.3</version>
 				<configuration>
 					<targetJdk>1.7</targetJdk>
 				</configuration>
@@ -110,4 +114,11 @@
 			</plugin>
 		</plugins>
 	</reporting>
+	<repositories>
+
+		<repository>
+			<id>clojars.org</id>
+			<url>http://clojars.org/repo</url>
+		</repository>
+	</repositories>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/readme.md b/opensoc-streaming/readme.md
index d70667f..c912153 100644
--- a/opensoc-streaming/readme.md
+++ b/opensoc-streaming/readme.md
@@ -1,137 +1,13 @@
 #Current Build
 
-The latest build of OpenSOC-Streaming is 0.3BETA.  We are still in the process of merging/porting additional
-features from our production code base into this open source release.  This release will be followed by
-a number of additional beta releases until the port is complete.  We will also work on getting additional 
-documentation and user/developer guides to the community as soon as we can.  At this time we offer no support
-for the beta software, but will try to respond to requests as promptly as we can.
+The latest build of OpenSOC-Streaming is 0.6BETA.
 
-# OpenSOC-Streaming
-
-Extensible set of Storm topologies and topology attributes for streaming, enriching, indexing, and storing telemetry in Hadoop.  General information on OpenSOC is available at www.getopensoc.com
-
-For OpenSOC FAQ please read the following wiki entry:  https://github.com/OpenSOC/opensoc-streaming/wiki/OpenSOC-FAQ
-
-
-# Usage Instructions
-
-## Message Parser Bolt
-
-Bolt for parsing telemetry messages into a JSON format
-
-```
-TelemetryParserBolt parser_bolt = new TelemetryParserBolt()
-				.withMessageParser(new BasicSourcefireParser())
-				.withOutputFieldName(topology_name);
-```
-				
-###Parameters:
-
-MesageParser: parsers a raw message to JSON. Parsers listed below are available
-- BasicSourcefireParser: will parse a Sourcefire message to JSON
-- BasicBroParser: will parse a Bro message to JSON
-
-OutputFieldName: name of the output field emitted by the bolt
-
-## Telemetry Indexing Bolt
-
-Bolt for indexing JSON telemetry messages in ElasticSearch or Solr
-
-```
-TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
-				.withIndexIP(ElasticSearchIP).withIndexPort(elasticSearchPort)
-				.withClusterName(ElasticSearchClusterName)
-				.withIndexName(ElasticSearchIndexName)
-				.withDocumentName(ElasticSearchDocumentName).withBulk(bulk)
-				.withOutputFieldName(topology_name)
-				.withIndexAdapter(new ESBaseBulkAdapter());
-```
-
-###Parameters:
-
-IndexAdapter: adapter and strategy for indexing.  Adapters listed below are available
-- ESBaseBulkAdapter: adapter for bulk loading telemetry into a single index in ElasticSearch
-- ESBulkRotatingAdapter: adapter for bulk loading telemetry into Elastic search, rotating once per hour, and applying a single alias to all rotated indexes
-- SolrAdapter (stubbed out, on roadmap)
-
-OutputFieldName: name of the output field emitted by the bolt
-
-IndexIP: IP of ElasticSearch/Solr
-
-IndexPort: Port of ElasticSearch/Solr
-
-ClusterName: ClusterName of ElasticSearch/Solr
-
-IndexName: IndexName of ElasticSearch/Solr
-
-DocumentName: DocumentName of ElasticSearch/Solr
-
-Bulk: number of documents to bulk load into ElasticSearch/Solr.  If no value is passed, default is 10
+We are still in the process of merging/porting additional features from our production code base into this open source release. This release will be followed by a number of additional beta releases until the port is complete. We will also work on getting additional documentation and user/developer guides to the community as soon as we can. At this time we offer no support for the beta software, but will try to respond to requests as promptly as we can.
 
-## Enrichment Bolt
-
-This bolt is for enriching telemetry messages with additional metadata from external data sources.  At the time of the release the data sources supported are GeoIP (MaxMind GeoLite), WhoisDomain, Collective Intelligence Framework (CIF), and Lancope. In order to use the bolt the data sources have to be setup and data has to be bulk-loaded into them.  The information on bulk-loading data sources and making them interoperable with the enrichment bolt is provided in the following wiki entries:
-
-- GeoIP:  https://github.com/OpenSOC/opensoc-streaming/wiki/Setting-up-GeoLite-Data
-- WhoisDomain: https://github.com/OpenSOC/opensoc-streaming/wiki/Setting-up-Whois-Data
-- CIF Feeds: https://github.com/OpenSOC/opensoc-streaming/wiki/Setting-up-CIF-Data
-- Lancope Metadata: https://github.com/OpenSOC/opensoc-streaming/wiki/Setting-up-Lancope-data
- 
-```
-Map<String, Pattern> patterns = new HashMap<String, Pattern>();
-		patterns.put("originator_ip_regex", Pattern.compile("ip_src_addr\":\"(.*?)\""));
-		patterns.put("responder_ip_regex", Pattern.compile("ip_dst_addr\":\"(.*?)\""));
-
-GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter("IP", 0, "test", "test");
-
-GenericEnrichmentBolt geo_enrichment = new GenericEnrichmentBolt()
-				.withEnrichmentTag(geo_enrichment_tag)
-				.withOutputFieldName(topology_name).withAdapter(geo_adapter)
-				.withMaxTimeRetain(MAX_TIME_RETAIN)
-				.withMaxCacheSize(MAX_CACHE_SIZE).withPatterns(patterns);
-```
-
-###Parameters:
-
-GeoAdapter: adapter for the MaxMind GeoLite dataset.  Adapters listed below are available
-- GeoMysqlAdapter: pulls geoIP data from MqSQL database
-- GeoPosgreSQLAdapter: pulls geoIP data from Posgress database (on road map, not yet available)
-
-WhoisAdapter: adapter for whois database.  Adapters listed below are available
-- WhoisHBaseAdapter: adapter for HBase
-
-CIFAdapter: Hortonworks to document
-
-LancopeAdapter: Hortonworks to document
-
-originator_ip_regex: regex to extract the source ip form message
-
-responder_ip_regex: regex to extract dest ip from message
-The single bolt is currently undergoing testing and will be uploaded shortly
-
-geo_enrichment_tag: JSON field indicating how to tag the original message with the enrichment... {original_message:some_message, {geo_enrichment_tag:{from:xxx},{to:xxx}}}
-
-MAX_TIME_RETAIN: this bolt utilizes in-memory cache. this variable (in minutes) indicates now long to retain each entry in the cache
-
-MAX_CACHE_SIZE: this value defines the maximum size of the cache after which entries are evicted from cache
-
-OutputFieldName: name of the output field emitted by the bolt
-
-
-## Internal Test Spout
-
-We provide a capability to test a topology with messages stored in a file and packaged in a jar that is sent to storm.  This functionality is exposed through a special spout that is able to replay test messages into a topology.
-
-```
-GenericInternalTestSpout test_spout = new GenericInternalTestSpout()
-				.withFilename("sourcefire_enriched").withRepeating(false)
-				.withMilisecondDelay(100);
-```
-
-###Parameters
+# OpenSOC-Streaming
 
-Filename: name of a file in a jar you want to replay
+Extensible set of Storm topologies and topology attributes for streaming, enriching, indexing, and storing telemetry in Hadoop.  General information on OpenSOC is available at http://opensoc.github.io
 
-Repeating: do you want to repeatedly play messages or stop after all the messages in the file have been read
+# Documentation
 
-WithMilisecondDelay: the amount of the delay (sleep) between replayed messages
+Please see documentation within each individual module for description and usage instructions. Sample topologies are provided under OpenSOC_Topologies to get you started with the framework. We assume prior knowledge of Hadoop, Storm, Kafka, and HBase.


[19/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/pom.xml b/opensoc-streaming/OpenSOC-EnrichmentAdapters/pom.xml
index 2f52ad8..fb21130 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/pom.xml
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/pom.xml
@@ -15,22 +15,24 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-EnrichmentAdapters</artifactId>
 
 	<properties>
+       <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>		
 		<mysql.version>5.1.31</mysql.version>
 		<slf4j.version>1.7.7</slf4j.version>
 		<hbase.client.version>0.96.1-hadoop2</hbase.client.version>
-		<storm.hdfs.version>0.1.2</storm.hdfs.version>	
+		<storm.hdfs.version>0.1.2</storm.hdfs.version>
 		<guava.version>17.0</guava.version>
 	</properties>
 	<dependencies>
 		<dependency>
 			<groupId>com.opensoc</groupId>
 			<artifactId>OpenSOC-Common</artifactId>
-			<version>${parent.version}</version>
+			<version>${project.parent.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.slf4j</groupId>
@@ -62,12 +64,24 @@
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-hdfs</artifactId>
 			<version>${global_hadoop_version}</version>
+			  <exclusions>
+				<exclusion>
+				   <artifactId>servlet-api</artifactId>
+				   <groupId>javax.servlet</groupId>
+				  </exclusion>
+		    </exclusions>					
 		</dependency>
 		<dependency>
 			<groupId>org.apache.storm</groupId>
 			<artifactId>storm-core</artifactId>
 			<version>${global_storm_version}</version>
 			<scope>provided</scope>
+			  <exclusions>
+				<exclusion>
+				   <artifactId>servlet-api</artifactId>
+				   <groupId>javax.servlet</groupId>
+				  </exclusion>
+		    </exclusions>					
 		</dependency>
 		<dependency>
 			<groupId>com.google.guava</groupId>
@@ -78,56 +92,74 @@
 			<groupId>org.apache.hadoop</groupId>
 			<artifactId>hadoop-common</artifactId>
 			<version>${global_hadoop_version}</version>
+			<exclusions>
+				<exclusion>
+				   <artifactId>servlet-api</artifactId>
+				   <groupId>javax.servlet</groupId>
+				  </exclusion>
+		    </exclusions>			
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>${global_junit_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>commons-validator</groupId>
+			<artifactId>commons-validator</artifactId>
+			<version>1.4.0</version>
 		</dependency>
-  		<dependency>
-  			<groupId>junit</groupId>
-  			<artifactId>junit</artifactId>
-  			<version>3.8.2</version>
-  		</dependency>	
-  		<dependency>
-  	    <groupId>commons-validator</groupId>
-    <artifactId>commons-validator</artifactId>
-    <version>1.4.0</version>
-    </dependency>	
-  		
-  			
+
 	</dependencies>
-   <reporting>
-    <plugins>
-     <plugin>
-     <groupId>org.apache.maven.plugins</groupId>
-     <artifactId>maven-surefire-plugin</artifactId>
-     	<configuration>
-	   		<systemProperties>
-	   		    <property>
-	   		         <name>mode</name>
-	   		         <value>local</value>
-	   		    </property>
-	   		</systemProperties>
-		</configuration>
-     </plugin>
-	<!-- Normally, dependency report takes time, skip it -->
-      <plugin>
-		<groupId>org.apache.maven.plugins</groupId>
-		<artifactId>maven-project-info-reports-plugin</artifactId>
-		<version>2.7</version>
-	 
-		<configuration>
-	          <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
-		</configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>emma-maven-plugin</artifactId>
-        <version>1.0-alpha-3</version>
-      </plugin>    
-      <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-pmd-plugin</artifactId>
-          <configuration>
-            <targetJdk>1.7</targetJdk>
-	  </configuration>
-        </plugin>        
-    </plugins>
-  </reporting>  	
+	<reporting>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<configuration>
+					<systemProperties>
+						<property>
+							<name>mode</name>
+							<value>global</value>
+						</property>
+					</systemProperties>
+				</configuration>
+			</plugin>
+			<!-- Normally, dependency report takes time, skip it -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-project-info-reports-plugin</artifactId>
+				<version>2.7</version>
+
+				<configuration>
+					<dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>emma-maven-plugin</artifactId>
+				<version>1.0-alpha-3</version>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-pmd-plugin</artifactId>
+				<configuration>
+					<targetJdk>1.7</targetJdk>
+				</configuration>
+			</plugin>
+		</plugins>
+	</reporting>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.1</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/readme.md b/opensoc-streaming/OpenSOC-EnrichmentAdapters/readme.md
new file mode 100644
index 0000000..7c08218
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/readme.md
@@ -0,0 +1,125 @@
+#OpenSOC-Enrichments
+
+##Module Description
+
+This module enables enrichment of message metadata fields with additional information from various enrichment sources.  Currently there is only a limited number of enrichments available, but this is an extensible framework that can be extended with additional enrichments.  Enrichments currently available are geo, whois, hosts, and CIF.
+
+##Message Format
+
+Enrichment bolts are designed to go after the parser bolts.  Parser bolts will parse the telemetry, taking it from its native format and producing a standard JSON that would look like so:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"additional-field 1": xxx,
+}
+
+}
+```
+
+A single enrichment bolt would enrich the message and produce a JSON enrichment and attach it to the message.  Enrichments are stackable so multiple enrichments can be attached sequentially after a single parser bolt.  Stacked enrichments would produce messages under the "enrichment" tag and attach it to the message like so:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"additional-field 1": xxxx,
+},
+"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"}
+
+}
+```
+
+##Enrichment Sources
+
+Each enrichment has to have an enrichment source which can serve as a lookup table for enriching relevant message fields.  In order to minimize the use of additional platforms and tools we primarily try to rely on HBase as much as possible to store the enrichment information for lookup by key.  In order to use HBase we have to pre-process the enrichment feeds for bulk-loading into HBase with specific key format optimized for retrieval as well as utilize caches within the enrichment bolts to be able to provide enrichments real-time.  Our wiki contains information on how to setup the environment, pre-process feeds, and plug in the enrichment sources.
+
+##Enrichment Bolt
+
+The enrichment bolt is designed to be extensible to be re-used for all kinds of enrichment processes.  The bolt signature for declaration in a storm topology is as follows:
+
+
+
+```
+GenericEnrichmentBolt geo_enrichment = new GenericEnrichmentBolt()
+.withEnrichmentTag(
+config.getString("bolt.enrichment.geo.enrichment_tag"))
+.withAdapter(geo_adapter)
+.withMaxTimeRetain(
+config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES"))
+.withMaxCacheSize(
+config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM"))
+.withKeys(geo_keys).withMetricConfiguration(config);
+
+```
+
+EnrichmentTag - Name of the enrichment (geo, whois, hosts, etc)
+Keys - Keys which this enrichment is able to enrich (hosts field for hosts enrichment, source_ip, dest_ip, for geo enrichment, etc)
+MaxTimeToRetain & MaxCacheSize - define the caching policy of the enrichment bolt
+Adapter - which adapter to use with the enrichment bolt instance
+
+###Geo Adapter
+Geo adapter is able to do geo enrichment on hosts and destination IPs.  The open source version of the geo adapter uses the free Geo feeds from MaxMind.  The format of these feeds does not easily lend itself to a no-sql DB so this adapter is designed to work with MySQL.  But it is extensible enough to be made to work with a variety of other back ends.
+
+The signature of a geo adapter is as follows:
+
+```
+GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter(
+config.getString("mysql.ip"), config.getInt("mysql.port"),
+config.getString("mysql.username"),
+config.getString("mysql.password"),
+config.getString("bolt.enrichment.geo.adapter.table"));
+
+```
+
+###Hosts Adapter
+The hosts adapter is designed to enrich message format with the static host information that can be read from a standard text file.  This adapter is intended for use with a network crawling script that can identify all customer assets and place them in a text file.  For example, this script would identify all workstations, printers, appliances, etc.  Then if any of these assets are seen in the telemetry messages flowing through the adapter this enrichment would fire and the relevant known information about a host would be attached.  We are currently working on porting this adapter to work with HBase, but this work is not ready yet.  The known hosts file is located under the /etc/whitelists config directory of OpenSOC.
+
+The signature of the hosts adapter is as follows:
+
+```
+Map<String, JSONObject> known_hosts = SettingsLoader
+.loadKnownHosts(hosts_path);
+
+HostFromPropertiesFileAdapter host_adapter = new HostFromPropertiesFileAdapter(
+known_hosts);
+
+```
+* The source and dest ips refer to the name of the message JSON key where the host information is located
+
+###Whois Adapter
+Whois adapter enriches the host name with additional whois information obtained from our proprietary Cisco feed.  The enricher itself is provided in this open source distribution, but the feed is not.  You have to have your own feed in order to use it.  Alternatively, you can contact us for providing you with this feed, but we would have to charge you a fee (we can't distribute it for free). The implementation of the whois enrichment we provide works with HBase.
+
+The signature of the whois adapter is as follows:
+
+```
+
+EnrichmentAdapter whois_adapter = new WhoisHBaseAdapter(
+config.getString("bolt.enrichment.whois.hbase.table.name"),
+config.getString("kafka.zk.list"),
+config.getString("kafka.zk.port"));
+```
+
+###CIF Adapter
+CIF adapter is designed to take in CIF feeds and cross-reference them against every message processed by Storm.  If there is a hit then the relevant information is attached to the message.  
+
+The signature of the CIF adapter is as follows:
+
+```
+CIFHbaseAdapter = new CIFHbaseAdapter(config
+.getString("kafka.zk.list"), config
+.getString("kafka.zk.port"), config
+.getString("bolt.enrichment.cif.tablename")))
+```
+
+##Stacking Enrichments
+Enrichments can be stacked.  By default each enrichment bolt listens on the "message" stream.  In order to create and stack enrichment bolts create a new bolt and instantiate the appropriate adapter.  You can look at our sample topologies to see how enrichments can be stacked
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapter.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapter.java
index cfb6673..d62632b 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapter.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapter.java
@@ -35,7 +35,7 @@ public class GeoMysqlAdapter extends AbstractGeoAdapter {
 	private String _username;
 	private String _password;
 	private String _tablename;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
+	private InetAddressValidator ipvalidator = new InetAddressValidator();
 
 	public GeoMysqlAdapter(String ip, int port, String username,
 			String password, String tablename) {
@@ -141,6 +141,8 @@ public class GeoMysqlAdapter extends AbstractGeoAdapter {
 			jo.put("longitude", resultSet.getString("longitude"));
 			jo.put("dmaCode", resultSet.getString("dmaCode"));
 			jo.put("locID", resultSet.getString("locID"));
+			
+			jo.put("location_point", jo.get("longitude") + "," + jo.get("latitude"));
 
 			_LOG.debug("Returning enrichment: " + jo);
 
@@ -172,7 +174,6 @@ public class GeoMysqlAdapter extends AbstractGeoAdapter {
 
 			_LOG.info("[OpenSOC] Set JDBC connection....");
 
-
 			return true;
 		} catch (Exception e) {
 			e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/host/HostFromPropertiesFileAdapter.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
index b393fb5..e6f693a 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
@@ -21,6 +21,7 @@ import java.util.Map;
 
 import org.json.simple.JSONObject;
 
+@SuppressWarnings("serial")
 public class HostFromPropertiesFileAdapter extends AbstractHostAdapter {
 	
 	Map<String, JSONObject> _known_hosts;
@@ -40,7 +41,8 @@ public class HostFromPropertiesFileAdapter extends AbstractHostAdapter {
 			return false;
 	}
 
-	@Override
+	@SuppressWarnings("unchecked")
+    @Override
 	public JSONObject enrich(String metadata) {
 		
 		

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/AbstractThreatAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/AbstractThreatAdapter.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/AbstractThreatAdapter.java
new file mode 100644
index 0000000..395ee48
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/AbstractThreatAdapter.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.opensoc.enrichment.adapters.threat;
+
+import java.io.Serializable;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.opensoc.enrichment.interfaces.EnrichmentAdapter;
+
+public abstract class AbstractThreatAdapter implements EnrichmentAdapter,Serializable{
+
+	
+	private static final long serialVersionUID = 1524030932856141771L;
+	protected static final Logger LOG = LoggerFactory
+			.getLogger(AbstractThreatAdapter.class);
+	
+	abstract public boolean initializeAdapter();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/ThreatHbaseAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/ThreatHbaseAdapter.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/ThreatHbaseAdapter.java
new file mode 100644
index 0000000..97d02d4
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/threat/ThreatHbaseAdapter.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.opensoc.enrichment.adapters.threat;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.log4j.Logger;
+
+@SuppressWarnings("unchecked")
+public class ThreatHbaseAdapter extends AbstractThreatAdapter {
+
+	private static final long serialVersionUID = 1L;
+	private String _tableName;
+	private HTableInterface table;
+	private String _quorum;
+	private String _port;
+
+	public ThreatHbaseAdapter(String quorum, String port, String tableName) {
+		_quorum = quorum;
+		_port = port;
+		_tableName = tableName;
+	}
+
+	/** The LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(ThreatHbaseAdapter.class);
+
+	public JSONObject enrich(String metadata) {
+
+		JSONObject output = new JSONObject();
+		LOGGER.debug("=======Looking Up For:" + metadata);
+		output.putAll(getThreatObject(metadata));
+
+		return output;
+	}
+
+	@SuppressWarnings({ "rawtypes", "deprecation" })
+	protected Map getThreatObject(String key) {
+
+		LOGGER.debug("=======Pinging HBase For:" + key);
+		
+		Get get = new Get(Bytes.toBytes(key));
+		Result rs;
+		Map output = new HashMap();
+
+		try {
+			rs = table.get(get);
+
+			if (!rs.isEmpty()) {
+				byte[] source_family = Bytes.toBytes("source");
+				JSONParser parser = new JSONParser();
+				
+				Map<byte[], byte[]> sourceFamilyMap = rs.getFamilyMap(source_family);
+				
+				for (Map.Entry<byte[], byte[]> entry  : sourceFamilyMap.entrySet()) {
+					String k = Bytes.toString(entry.getKey());
+					LOGGER.debug("=======Found intel from source: " + k);
+					output.put(k,parser.parse(Bytes.toString(entry.getValue())));
+	            }
+			}
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		} catch (ParseException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		return output;
+	}
+
+	@Override
+	public boolean initializeAdapter() {
+
+		// Initialize HBase Table
+		Configuration conf = null;
+		conf = HBaseConfiguration.create();
+		conf.set("hbase.zookeeper.quorum", _quorum);
+		conf.set("hbase.zookeeper.property.clientPort", _port);
+
+		try {
+			LOGGER.debug("=======Connecting to HBASE===========");
+			LOGGER.debug("=======ZOOKEEPER = "
+					+ conf.get("hbase.zookeeper.quorum"));
+			HConnection connection = HConnectionManager.createConnection(conf);
+			table = connection.getTable(_tableName);
+			return true;
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			LOGGER.debug("=======Unable to Connect to HBASE===========");
+			e.printStackTrace();
+		}
+
+		return false;
+	}
+
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapter.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapter.java
index 838f8fe..503618a 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapter.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapter.java
@@ -18,6 +18,7 @@
 package com.opensoc.enrichment.adapters.whois;
 
 import java.io.IOException;
+import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -29,6 +30,9 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.json.simple.JSONObject;
 
+import com.google.common.base.Joiner;
+import com.opensoc.tldextractor.BasicTldExtractor;
+
 public class WhoisHBaseAdapter extends AbstractWhoisAdapter {
 
 	/**
@@ -39,6 +43,7 @@ public class WhoisHBaseAdapter extends AbstractWhoisAdapter {
 	private String _table_name;
 	private String _quorum;
 	private String _port;
+	private BasicTldExtractor tldex = new BasicTldExtractor();
 
 	public WhoisHBaseAdapter(String table_name, String quorum, String port) {
 		_table_name = table_name;
@@ -88,11 +93,13 @@ public class WhoisHBaseAdapter extends AbstractWhoisAdapter {
 	}
 
 	@SuppressWarnings({ "unchecked", "deprecation" })
-	public JSONObject enrich(String metadata) {
+	public JSONObject enrich(String metadataIn) {
+		
+		String metadata = tldex.extract2LD(metadataIn);
 
 		LOG.trace("[OpenSOC] Pinging HBase For:" + metadata);
 
-
+        
 		JSONObject output = new JSONObject();
 		JSONObject payload = new JSONObject();
 
@@ -108,12 +115,22 @@ public class WhoisHBaseAdapter extends AbstractWhoisAdapter {
 			output.put("whois", payload);
 
 		} catch (IOException e) {
-			output.put(metadata, "{}");
+			payload.put(metadata, "{}");
+			output.put("whois", payload);
 			e.printStackTrace();
 		}
 
 		return output;
 
 	}
+	
+//	private String format(String input) {
+//		String output = input;
+//		String[] tokens = input.split("\\.");
+//		if(tokens.length > 2) {
+//			output = Joiner.on(".").join(Arrays.copyOfRange(tokens, 1, tokens.length));;
+//		}
+//		return output;
+//	}
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/AbstractEnrichmentBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/AbstractEnrichmentBolt.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/AbstractEnrichmentBolt.java
index f7aa0fa..be1ef96 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/AbstractEnrichmentBolt.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/AbstractEnrichmentBolt.java
@@ -51,8 +51,8 @@ public abstract class AbstractEnrichmentBolt extends BaseRichBolt {
 	protected String _OutputFieldName;
 
 	protected String _enrichment_tag;
-	protected Long _MAX_CACHE_SIZE;
-	protected Long _MAX_TIME_RETAIN;
+	protected Long _MAX_CACHE_SIZE_OBJECTS_NUM;
+	protected Long _MAX_TIME_RETAIN_MINUTES;
 
 	// JSON Keys to be enriched
 	protected List<String> _jsonKeys;
@@ -86,10 +86,10 @@ public abstract class AbstractEnrichmentBolt extends BaseRichBolt {
 			throw new IllegalStateException("OutputFieldName must be specified");
 		if (this._enrichment_tag == null)
 			throw new IllegalStateException("enrichment_tag must be specified");
-		if (this._MAX_CACHE_SIZE == null)
-			throw new IllegalStateException("MAX_CACHE_SIZE must be specified");
-		if (this._MAX_TIME_RETAIN == null)
-			throw new IllegalStateException("MAX_TIME_RETAIN must be specified");
+		if (this._MAX_CACHE_SIZE_OBJECTS_NUM == null)
+			throw new IllegalStateException("MAX_CACHE_SIZE_OBJECTS_NUM must be specified");
+		if (this._MAX_TIME_RETAIN_MINUTES == null)
+			throw new IllegalStateException("MAX_TIME_RETAIN_MINUTES must be specified");
 		if (this._adapter == null)
 			throw new IllegalStateException("Adapter must be specified");
 		if (this._jsonKeys == null)
@@ -102,8 +102,8 @@ public abstract class AbstractEnrichmentBolt extends BaseRichBolt {
 			}
 		};
 
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE)
-				.expireAfterWrite(_MAX_TIME_RETAIN, TimeUnit.MINUTES)
+		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
+				.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
 				.build(loader);
 
 		boolean success = _adapter.initializeAdapter();

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/GenericEnrichmentBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/GenericEnrichmentBolt.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/GenericEnrichmentBolt.java
index 2735a51..37c151f 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/GenericEnrichmentBolt.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/java/com/opensoc/enrichment/common/GenericEnrichmentBolt.java
@@ -34,9 +34,9 @@ import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
 
 import com.opensoc.enrichment.interfaces.EnrichmentAdapter;
+import com.opensoc.helpers.topology.ErrorGenerator;
 import com.opensoc.json.serialization.JSONEncoderHelper;
 import com.opensoc.metrics.MetricReporter;
-import com.opensoc.topologyhelpers.ErrorGenerator;
 
 /**
  * Uses an adapter to enrich telemetry messages with additional metadata
@@ -99,24 +99,24 @@ public class GenericEnrichmentBolt extends AbstractEnrichmentBolt {
 	}
 
 	/**
-	 * @param MAX_CACHE_SIZE
+	 * @param MAX_CACHE_SIZE_OBJECTS_NUM
 	 *            Maximum size of cache before flushing
 	 * @return Instance of this class
 	 */
 
-	public GenericEnrichmentBolt withMaxCacheSize(long MAX_CACHE_SIZE) {
-		_MAX_CACHE_SIZE = MAX_CACHE_SIZE;
+	public GenericEnrichmentBolt withMaxCacheSize(long MAX_CACHE_SIZE_OBJECTS_NUM) {
+		_MAX_CACHE_SIZE_OBJECTS_NUM = MAX_CACHE_SIZE_OBJECTS_NUM;
 		return this;
 	}
 
 	/**
-	 * @param MAX_TIME_RETAIN
+	 * @param MAX_TIME_RETAIN_MINUTES
 	 *            Maximum time to retain cached entry before expiring
 	 * @return Instance of this class
 	 */
 
-	public GenericEnrichmentBolt withMaxTimeRetain(long MAX_TIME_RETAIN) {
-		_MAX_TIME_RETAIN = MAX_TIME_RETAIN;
+	public GenericEnrichmentBolt withMaxTimeRetain(long MAX_TIME_RETAIN_MINUTES) {
+		_MAX_TIME_RETAIN_MINUTES = MAX_TIME_RETAIN_MINUTES;
 		return this;
 	}
 
@@ -186,6 +186,11 @@ public class GenericEnrichmentBolt extends AbstractEnrichmentBolt {
 							+ "not present in message " + message);
 					continue;
 				}
+				
+				// If the field is empty, no need to enrich
+				if ( jsonvalue.length() == 0) {
+					continue;
+				}
 
 				JSONObject enrichment = cache.getUnchecked(jsonvalue);
 				LOG.trace("[OpenSOC] Enriched: " + jsonkey + " -> "
@@ -239,7 +244,7 @@ public class GenericEnrichmentBolt extends AbstractEnrichmentBolt {
 				failCounter.inc();
 			}
 			
-			JSONObject error = ErrorGenerator.generateErrorMessage("Enrichment problem: " + in_json, e.toString());
+			JSONObject error = ErrorGenerator.generateErrorMessage("Enrichment problem: " + in_json, e);
 			_collector.emit("error", new Values(error));
 		}
 		

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/resources/hbase-site.xml
index dc7cba5..8d812a9 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/resources/hbase-site.xml
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/main/resources/hbase-site.xml
@@ -1,90 +1,131 @@
-<!--Tue Feb 11 02:34:08 2014 -->
-<configuration>
-
-	<property>
-		<name>hbase.regionserver.global.memstore.lowerLimit</name>
-		<value>0.38</value>
-	</property>
-	<property>
-		<name>zookeeper.session.timeout</name>
-		<value>20</value>
-	</property>
-
-	<property>
-		<name>hbase.security.authorization</name>
-		<value>false</value>
-	</property>
-	<property>
-		<name>hbase.cluster.distributed</name>
-		<value>true</value>
-	</property>
-	
-	<property>
-		<name>hbase.hstore.flush.retries.number</name>
-		<value>120</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.block.multiplier</name>
-		<value>4</value>
-	</property>
-	<property>
-		<name>hbase.hstore.blockingStoreFiles</name>
-		<value>200</value>
-	</property>
-	<property>
-		<name>hbase.defaults.for.version.skip</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.regionserver.global.memstore.upperLimit</name>
-		<value>0.4</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.mslab.enabled</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.client.keyvalue.maxsize</name>
-		<value>10485760</value>
-	</property>
-	<property>
-		<name>hbase.superuser</name>
-		<value>hbase</value>
-	</property>
-	<property>
-		<name>hfile.block.cache.size</name>
-		<value>0.40</value>
-	</property>
-	<property>
-		<name>zookeeper.znode.parent</name>
-		<value>/hbase-unsecure</value>
-	</property>
-	<property>
-		<name>hbase.hregion.max.filesize</name>
-		<value>10737418240</value>
-	</property>
-	<property>
-		<name>hbase.zookeeper.property.clientPort</name>
-		<value>2181</value>
-	</property>
-	<property>
-		<name>hbase.security.authentication</name>
-		<value>simple</value>
-	</property>
-	<property>
-		<name>hbase.client.scanner.caching</name>
-		<value>100</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.flush.size</name>
-		<value>134217728</value>
-	</property>
-	<property>
-		<name>hbase.hregion.majorcompaction</name>
-		<value>86400000</value>
-	</property>
-	<property>
-		<name>hbase.client.write.buffer</name>
-		<value>500000000</value>
-	</property>
-</configuration>
\ No newline at end of file
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>dfs.domain.socket.path</name>
+    <value>/var/run/hdfs/dn_socket</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/cif/CIFHbaseAdapterTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/cif/CIFHbaseAdapterTest.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/cif/CIFHbaseAdapterTest.java
index e7810d4..82390e9 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/cif/CIFHbaseAdapterTest.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/cif/CIFHbaseAdapterTest.java
@@ -18,6 +18,7 @@
  */
 package com.opensoc.enrichment.adapters.cif;
 
+import java.net.InetAddress;
 import java.util.Properties;
 
 import com.opensoc.test.AbstractTestContext;
@@ -37,6 +38,7 @@ public class CIFHbaseAdapterTest extends AbstractTestContext {
     private static CIFHbaseAdapter cifHbaseAdapter=null;
 
 
+
     /**
      * Constructs a new <code>CIFHbaseAdapterTest</code> instance.
      * @param name
@@ -70,8 +72,33 @@ public class CIFHbaseAdapterTest extends AbstractTestContext {
 
     protected void setUp() throws Exception {
         super.setUp();
+        
         Properties prop = super.getTestProperties();
         assertNotNull(prop);
+        
+        if(skipTests(this.getMode())){
+            return;//skip tests
+        }
+        
+        String[] zk = prop.get("kafka.zk.list").toString().split(",");
+        
+        for(String z : zk)
+        {
+        	InetAddress address = InetAddress.getByName(z);
+            boolean reachable = address.isReachable(100);
+
+            if(!reachable)
+            {
+            	this.setMode("local");
+            	//throw new Exception("Unable to reach zookeeper, skipping CIF adapter test");
+            	break;
+            }
+            
+        }
+        
+        if(skipTests(this.getMode()))
+            return;//skip tests
+            
         System.out.println("kafka.zk.list ="+(String) prop.get("kafka.zk.list"));
         System.out.println("kafka.zk.list ="+(String) prop.get("kafka.zk.port"));   
         System.out.println("kafka.zk.list ="+(String) prop.get("bolt.enrichment.cif.tablename"));   

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapterTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapterTest.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapterTest.java
index 173819b..ca54500 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapterTest.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/geo/GeoMysqlAdapterTest.java
@@ -16,21 +16,22 @@
  */
 package com.opensoc.enrichment.adapters.geo;
 
+import java.net.URL;
 import java.util.Properties;
 
 import org.json.simple.JSONObject;
 
-import com.opensoc.test.AbstractTestContext;
+import com.opensoc.test.AbstractSchemaTest;
 
  /**
  * <ul>
- * <li>Title: </li>
- * <li>Description: </li>
+ * <li>Title: GeoMySqlAdapterTest</li>
+ * <li>Description: Tests for GeoMySqlAdapter</li>
  * <li>Created: Aug 25, 2014</li>
  * </ul>
  * @version $Revision: 1.1 $
  */
-public class GeoMysqlAdapterTest extends AbstractTestContext  {
+public class GeoMysqlAdapterTest extends AbstractSchemaTest {
 
     private static GeoMysqlAdapter geoMySqlAdapter=null;
     private static boolean connected=false;
@@ -72,9 +73,12 @@ public class GeoMysqlAdapterTest extends AbstractTestContext  {
             System.out.println(getClass().getName()+" Skipping Tests !!Local Mode");
             return;//skip tests
        }else{
-        geoMySqlAdapter=new GeoMysqlAdapter((String)prop.get("mysql.ip"), (new Integer((String)prop.get("mysql.port"))).intValue(),(String)prop.get("mysql.username"),(String)prop.get("mysql.password"), (String)prop.get("bolt.enrichment.geo.adapter.table"));
-        connected =geoMySqlAdapter.initializeAdapter();
-        assertTrue(connected);
+           GeoMysqlAdapterTest.setGeoMySqlAdapter(new GeoMysqlAdapter((String)prop.get("mysql.ip"), (new Integer((String)prop.get("mysql.port"))).intValue(),(String)prop.get("mysql.username"),(String)prop.get("mysql.password"), (String)prop.get("bolt.enrichment.geo.adapter.table")));
+           connected =geoMySqlAdapter.initializeAdapter();
+           assertTrue(connected);
+           URL schema_url = getClass().getClassLoader().getResource(
+               "TestSchemas/GeoMySqlSchema.json");
+           super.setSchemaJsonString(super.readSchemaFromFile(schema_url));  
        }
     }
 
@@ -85,7 +89,7 @@ public class GeoMysqlAdapterTest extends AbstractTestContext  {
 
     protected void tearDown() throws Exception {
         super.tearDown();
-        geoMySqlAdapter=null;
+        GeoMysqlAdapterTest.setGeoMySqlAdapter(null);
     }
 
     /**
@@ -95,16 +99,24 @@ public class GeoMysqlAdapterTest extends AbstractTestContext  {
         if(skipTests(this.getMode())){
             return;//skip tests
        }else{
-        JSONObject json = geoMySqlAdapter.enrich("72.163.4.161");
-        
-        //assert Geo Response is not null
-        assertNotNull(json);
-        
-        //assert LocId is not null
-        assertNotNull(json.get("locID"));
+           
+         try {           
+                JSONObject json = geoMySqlAdapter.enrich("72.163.4.161");
+                
+                //assert Geo Response is not null
+                System.out.println("json ="+json);
+                assertNotNull(json);
         
-        //assert right LocId is being returned
-        assertEquals("4522",json.get("locID"));
+                assertEquals(true, super.validateJsonData(super.getSchemaJsonString(), json.toString()));
+                //assert LocId is not null
+                assertNotNull(json.get("locID"));
+                
+                //assert right LocId is being returned
+                assertEquals("4522",json.get("locID"));    
+         } catch (Exception e) {
+            e.printStackTrace();
+            fail("Json validation Failed");
+         }
        }
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapterTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapterTest.java b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapterTest.java
index 3d2e219..3057c13 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapterTest.java
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/java/com/opensoc/enrichment/adapters/whois/WhoisHBaseAdapterTest.java
@@ -18,6 +18,7 @@
  */
 package com.opensoc.enrichment.adapters.whois;
 
+import java.net.InetAddress;
 import java.util.Properties;
 
 import org.json.simple.JSONObject;
@@ -68,13 +69,39 @@ public class WhoisHBaseAdapterTest extends AbstractTestContext {
         super.setUp();
         Properties prop = super.getTestProperties();
         assertNotNull(prop);   
+        
         if(skipTests(this.getMode())){
             return;//skip tests
-       }else{ 
+        }
+        
+        String[] zk = prop.get("kafka.zk.list").toString().split(",");
+        
+        for(String z : zk)
+        {
+        	InetAddress address = InetAddress.getByName(z);
+            boolean reachable = address.isReachable(100);
+
+            if(!reachable)
+            {
+            	this.setMode("local");
+            	break;
+            	//throw new Exception("Unable to reach zookeeper, skipping WHois adapter test");
+            }
+            
+            System.out.println("kafka.zk.list ="+(String) prop.get("kafka.zk.list"));
+            System.out.println("kafka.zk.list ="+(String) prop.get("kafka.zk.port"));   
+            System.out.println("kafka.zk.list ="+(String) prop.get("bolt.enrichment.cif.tablename")); 
+            
+        }
+        
+        if(skipTests(this.getMode())){
+            System.out.println("Local Mode Skipping tests !! ");
+        }else{
             whoisHbaseAdapter=new WhoisHBaseAdapter((String)prop.get("bolt.enrichment.whois.hbase.table.name"),(String)prop.get("kafka.zk.list"),(String)prop.get("kafka.zk.port"));
             connected =whoisHbaseAdapter.initializeAdapter();
             assertTrue(connected);
-       }
+        }
+       
     }
 
     /* 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/CIFHbaseAdapterTest.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/CIFHbaseAdapterTest.properties b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/CIFHbaseAdapterTest.properties
index 43ef4f6..8217353 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/CIFHbaseAdapterTest.properties
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/CIFHbaseAdapterTest.properties
@@ -1,11 +1,11 @@
 kafka.zk.port=2181
-kafka.zk.list=zkpr1,zkpr2,zkpr3
-kafka.zk=zkpr1:2181,zkpr2:2181,zkpr3:2181
+kafka.zk.list=zkpr1
+kafka.zk=zkpr1:2181
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
 bolt.enrichment.cif.host=tld
 bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/GeoMysqlAdapterTest.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/GeoMysqlAdapterTest.properties b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/GeoMysqlAdapterTest.properties
index fe95233..3a4e179 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/GeoMysqlAdapterTest.properties
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/GeoMysqlAdapterTest.properties
@@ -1,11 +1,11 @@
 mysql.ip=172.30.9.120
-mysql.port=0
+mysql.port=3306
 mysql.username=test
 mysql.password=123123
 
 #GeoEnrichment
-
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.source=ip_src_addr,ip_dst_addr

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/CIFHbaseSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/CIFHbaseSchema.json b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/CIFHbaseSchema.json
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/GeoMySqlSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/GeoMySqlSchema.json b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/GeoMySqlSchema.json
new file mode 100644
index 0000000..c4f2a82
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/GeoMySqlSchema.json
@@ -0,0 +1,42 @@
+{
+"title": "GeoMySql Schema",
+"type": "object",
+"properties": {
+
+         "city"    : {
+					   "type": "string"
+				  },
+		 "country" : {
+						"type": "string"
+					},
+		 "dmaCode" :
+		 			 {
+						"type": "string"
+					},
+	     "geoHash" : 
+	     			{
+						"type": "string"
+					},
+		 "latitude" : 
+		 			{
+						"type": "string"
+				   },
+		 "locID" : 
+		 			{
+					   "type": "string"
+				   },
+		 "location_point" : 
+		 			{
+					   "type": "string"
+				    },
+		 "longitude" : 
+		 			{
+						"type": "string"
+					},
+		 "postalCode" : 
+		 			{
+						"type": "string"
+					}
+   },
+   "required": ["city", "country", "dmaCode","latitude","locID","location_point","postalCode"]
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/WhoisHbaseSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/WhoisHbaseSchema.json b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/TestSchemas/WhoisHbaseSchema.json
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/WhoisHbaseAdapterTest.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/WhoisHbaseAdapterTest.properties b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/WhoisHbaseAdapterTest.properties
index b80dfcd..4f264ed 100644
--- a/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/WhoisHbaseAdapterTest.properties
+++ b/opensoc-streaming/OpenSOC-EnrichmentAdapters/src/test/resources/WhoisHbaseAdapterTest.properties
@@ -1,11 +1,11 @@
 kafka.zk.port=2181
-kafka.zk.list=zkpr1,zkpr2,zkpr3
-kafka.zk=zkpr1:2181,zkpr2:2181,zkpr3:2181
+kafka.zk.list=zkpr1
+kafka.zk=zkpr1:2181
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
 bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/pom.xml b/opensoc-streaming/OpenSOC-Indexing/pom.xml
index 96e2bd0..d55ab7f 100644
--- a/opensoc-streaming/OpenSOC-Indexing/pom.xml
+++ b/opensoc-streaming/OpenSOC-Indexing/pom.xml
@@ -15,11 +15,13 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-Indexing</artifactId>
 	<properties>
-		<elastic.search.version>1.2.1</elastic.search.version>
+       <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>		
+		<elastic.search.version>1.3.1</elastic.search.version>
 		<http.client.version>4.3.4</http.client.version>
 		<jsonsimple.version>1.1.1</jsonsimple.version>
 	</properties>
@@ -28,13 +30,19 @@
 		<dependency>
 			<groupId>com.opensoc</groupId>
 			<artifactId>OpenSOC-Common</artifactId>
-			<version>${parent.version}</version>
+			<version>${project.parent.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.storm</groupId>
 			<artifactId>storm-core</artifactId>
 			<version>${global_storm_version}</version>
 			<scope>provided</scope>
+			  <exclusions>
+				<exclusion>
+				   <artifactId>servlet-api</artifactId>
+				   <groupId>javax.servlet</groupId>
+				  </exclusion>
+		    </exclusions>					
 		</dependency>
 		<dependency>
 			<groupId>org.elasticsearch</groupId>
@@ -86,4 +94,4 @@
       </plugin>
     </plugins>
   </reporting>  	
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/readme.md b/opensoc-streaming/OpenSOC-Indexing/readme.md
new file mode 100644
index 0000000..bd9d7ac
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Indexing/readme.md
@@ -0,0 +1,61 @@
+#OpenSOC-Indexing
+
+##Module Description
+
+This module provides the indexing capability to OpenSOC components.  The primary indexing engine for now is Elastic Search, but Solr may be supported at some point in the future as well.  There are three types of messages that are commonly indexed in OpenSOC topologies: messages, alerts, and errors.  Messages are telemetry messages parsed by the parser bolt.  Alerts are alerts generated by the alerts bolt.  Errors are an optional feature where each OpenSOC bolt in addition to outputting errors in the log file will also index them for immediate analysis.
+
+###Index bolt
+
+The signature of the index bolt is as follows:
+
+```
+TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
+.withIndexIP(config.getString("es.ip"))
+.withIndexPort(config.getInt("es.port"))
+.withClusterName(config.getString("es.clustername"))
+.withIndexName(
+config.getString("bolt.error.indexing.indexname"))
+.withDocumentName(
+config.getString("bolt.error.indexing.documentname"))
+.withBulk(config.getInt("bolt.error.indexing.bulk"))
+.withIndexAdapter(adapter)
+.withMetricConfiguration(config);
+
+```
+
+###IndexAdapters
+
+*com.opensoc.indexing.adapters.ESBaseBulkAdapter - bulk ingest messages into Elastic Search
+*com.opensoc.indexing.adapters.ESBaseBulkRotatingAdapter - does everything the adapter above does, but is able to rotate the index names based on size
+*com.opensoc.indexing.adapters.ESTimedBulkRotatingAdapter - does everything the adapter above does, but is able to rotate the index names based on size and time
+*com.opensoc.indexing.adapters.SolrAdapter - currently under development
+
+/etc/ directory contains all environment-related configs
+
+##Sample Input and Generator Spout
+
+The sample input for topologies provided in this release was checked in here:
+
+```
+https://github.com/OpenSOC/opensoc-streaming/tree/master/OpenSOC-Topologies/src/main/resources/SampleInput
+```
+
+We provide a generator spout that is able to drive these topologies.  In production we run with the kafka spout, but for documentation on that please reference the Storm project documentation
+
+The generator spout comes with the following signature:
+
+```
+GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
+.withFilename(test_file_path).withRepeating(
+config.getBoolean("spout.test.parallelism.repeat"));
+```
+
+* the repeat variable defines if the generator spout will loop through the input or stop once it gets to the end of file
+
+###Additional Storm Bolts
+In addition to custom bolts developed for OpenSOC we utilize standard bolts and spouts included with the Storm release.  We will not provide documentation for these spouts and bolts since they are provided as part of Storm.  These spouts and bolts are:
+
+* KafkaSpout
+* KafkaBolt
+* HDFSBolt
+* HBaseBolt
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/TelemetryIndexingBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/TelemetryIndexingBolt.java b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/TelemetryIndexingBolt.java
index 965deb5..2c4e0a9 100644
--- a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/TelemetryIndexingBolt.java
+++ b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/TelemetryIndexingBolt.java
@@ -33,10 +33,10 @@ import backtype.storm.tuple.Fields;
 import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
 
+import com.opensoc.helpers.topology.ErrorGenerator;
 import com.opensoc.index.interfaces.IndexAdapter;
 import com.opensoc.json.serialization.JSONEncoderHelper;
 import com.opensoc.metrics.MetricReporter;
-import com.opensoc.topologyhelpers.ErrorGenerator;
 
 /**
  * 
@@ -59,6 +59,8 @@ import com.opensoc.topologyhelpers.ErrorGenerator;
 public class TelemetryIndexingBolt extends AbstractIndexingBolt {
 
 	private JSONObject metricConfiguration;
+	private String _indexDateFormat;
+	
 	private Set<Tuple> tuple_queue = new HashSet<Tuple>();
 
 	/**
@@ -140,7 +142,18 @@ public class TelemetryIndexingBolt extends AbstractIndexingBolt {
 
 		return this;
 	}
+	
+	/**
+	 * 
+	 * @param dateFormat
+	 *           timestamp to append to index names
+	 * @return instance of bolt
+	 */
+	public TelemetryIndexingBolt withIndexTimestamp(String indexTimestamp) {
+		_indexDateFormat = indexTimestamp;
 
+		return this;
+	}
 	/**
 	 * 
 	 * @param config
@@ -161,7 +174,7 @@ public class TelemetryIndexingBolt extends AbstractIndexingBolt {
 		try {
 			
 			_adapter.initializeConnection(_IndexIP, _IndexPort,
-					_ClusterName, _IndexName, _DocumentName, _BulkIndexNumber);
+					_ClusterName, _IndexName, _DocumentName, _BulkIndexNumber, _indexDateFormat);
 			
 			_reporter = new MetricReporter();
 			_reporter.initialize(metricConfiguration,
@@ -170,10 +183,8 @@ public class TelemetryIndexingBolt extends AbstractIndexingBolt {
 		} catch (Exception e) {
 			
 			e.printStackTrace();
-			
-			String error_as_string = org.apache.commons.lang.exception.ExceptionUtils.getStackTrace(e);
-			
-			JSONObject error = ErrorGenerator.generateErrorMessage(new String("bulk index problem"), error_as_string);
+					
+			JSONObject error = ErrorGenerator.generateErrorMessage(new String("bulk index problem"), e);
 			_collector.emit("error", new Values(error));
 		}
 
@@ -222,9 +233,8 @@ public class TelemetryIndexingBolt extends AbstractIndexingBolt {
 				_collector.fail(setElement);
 				failCounter.inc();
 				
-				String error_as_string = org.apache.commons.lang.exception.ExceptionUtils.getStackTrace(e);
 				
-				JSONObject error = ErrorGenerator.generateErrorMessage(new String("bulk index problem"), error_as_string);
+				JSONObject error = ErrorGenerator.generateErrorMessage(new String("bulk index problem"), e);
 				_collector.emit("error", new Values(error));
 			}
 			tuple_queue.clear();

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/AbstractIndexAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/AbstractIndexAdapter.java b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/AbstractIndexAdapter.java
index 3644c9e..6dafbe7 100644
--- a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/AbstractIndexAdapter.java
+++ b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/AbstractIndexAdapter.java
@@ -20,6 +20,6 @@ public abstract class AbstractIndexAdapter implements IndexAdapter, Serializable
 
 	abstract public boolean initializeConnection(String ip, int port,
 			String cluster_name, String index_name, String document_name,
-			int bulk) throws Exception;
+			int bulk, String date_format) throws Exception;
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBaseBulkAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBaseBulkAdapter.java b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBaseBulkAdapter.java
index 97af748..e5ed283 100644
--- a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBaseBulkAdapter.java
+++ b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBaseBulkAdapter.java
@@ -1,12 +1,11 @@
 package com.opensoc.indexing.adapters;
 
 import java.io.Serializable;
-import java.util.HashSet;
 import java.util.Iterator;
-import java.util.Set;
+import java.util.Map;
 
 import org.apache.commons.collections.Bag;
-import org.apache.commons.collections.HashBag;
+import org.apache.commons.collections.bag.HashBag;
 import org.elasticsearch.action.bulk.BulkRequestBuilder;
 import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -35,7 +34,7 @@ public class ESBaseBulkAdapter extends AbstractIndexAdapter implements
 	@Override
 	public boolean initializeConnection(String ip, int port,
 			String cluster_name, String index_name, String document_name,
-			int bulk_size) throws Exception {
+			int bulk_size, String date_format) throws Exception {
 
 		bulk_set = new HashBag();
 
@@ -141,4 +140,9 @@ public class ESBaseBulkAdapter extends AbstractIndexAdapter implements
 			return false;
 		}
 	}
+
+	public void setOptionalSettings(Map<String, String> settings) {
+		// TODO Auto-generated method stub
+		
+	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBulkRotatingAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBulkRotatingAdapter.java b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBulkRotatingAdapter.java
index 022bbde..ebdc7b0 100644
--- a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBulkRotatingAdapter.java
+++ b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESBulkRotatingAdapter.java
@@ -3,6 +3,7 @@ package com.opensoc.indexing.adapters;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Map;
 
 import org.apache.http.HttpResponse;
 import org.apache.http.client.HttpClient;
@@ -34,11 +35,11 @@ public class ESBulkRotatingAdapter extends AbstractIndexAdapter {
 	private HttpClient httpclient;
 	private HttpPost post;
 
-	private DateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd.HH");
+	private DateFormat dateFormat;
 
 	public boolean initializeConnection(String ip, int port,
 			String cluster_name, String index_name, String document_name,
-			int bulk_size) {
+			int bulk_size, String date_format) {
 
 		_LOG.info("Initializing ESBulkAdapter...");
 
@@ -51,9 +52,11 @@ public class ESBulkRotatingAdapter extends AbstractIndexAdapter {
 			_document_name = document_name;
 
 			_bulk_size = bulk_size - 1;
+			
 
+			dateFormat = new SimpleDateFormat(date_format);
+			
 			element_count = 0;
-			index_postfix = dateFormat.format(new Date());
 			running_index_postfix = "NONE";
 
 			Settings settings = ImmutableSettings.settingsBuilder()
@@ -76,7 +79,7 @@ public class ESBulkRotatingAdapter extends AbstractIndexAdapter {
 
 		index_postfix = dateFormat.format(new Date());
 
-		bulkRequest.add(client.prepareIndex(_index_name + "-" + index_postfix,
+		bulkRequest.add(client.prepareIndex(_index_name + "_" + index_postfix,
 				_document_name).setSource(raw_message));
 
 		return doIndex();
@@ -86,7 +89,7 @@ public class ESBulkRotatingAdapter extends AbstractIndexAdapter {
 
 		index_postfix = dateFormat.format(new Date());
 
-		bulkRequest.add(client.prepareIndex(_index_name + "-" + index_postfix,
+		bulkRequest.add(client.prepareIndex(_index_name + "_" + index_postfix,
 				_document_name).setSource(raw_message));
 
 		return doIndex();
@@ -149,4 +152,9 @@ public class ESBulkRotatingAdapter extends AbstractIndexAdapter {
 		return 1;
 	}
 
+	public void setOptionalSettings(Map<String, String> settings) {
+		// TODO Auto-generated method stub
+		
+	}
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESTimedRotatingAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESTimedRotatingAdapter.java b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESTimedRotatingAdapter.java
index e0a8b98..a94ef97 100644
--- a/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESTimedRotatingAdapter.java
+++ b/opensoc-streaming/OpenSOC-Indexing/src/main/java/com/opensoc/indexing/adapters/ESTimedRotatingAdapter.java
@@ -5,6 +5,7 @@ import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Iterator;
+import java.util.Map;
 
 import org.apache.commons.collections.Bag;
 import org.apache.commons.collections.HashBag;
@@ -29,16 +30,23 @@ public class ESTimedRotatingAdapter extends AbstractIndexAdapter implements
 	private int _port;
 	private String _ip;
 	public transient TransportClient client;
-	private DateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd.HH");
+	private DateFormat dateFormat;
+	
+	private Map<String, String> tuning_settings;
 
 	private Bag bulk_set;
 
 	private Settings settings;
+	
+	public void setOptionalSettings(Map<String, String> settings)
+	{
+		tuning_settings = settings;
+	}
 
 	@Override
 	public boolean initializeConnection(String ip, int port,
 			String cluster_name, String index_name, String document_name,
-			int bulk_size) throws Exception {
+			int bulk_size, String date_format) throws Exception {
 
 		bulk_set = new HashBag();
 
@@ -51,11 +59,25 @@ public class ESTimedRotatingAdapter extends AbstractIndexAdapter implements
 			_index_name = index_name;
 			_document_name = document_name;
 			_bulk_size = bulk_size;
+			
+
+			dateFormat = new SimpleDateFormat(date_format);
 
 			System.out.println("Bulk indexing is set to: " + _bulk_size);
 
-			settings = ImmutableSettings.settingsBuilder()
-					.put("cluster.name", _cluster_name).build();
+			ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder() ;	
+			
+			if(tuning_settings != null && tuning_settings.size() > 0)
+			{
+					builder.put(tuning_settings);
+			}
+			
+			builder.put("cluster.name", _cluster_name);
+			builder.put("client.transport.ping_timeout","500s");
+			
+			
+			settings = builder.build();
+					
 			client = new TransportClient(settings)
 					.addTransportAddress(new InetSocketTransportAddress(_ip,
 							_port));
@@ -83,7 +105,7 @@ public class ESTimedRotatingAdapter extends AbstractIndexAdapter implements
 			bulk_set.add(raw_message);
 			set_size = bulk_set.size();
 			
-			System.out.println("Bulk size is now: " + bulk_set.size());
+			_LOG.trace("[OpenSOC] Incremented bulk size to: " + bulk_set.size());
 		}
 
 		try {
@@ -122,7 +144,7 @@ public class ESTimedRotatingAdapter extends AbstractIndexAdapter implements
 				while (iterator.hasNext()) {
 					JSONObject setElement = iterator.next();
 					
-					System.out.println("Flushing to index: " + _index_name+ "_" + index_postfix);
+					_LOG.trace("[OpenSOC] Flushing to index: " + _index_name+ "_" + index_postfix);
 
 					IndexRequestBuilder a = client.prepareIndex(_index_name+ "_" + index_postfix,
 							_document_name);
@@ -131,22 +153,27 @@ public class ESTimedRotatingAdapter extends AbstractIndexAdapter implements
 
 				}
 
-				System.out.println("Performing bulk load of size: "
+				_LOG.trace("[OpenSOC] Performing bulk load of size: "
 						+ bulkRequest.numberOfActions());
 
 				BulkResponse resp = bulkRequest.execute().actionGet();
 				
+				for(BulkItemResponse r: resp.getItems())
+				{
+					r.getResponse();
+					_LOG.trace("[OpenSOC] ES SUCCESS MESSAGE: " + r.getFailureMessage());
+				}
 				
-				System.out.println("[OpenSOC] Received bulk response: "
-						+ resp.buildFailureMessage());
 				bulk_set.clear();
 				
 				if (resp.hasFailures()) {
-				    
+					_LOG.error("[OpenSOC] Received bulk response error: "
+							+ resp.buildFailureMessage());
+					
 					for(BulkItemResponse r: resp.getItems())
 					{
 						r.getResponse();
-						System.out.println("FAILURE MESSAGE: " + r.getFailureMessage());
+						_LOG.error("[OpenSOC] ES FAILURE MESSAGE: " + r.getFailureMessage());
 					}
 				}
 				

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/pom.xml b/opensoc-streaming/OpenSOC-MessageParsers/pom.xml
index 5cfdfa7..9a7d651 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/pom.xml
+++ b/opensoc-streaming/OpenSOC-MessageParsers/pom.xml
@@ -15,14 +15,18 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-MessageParsers</artifactId>
+    <properties>
+ 		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>	
+    </properties>
 	<dependencies>
 		<dependency>
 			<groupId>com.opensoc</groupId>
 			<artifactId>OpenSOC-Common</artifactId>
-			<version>${parent.version}</version>
+			<version>${project.parent.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.storm</groupId>
@@ -40,11 +44,7 @@
 			<artifactId>guava</artifactId>
 			<version>${global_guava_version}</version>
 		</dependency>
-		<dependency>
-			<groupId>com.github.fge</groupId>
-			<artifactId>json-schema-validator</artifactId>
-			<version>${global_json_schema_validator_version}</version>
-		</dependency>
+
 		<dependency>
 			<groupId>io.thekraken</groupId>
 			<artifactId>grok</artifactId>
@@ -74,15 +74,31 @@
 				<configuration>
 					<targetJdk>1.7</targetJdk>
 				</configuration>
+				
 			</plugin>
 		</plugins>
 	</reporting>
 	<build>
+	<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.1</version>
+				<inherited>true</inherited>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			</plugins>
 		<resources>
 		<resource>
 				<directory>src/main/resources</directory>
 			</resource>
 			<resource>
+				<directory>src/main/resources/patterns</directory>
+			</resource>
+			<resource>
 				<directory>src/test/resources</directory>
 			</resource>
 		</resources>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/readme.md b/opensoc-streaming/OpenSOC-MessageParsers/readme.md
new file mode 100644
index 0000000..128932a
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/readme.md
@@ -0,0 +1,82 @@
+#OpenSOC-Parsers
+
+##Module Description
+
+This module provides a list of parsers that can be used with the OpenSOC framework.  There are two types of parsers.  The first type is a Java parser.  This kind of parser is optimized for speed and performance and is built for use with higher velocity topologies.  These parsers are not easily modifiable and in order to make changes to them the entire topology needs to be recompiled.  The second type of parser provided with the system is a Grok parser.  This type of parser is primarily designed for lower-velocity topologies or for quickly standing up a parser for a new telemetry before a permanent Java parser can be written for it.
+
+##Message Format
+
+All OpenSOC messages follow a specific format in order to ingest a message.  If a message does not conform to this format it will be dropped and put onto an error queue for further examination.  The message must be of a JSON format and must have a JSON tag message like so:
+
+```
+{"message" : message content}
+
+```
+
+Where appropriate there is also a standardization around the 5-tuple JSON fields.  This is done so the topology correlation engine further downstream can correlate messages from different topologies by these fields.  We are currently working on expanding the message standardization beyond these fields, but this feature is not yet available.  The standard field names are as follows:
+
+* ip_src_addr: layer 3 source IP
+* ip_dst_addr: layer 3 dest IP
+* ip_src_port: layer 4 source port
+* ip_dst_port: layer 4 dest port
+* protocol: layer 4 protocol
+* timestamp (epoch)
+* original_string: A human friendly string representation of the message
+
+The timestamp and original_string fields are mandatory. The remaining standard fields are optional.  If any of the optional fields are not applicable then the field should be left out of the JSON.
+
+So putting it all together a typical OpenSOC message with all 5-tuple fields present would look like the following:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"original_string": xxx,
+"additional-field 1": xxx,
+}
+
+}
+```
+
+##Parser Bolt
+
+The OpenSOC parser bolt is a standard bolt, which can be extended with multiple Java and Grok parser adapter for parsing different topology messages.  The bolt signature for declaration in a storm topology is as follows:
+
+```
+AbstractParserBolt parser_bolt = new TelemetryParserBolt()
+.withMessageParser(parser)
+.withMessageFilter(new GenericMessageFilter())
+.withMetricConfig(config);
+
+```
+
+Metric Config - optional argument for exporting custom metrics to graphite.  If set to null no metrics will be exported.  If set, then a list of metrics defined in the metrics.conf file of each topology will define which metrics are exported and how often.
+
+Message Filter - a filter defining which messages can be dropped.  This feature is only present in the Java parser adapters
+
+Message Parser - defines the parser adapter to be used for a topology
+
+##Parser Adapters
+
+Parser adapters are loaded dynamically in each OpenSOC topology.  They are defined in topology.conf in the configuration item bolt.parser.adapter
+
+###Java Parser Adapters
+Java parser adapters are intended for higher-velocity topologies and are not easily changed or extended.  As the adoption of OpenSOC continues we plan on extending our library of Java adapters to process more log formats.  As of this moment the Java adapters included with OpenSOC are:
+
+* com.opensoc.parsing.parsers.BasicIseParser : Parse ISE messages
+* com.opensoc.parsing.parsers.BasicBroParser : Parse Bro messages
+* com.opensoc.parsing.parsers.BasicSourcefireParser : Parse Sourcefire messages
+* com.opensoc.parsing.parsers.BasicLancopeParser : Parse Lancope messages
+
+###Grok Parser Adapters
+Grok parser adapters are designed primarily for someone who is not a Java coder to quickly stand up a parser adapter for lower velocity topologies.  Grok relies on Regex for message parsing, which is much slower than purpose-built Java parsers, but is more extensible.  Grok parsers are defined via a config file and the topology does not need to be recompiled in order to make changes to them.  An example of a Grok parser is:
+
+* com.opensoc.parsing.parsers.GrokSourcefireParser
+
+For more information on the Grok project please refer to the following link:
+
+https://github.com/thekrakken/java-grok

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/AbstractParserBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/AbstractParserBolt.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/AbstractParserBolt.java
index ace7141..7dc5d4f 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/AbstractParserBolt.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/AbstractParserBolt.java
@@ -18,9 +18,7 @@
 package com.opensoc.parsing;
 
 import java.io.IOException;
-import java.io.UnsupportedEncodingException;
 import java.util.Map;
-import java.util.zip.Deflater;
 
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
@@ -105,32 +103,15 @@ public abstract class AbstractParserBolt extends BaseRichBolt {
 	public boolean checkForSchemaCorrectness(JSONObject message) {
 		int correct = 0;
 
-		if (message.containsKey("ip_src_addr")) {
-			correct++;
-			LOG.trace("[OpenSOC] Message contains ip_src_addr");
-		}
-		if (message.containsKey("ip_dst_addr")) {
-			correct++;
-			LOG.trace("[OpenSOC] Message contains ip_dst_addr");
-		}
-		if (message.containsKey("ip_src_port")) {
-			correct++;
-			LOG.trace("[OpenSOC] Message contains ip_src_port");
-		}
-		if (message.containsKey("ip_dst_port")) {
-			correct++;
-			LOG.trace("[OpenSOC] Message contains ip_dst_port");
-		}
-		if (message.containsKey("protocol")) {
-			correct++;
-			LOG.trace("[OpenSOC] Message contains protocol");
-		}
-
-		if (correct == 0) {
-			LOG.trace("[OpenSOC] Message conforms to schema: " + message);
+		
+		if (!(message.containsKey("original_string"))) {
+			LOG.trace("[OpenSOC] Message does not have original_string: " + message);
+			return false;
+		} else if (!(message.containsKey("timestamp"))) { 
+			LOG.trace("[OpenSOC] Message does not have timestamp: " + message);
 			return false;
 		} else {
-			LOG.trace("[OpenSOC] Message does not conform to schema: "
+			LOG.trace("[OpenSOC] Message conforms to schema: "
 					+ message);
 			return true;
 		}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/PcapParserBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/PcapParserBolt.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/PcapParserBolt.java
index bd3951b..4fb6482 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/PcapParserBolt.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/PcapParserBolt.java
@@ -7,9 +7,9 @@ import org.apache.log4j.Logger;
 import org.json.simple.JSONObject;
 import org.json.simple.JSONValue;
 
+import com.opensoc.helpers.topology.ErrorGenerator;
 import com.opensoc.parsing.parsers.PcapParser;
 import com.opensoc.pcap.PacketInfo;
-import com.opensoc.topologyhelpers.ErrorGenerator;
 
 import backtype.storm.generated.Grouping;
 import backtype.storm.task.OutputCollector;
@@ -49,11 +49,9 @@ private Map conf;
   @SuppressWarnings("unused")
 private int numberOfCharsToUseForShuffleGrouping = 4;
 
-  /** The micro sec multiplier. */
-  private long microSecMultiplier = 1L;
+  /** The divisor to convert nanos to expected time precision. */
+  private long timePrecisionDivisor = 1L;
 
-  /** The sec multiplier. */
-  private long secMultiplier = 1000000L;
 
   // HBaseStreamPartitioner hBaseStreamPartitioner = null ;
 
@@ -64,6 +62,26 @@ private int numberOfCharsToUseForShuffleGrouping = 4;
 
   }
 
+  public PcapParserBolt withTsPrecision(String tsPrecision) {
+	if (tsPrecision.equalsIgnoreCase("MILLI")) {
+	  //Convert nanos to millis
+	  LOG.info("Configured for MILLI, setting timePrecisionDivisor to 1000000L" );
+	  timePrecisionDivisor = 1000000L;
+	} else if (tsPrecision.equalsIgnoreCase("MICRO")) {
+	  //Convert nanos to micro
+	  LOG.info("Configured for MICRO, setting timePrecisionDivisor to 1000L" );
+	  timePrecisionDivisor = 1000L;
+	} else if (tsPrecision.equalsIgnoreCase("NANO")) {
+	  //Keep nano as is.
+	  LOG.info("Configured for NANO, setting timePrecisionDivisor to 1L" );
+	  timePrecisionDivisor = 1L;
+	} else {
+	  LOG.info("bolt.parser.ts.precision not set. Default to NANO");
+	  timePrecisionDivisor = 1L;
+	}
+	return this;
+  }
+  
   /*
    * (non-Javadoc)
    * 
@@ -116,19 +134,7 @@ private int numberOfCharsToUseForShuffleGrouping = 4;
     
     Grouping._Fields a;
 
-    if (conf.containsKey("bolt.parser.ts.precision")) {
-      String timePrecision = conf.get("bolt.parser.ts.precision").toString();
-      if (timePrecision.equalsIgnoreCase("MILLI")) {
-        microSecMultiplier = 1L / 1000;
-        secMultiplier = 1000L;
-      } else if (timePrecision.equalsIgnoreCase("MICRO")) {
-        microSecMultiplier = 1L;
-        secMultiplier = 1000000L;
-      } else if (timePrecision.equalsIgnoreCase("NANO")) {
-        microSecMultiplier = 1000L;
-        secMultiplier = 1000000000L;
-      }
-    }
+
     // hBaseStreamPartitioner = new HBaseStreamPartitioner(
     // conf.get("bolt.hbase.table.name").toString(),
     // 0,
@@ -165,6 +171,7 @@ public void execute(Tuple input) {
 
         for (PacketInfo packetInfo : packetInfoList) {
         	
+        	
         	String string_pcap = packetInfo.getJsonIndexDoc();
         	Object obj=JSONValue.parse(string_pcap);
         	  JSONObject header=(JSONObject)obj;
@@ -172,40 +179,6 @@ public void execute(Tuple input) {
         	JSONObject message = new JSONObject();
         	//message.put("key", packetInfo.getKey());
         	
-        	if(header.containsKey("src_addr"))
-        	{
-        		String tmp = header.get("src_addr").toString();
-        		header.remove("src_addr");
-        		header.put("ip_src_addr", tmp);
-        	}
-        	
-        	if(header.containsKey("dst_addr"))
-        	{
-        		String tmp = header.get("dst_addr").toString();
-        		header.remove("dst_addr");
-        		header.put("ip_dst_addr", tmp);
-        	}
-        	
-        	if(header.containsKey("src_port"))
-        	{
-        		String tmp = header.get("src_port").toString();
-        		header.remove("src_port");
-        		header.put("ip_src_port", tmp);
-        	}
-        	
-        	if(message.containsKey("dst_port"))
-        	{
-        		String tmp = header.get("dst_port").toString();
-        		header.remove("dst_port");
-        		header.put("ip_dst_port", tmp);
-        	}
-        	if(message.containsKey("ip_protocol"))
-        	{
-        		String tmp = header.get("ip_protocol").toString();
-        		header.remove("ip_protocol");
-        		header.put("protocol", tmp);
-        	}
-        	
         	message.put("message", header);
         	
         	collector.emit("message", new Values(packetInfo.getKey(), message));
@@ -214,7 +187,7 @@ public void execute(Tuple input) {
         	
           collector.emit("pcap_header_stream", new Values(packetInfo.getJsonDoc(), packetInfo.getKey()));
           collector.emit("pcap_data_stream", new Values(packetInfo.getKey(),
-              (packetInfo.getPacketHeader().getTsSec() * secMultiplier + packetInfo.getPacketHeader().getTsUsec() * microSecMultiplier),
+             packetInfo.getPacketTimeInNanos() / timePrecisionDivisor,
               input.getBinary(0)));
 
           // collector.emit(new Values(packetInfo.getJsonDoc(), packetInfo
@@ -230,11 +203,9 @@ public void execute(Tuple input) {
       e.printStackTrace();
       LOG.error("Exception while processing tuple", e);
       
-      String error_as_string = org.apache.commons.lang.exception.ExceptionUtils
-				.getStackTrace(e);
 
 		JSONObject error = ErrorGenerator.generateErrorMessage(
-				"Alerts problem: " + input.getBinary(0), error_as_string);
+				"Alerts problem: " + input.getBinary(0), e);
 		collector.emit("error", new Values(error));
 		
       return;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/TelemetryParserBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/TelemetryParserBolt.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/TelemetryParserBolt.java
index b324eb7..8a48764 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/TelemetryParserBolt.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/TelemetryParserBolt.java
@@ -30,11 +30,11 @@ import backtype.storm.tuple.Fields;
 import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
 
+import com.opensoc.helpers.topology.ErrorGenerator;
 import com.opensoc.json.serialization.JSONEncoderHelper;
 import com.opensoc.metrics.MetricReporter;
 import com.opensoc.parser.interfaces.MessageFilter;
 import com.opensoc.parser.interfaces.MessageParser;
-import com.opensoc.topologyhelpers.ErrorGenerator;
 
 /**
  * Uses an adapter to parse a telemetry message from its native format into a
@@ -125,6 +125,11 @@ public class TelemetryParserBolt extends AbstractParserBolt {
 			LOG.info("[OpenSOC] Metric reporter is not initialized");
 		}
 		this.registerCounters();
+		
+		if(_parser != null)
+		_parser.init();
+		
+		
 	}
 
 	@SuppressWarnings("unchecked")
@@ -203,7 +208,7 @@ public class TelemetryParserBolt extends AbstractParserBolt {
 
 			JSONObject error = ErrorGenerator.generateErrorMessage(
 					"Parsing problem: " + new String(original_message),
-					e.toString());
+					e);
 			_collector.emit("error", new Values(error));
 		}
 	}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/AbstractParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/AbstractParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/AbstractParser.java
index e5fa29e..728e275 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/AbstractParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/AbstractParser.java
@@ -34,9 +34,15 @@ public abstract class AbstractParser implements MessageParser, Serializable {
 
 	public void initializeParser() {
 		_LOG.debug("Initializing adapter...");
+		
 
 	}
-
+	
+	public void init() {
+		
+	}
+	
+	
 	abstract public JSONObject parse(byte[] raw_message);
 
 }



[14/26] incubator-metron git commit: replace opensoc-streaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/LancopeParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/LancopeParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/LancopeParserTest.log
new file mode 100644
index 0000000..0e4bf74
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/LancopeParserTest.log
@@ -0,0 +1 @@
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.992Z","type":"syslog","host":"10.122.196.201"}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/PaloAltoFirewallParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/PaloAltoFirewallParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/PaloAltoFirewallParserTest.log
new file mode 100644
index 0000000..c58bcc8
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/PaloAltoFirewallParserTest.log
@@ -0,0 +1,2 @@
+<11>Jan  5 05:38:59 PAN1.exampleCustomer.com 1,2015/01/05 05:38:58,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:58,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:58,12031,1,54180,80,0,0,0x80004000,tcp,reset-both,\"ad.aspx?f=300x250&id=12;tile=1;ord=67AF705D60B1119C0F18BEA336F9\",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368099,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109656,,
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33760927,1,52688,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932062,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/SourceFireTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/SourceFireTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/SourceFireTest.log
new file mode 100644
index 0000000..af257aa
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/SourceFireTest.log
@@ -0,0 +1,3 @@
+SFIMS: [Primary Detection Engine (a7213248-6423-11e3-8537-fac6a92b7d9d)][MTD Access Control] Connection Type: Start, User: Unknown, Client: Unknown, Application Protocol: Unknown, Web App: Unknown, Firewall Rule Name: MTD Access Control, Firewall Rule Action: Allow, Firewall Rule Reasons: Unknown, URL Category: Unknown, URL_Reputation: Risk unknown, URL: Unknown, Interface Ingress: s1p1, Interface Egress: N/A, Security Zone Ingress: Unknown, Security Zone Egress: N/A, Security Intelligence Matching IP: None, Security Intelligence Category: None, {TCP} 72.163.0.129:60517 -> 10.1.128.236:443
+snort: [1:3192:2] WEB-CLIENT Windows Media Player directory traversal via Content-Disposition attempt [Classification: Attempted User Privilege Gain] [Priority: 1] {TCP} 46.149.110.103:80 -> 192.168.56.102:1073
+SFIMS: Correlation Event: Open Soc Log Forwarding/Opensoc Log Forwarding at Thu Oct 23 04:55:39 2014 UTC: [1:19123:7] \"MALWARE-CNC Dropper Win.Trojan.Cefyns.A variant outbound connection\" [Impact: Unknown] From \"172.19.50.7\" at Thu Oct 23 04:55:38 2014 UTC [Classification: A Network Trojan was Detected] [Priority: 1] {tcp} 139.230.245.23:52078->72.52.4.91:80
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/BroSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/BroSchema.json b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/BroSchema.json
new file mode 100644
index 0000000..0105c19
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/BroSchema.json
@@ -0,0 +1,28 @@
+{
+	"title": "Bro Schema",
+	"type": "object",
+	"properties": {
+		"status_code": {
+			"type": "integer"
+		},
+		"uid": {
+			"type": "string"
+		},
+		"protocol": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"host": {
+			"type": "string"
+		},		
+		"request_body_len": {
+			"type": "integer"
+		},
+		"response_body_len": {
+			"type": "integer"
+		}	
+	},
+	"required": ["status_code", "uid", "protocol","ip_dst_addr","host","request_body_len","response_body_len"]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/LancopeSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/LancopeSchema.json b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/LancopeSchema.json
index 12f326f..9118a93 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/LancopeSchema.json
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/LancopeSchema.json
@@ -8,15 +8,21 @@
 		"ip_dst_addr": {
 			"type": "string"
 		},
-		"ip_src_port": {
+		"original_string": {
 			"type": "string"
 		},
-		"ip_dst_port": {
+		"@version": {
 			"type": "string"
 		},
-		"protocol": {
+		"timestamp": {
+			"type": "integer"
+		},
+		"type": {
+			"type": "string"
+		},
+		"host": {
 			"type": "string"
 		}
 	},
-	"required": ["ip_src_addr", "ip_dst_addr", "ip_src_port", "ip_dst_port","protocol"]
+	"required": ["ip_src_addr", "ip_dst_addr", "original_string","@version", "timestamp", "type","host"]
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/SourcefireSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/SourcefireSchema.json b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/SourcefireSchema.json
index 3984b00..2711909 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/SourcefireSchema.json
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/TestSchemas/SourcefireSchema.json
@@ -8,15 +8,27 @@
 		"ip_dst_addr": {
 			"type": "string"
 		},
-		"ip_src_port": {
+		"timestamp": {
+			"type": "integer"
+		},
+		"protocol": {
+			"type": "string"
+		},
+		"original_string": {
 			"type": "string"
 		},
+		"original_string": {
+			"type": "string"
+		},
+		"ip_src_port": {
+			"type": "string"
+		},		
 		"ip_dst_port": {
 			"type": "string"
 		},
-		"protocol": {
+		"key": {
 			"type": "string"
-		}
+		}	
 	},
-	"required": ["ip_src_addr", "ip_dst_addr", "ip_src_port", "ip_dst_port","protocol"]
+	"required": ["ip_src_addr", "ip_dst_addr", "ip_src_port", "ip_dst_port","protocol","original_string","key","timestamp"]
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicFireEyeParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicFireEyeParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicFireEyeParserTest.config
new file mode 100644
index 0000000..8073cec
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicFireEyeParserTest.config
@@ -0,0 +1,2 @@
+#BasicFireEyeParserTestConfig
+logFile=src/test/resources/FireEyeParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicIseParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicIseParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicIseParserTest.config
new file mode 100644
index 0000000..ac158a5
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicIseParserTest.config
@@ -0,0 +1,2 @@
+#IseParserTestConfig
+logFile=src/test/resources/IseParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicLancopeParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicLancopeParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicLancopeParserTest.config
new file mode 100644
index 0000000..edafc56
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicLancopeParserTest.config
@@ -0,0 +1,2 @@
+#LancopeParserTestConfig
+logFile=src/test/resources/LancopeParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
new file mode 100644
index 0000000..613c314
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
@@ -0,0 +1,2 @@
+#BasicFireEyeParserTestConfig
+logFile=src/test/resources/PaloAltoFirewallParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicSourcefireParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicSourcefireParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicSourcefireParserTest.config
new file mode 100644
index 0000000..556a54c
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BasicSourcefireParserTest.config
@@ -0,0 +1,2 @@
+#BasicSourceFileParserTestConfig
+logFile=src/test/resources/SourceFireTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BroParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BroParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BroParserTest.config
new file mode 100644
index 0000000..c50743c
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/BroParserTest.config
@@ -0,0 +1,2 @@
+#BroParserTestConfig
+logFile=src/test/resources/BroParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/GrokAsaParserTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/GrokAsaParserTest.config b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/GrokAsaParserTest.config
new file mode 100644
index 0000000..2f41210
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/config/GrokAsaParserTest.config
@@ -0,0 +1,2 @@
+#GrokParserTestConfig
+logFile=src/test/resources/GrokParserTest.log


[20/26] incubator-metron git commit: replace opensoc-streaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
new file mode 100644
index 0000000..b06137d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
@@ -0,0 +1,809 @@
+package com.opensoc.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.annotation.Resource;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Singleton class which integrates with HBase table and returns pcaps sorted by
+ * timestamp(dsc) for the given list of keys. Creates HConnection if it is not
+ * already created and the same connection instance is being used for all
+ * requests
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+
+@Path("/")
+public class PcapGetterHBaseImpl implements IPcapGetter {
+
+  /** The pcap getter h base. */
+  private static IPcapGetter pcapGetterHBase = null;
+
+  /** The Constant LOG. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapGetterHBaseImpl.class);
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List,
+   * java.lang.String, long, long, boolean, boolean, long)
+   */
+ 
+  
+	@GET
+	@Path("pcap/test")
+	@Produces("text/html")
+	public Response  index() throws URISyntaxException { 
+		return Response.ok("ALL GOOD").build();   
+	}
+	
+	
+  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
+      long startTime, long endTime, boolean includeReverseTraffic,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    Assert
+        .isTrue(
+            checkIfValidInput(keys, lastRowKey),
+            "No valid input. One of the value must be present from {keys, lastRowKey}");
+    LOGGER.info(" keys=" + keys.toString() + ";  lastRowKey="
+        + lastRowKey);
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+    // 1. Process partial response key
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      pcapsResponse = processKey(pcapsResponse, lastRowKey, startTime,
+          endTime, true, includeDuplicateLastRow, maxResultSize);
+      // LOGGER.debug("after scanning lastRowKey=" +
+      // pcapsResponse.toString()+"*********************************************************************");
+      if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+        return pcapsResponse;
+      }
+    }
+    // 2. Process input keys
+    List<String> sortedKeys = sortKeysByAscOrder(keys, includeReverseTraffic);
+    List<String> unprocessedKeys = new ArrayList<String>();
+    unprocessedKeys.addAll(sortedKeys);
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      unprocessedKeys.clear();
+      unprocessedKeys = getUnprocessedSublistOfKeys(sortedKeys,
+          lastRowKey);
+    }
+    LOGGER.info("unprocessedKeys in getPcaps" + unprocessedKeys.toString());
+    if (!CollectionUtils.isEmpty(unprocessedKeys)) {
+      for (int i = 0; i < unprocessedKeys.size(); i++) {
+        pcapsResponse = processKey(pcapsResponse, unprocessedKeys.get(i),
+            startTime, endTime, false, includeDuplicateLastRow, maxResultSize);
+        // LOGGER.debug("after scanning input unprocessedKeys.get(" + i + ") ="
+        // +
+        // pcapsResponse.toString()+"*********************************************************************");
+        if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+          return pcapsResponse;
+        }
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String, long,
+   * long, boolean)
+   */
+ 
+  public PcapsResponse getPcaps(String key, long startTime, long endTime,
+      boolean includeReverseTraffic) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    return getPcaps(Arrays.asList(key), null, startTime, endTime,
+        includeReverseTraffic, false, ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List)
+   */
+ 
+  public PcapsResponse getPcaps(List<String> keys) throws IOException {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    return getPcaps(keys, null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String)
+   */
+ 
+  public PcapsResponse getPcaps(String key) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    return getPcaps(Arrays.asList(key), null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapGetter singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapGetter getInstance() throws IOException {
+    if (pcapGetterHBase == null) {
+      synchronized (PcapGetterHBaseImpl.class) {
+        if (pcapGetterHBase == null) {
+          pcapGetterHBase = new PcapGetterHBaseImpl();
+        }
+      }
+    }
+    return pcapGetterHBase;
+  }
+
+  /**
+   * Instantiates a new pcap getter h base impl.
+   */
+  private PcapGetterHBaseImpl() {
+  }
+
+  /**
+   * Adds reverse keys to the list if the flag 'includeReverseTraffic' is set to
+   * true; removes duplicates and sorts the list by ascending order;.
+   * 
+   * @param keys
+   *          input keys
+   * @param includeReverseTraffic
+   *          flag whether or not to include reverse traffic
+   * @return List<String>
+   */
+  @VisibleForTesting
+  List<String> sortKeysByAscOrder(List<String> keys,
+      boolean includeReverseTraffic) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    if (includeReverseTraffic) {
+      keys.addAll(PcapHelper.reverseKey(keys));
+    }
+    List<String> deDupKeys = removeDuplicateKeys(keys);
+    Collections.sort(deDupKeys);
+    return deDupKeys;
+  }
+
+  /**
+   * Removes the duplicate keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the list
+   */
+  @VisibleForTesting
+public
+  List<String> removeDuplicateKeys(List<String> keys) {
+    Set<String> set = new HashSet<String>(keys);
+    return new ArrayList<String>(set);
+  }
+
+  /**
+   * <p>
+   * Returns the sublist starting from the element after the lastRowKey
+   * to the last element in the list; if the 'lastRowKey' is not matched
+   * the complete list will be returned.
+   * </p>
+   * 
+   * <pre>
+   * Eg :
+   *  keys = [18800006-1800000b-06-0019-caac, 18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   *  lastRowKey = "18800006-1800000b-06-0019-caac-65140-40815"
+   *  and the response from this method [18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   * </pre>
+   * 
+   * @param keys
+   *          keys
+   * @param lastRowKey
+   *          last row key of the previous partial response
+   * @return List<String>
+   */
+  @VisibleForTesting
+  List<String> getUnprocessedSublistOfKeys(List<String> keys,
+      String lastRowKey) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    Assert.hasText(lastRowKey, "'lastRowKey' must not be null");
+    String partialKey = getTokens(lastRowKey, 5);
+    int startIndex = 0;
+    for (int i = 0; i < keys.size(); i++) {
+      if (partialKey.equals(keys.get(i))) {
+        startIndex = i + 1;
+        break;
+      }
+    }
+    List<String> unprocessedKeys = keys.subList(startIndex, keys.size());
+    return unprocessedKeys;
+  }
+
+  /**
+   * Returns the first 'noOfTokens' tokens from the given key; token delimiter
+   * "-";.
+   * 
+   * @param key
+   *          given key
+   * @param noOfTokens
+   *          number of tokens to retrieve
+   * @return the tokens
+   */
+  @VisibleForTesting
+  String getTokens(String key, int noOfTokens) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    String[] keyTokens = key.split(regex);
+    Assert.isTrue(noOfTokens < keyTokens.length,
+        "Invalid value for 'noOfTokens'");
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < noOfTokens; i++) {
+      sbf.append(keyTokens[i]);
+      if (i != (noOfTokens - 1)) {
+        sbf.append(HBaseConfigConstants.PCAP_KEY_DELIMETER);
+      }
+
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * Process key.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response
+   * @param key
+   *          the key
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @param isPartialResponse
+   *          the is partial response
+   * @param includeDuplicateLastRow
+   *          the include duplicate last row
+   * @param maxResultSize
+   *          the max result size
+   * @return the pcaps response
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  PcapsResponse processKey(PcapsResponse pcapsResponse, String key,
+      long startTime, long endTime, boolean isPartialResponse,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    HTable table = null;
+    Scan scan = null;
+    List<Cell> scannedCells = null;
+    try {
+      // 1. Create start and stop row for the key;
+      Map<String, String> keysMap = createStartAndStopRowKeys(key,
+          isPartialResponse, includeDuplicateLastRow);
+
+      // 2. if the input key contains all fragments (7) and it is not part
+      // of previous partial response (isPartialResponse),
+      // 'keysMap' will be null; do a Get; currently not doing any
+      // response size related checks for Get;
+      // by default all cells from a specific row are sorted by timestamp
+      if (keysMap == null) {
+        Get get = createGetRequest(key, startTime, endTime);
+        List<Cell> cells = executeGetRequest(table, get);
+        for (Cell cell : cells) {
+          pcapsResponse.addPcaps(CellUtil.cloneValue(cell));
+        }
+        return pcapsResponse;
+      }
+      // 3. Create and execute Scan request
+      scan = createScanRequest(pcapsResponse, keysMap, startTime, endTime,
+          maxResultSize);
+      scannedCells = executeScanRequest(table, scan);
+      LOGGER.info("scannedCells size :" + scannedCells.size());
+      addToResponse(pcapsResponse, scannedCells, maxResultSize);
+
+    } catch (IOException e) {
+      LOGGER.error("Exception occurred while fetching Pcaps for the keys :"
+          + key, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = ConfigurationUtil.getConnectionRetryLimit();
+        System.out.println("maxRetryLimit =" + maxRetryLimit);
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          System.out.println("attempting  =" + attempt);
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the
+            // existing
+            // connection
+            // and retry,
+            // it will
+            // create a new
+            // HConnection
+            scannedCells = executeScanRequest(table, scan);
+            addToResponse(pcapsResponse, scannedCells, maxResultSize);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              LOGGER.error("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      }
+
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /**
+   * Sorts the scanned cells by cell timestamp and appends their values to the
+   * response; marks the response PARTIAL (recording the last row key seen)
+   * once the accumulated size exceeds {@code maxResultSize}.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response being accumulated
+   * @param scannedCells
+   *          the scanned cells
+   * @param maxResultSize
+   *          the max result size
+   */
+  private void addToResponse(PcapsResponse pcapsResponse,
+      List<Cell> scannedCells, long maxResultSize) {
+    String lastKeyFromCurrentScan = null;
+    if (scannedCells != null && scannedCells.size() > 0) {
+      lastKeyFromCurrentScan = new String(CellUtil.cloneRow(scannedCells
+          .get(scannedCells.size() - 1)));
+    }
+    // NOTE(review): scannedCells is null-checked above but then sorted
+    // unconditionally — a null argument would NPE here; verify callers never
+    // pass null, or add an early return.
+    // 4. calculate the response size
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    for (Cell sortedCell : scannedCells) {
+      pcapsResponse.addPcaps(CellUtil.cloneValue(sortedCell));
+    }
+    if (!pcapsResponse.isResonseSizeWithinLimit(maxResultSize)) {
+      pcapsResponse.setStatus(PcapsResponse.Status.PARTIAL); // response size
+                                                             // reached
+      // NOTE(review): if the limit is exceeded while scannedCells was empty,
+      // lastKeyFromCurrentScan is still null and 'new String(null)' throws —
+      // confirm this path is unreachable.
+      pcapsResponse.setLastRowKey(new String(lastKeyFromCurrentScan));
+    }
+  }
+
+  /**
+   * Builds start and stop row keys according to the following logic : 1.
+   * Creates tokens out of 'key' using pcap_id delimiter ('-') 2. if the input
+   * 'key' contains (assume : configuredTokensInRowKey=7 and
+   * minimumTokensIninputKey=5): a). 5 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-00000-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999" b). 6 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort-id1") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-99999"
+   * 
+   * c). 7 tokens ("srcIp-dstIp-protocol-srcPort-dstPort-id1-id2") 1>. if the
+   * key is NOT part of the partial response from previous request, return
+   * 'null' 2>. if the key is part of partial response from previous request
+   * startKey = "srcIp-dstIp-protocol-srcPort-dstPort-id1-(id2+1)"; 1 is added
+   * to exclude this key as it was included in the previous request stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999"
+   * 
+   * @param key
+   *          the key
+   * @param isLastRowKey
+   *          if the key is part of partial response
+   * @param includeDuplicateLastRow
+   *          whether to include the last row of the previous (partial)
+   *          response again in this scan
+   * @return Map<String, String> with START_KEY/END_KEY entries, or null when a
+   *         Get should be used instead of a Scan
+   */
+  @VisibleForTesting
+  Map<String, String> createStartAndStopRowKeys(String key,
+      boolean isLastRowKey, boolean includeDuplicateLastRow) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    String[] keyTokens = key.split(regex);
+
+    String startKey = null;
+    String endKey = null;
+    Map<String, String> map = new HashMap<String, String>();
+
+    int configuredTokensInRowKey = ConfigurationUtil
+        .getConfiguredTokensInRowkey();
+    int minimumTokensIninputKey = ConfigurationUtil
+        .getMinimumTokensInInputkey();
+    Assert
+        .isTrue(
+            minimumTokensIninputKey <= configuredTokensInRowKey,
+            "tokens in the input key (separated by '-'), must be less than or equal to the tokens used in hbase table row key ");
+    // in case if the input key contains 'configuredTokensInRowKey' tokens and
+    // it is NOT a
+    // partial response key, do a Get instead of Scan
+    if (keyTokens.length == configuredTokensInRowKey) {
+      if (!isLastRowKey) {
+        return null;
+      }
+      // it is a partial response key; 'startKey' is same as input partial
+      // response key; 'endKey' can be built by replacing
+      // (configuredTokensInRowKey - minimumTokensIninputKey) tokens
+      // of input partial response key with '99999'
+      // NOTE(review): this inner check only fires when
+      // configuredTokensInRowKey == minimumTokensIninputKey (we are already in
+      // the keyTokens.length == configuredTokensInRowKey branch); confirm that
+      // returning null is intended for that configuration.
+      if (keyTokens.length == minimumTokensIninputKey) {
+        return null;
+      }
+      int appendingTokenSlots = configuredTokensInRowKey
+          - minimumTokensIninputKey;
+      if (appendingTokenSlots > 0) {
+        String partialKey = getTokens(key, minimumTokensIninputKey);
+        StringBuffer sbfStartNew = new StringBuffer(partialKey);
+        StringBuffer sbfEndNew = new StringBuffer(partialKey);
+        for (int i = 0; i < appendingTokenSlots; i++) {
+          if (i == (appendingTokenSlots - 1)) {
+            // last slot: advance the fragment id by one so the row that ended
+            // the previous (partial) response is excluded, unless the caller
+            // asked to include it again
+            if (!includeDuplicateLastRow) {
+              sbfStartNew
+                  .append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(
+                      Integer.valueOf(keyTokens[minimumTokensIninputKey + i]) + 1);
+            } else {
+              sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(keyTokens[minimumTokensIninputKey + i]);
+            }
+          } else {
+            sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+                keyTokens[minimumTokensIninputKey + i]);
+          }
+          sbfEndNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+              getMaxLimitForAppendingTokens());
+        }
+        startKey = sbfStartNew.toString();
+        endKey = sbfEndNew.toString();
+      }
+    } else {
+      // fewer tokens than the configured row key: pad the start key with the
+      // minimum ("000...") and the stop key with the maximum ("999...") values
+      StringBuffer sbfStart = new StringBuffer(key);
+      StringBuffer sbfEnd = new StringBuffer(key);
+      for (int i = keyTokens.length; i < configuredTokensInRowKey; i++) {
+        sbfStart.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMinLimitForAppendingTokens());
+        sbfEnd.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMaxLimitForAppendingTokens());
+      }
+      startKey = sbfStart.toString();
+      endKey = sbfEnd.toString();
+    }
+    map.put(HBaseConfigConstants.START_KEY, startKey);
+    map.put(HBaseConfigConstants.END_KEY, endKey);
+
+    return map;
+  }
+
+  /**
+   * Returns false only when 'keys' is null/empty AND 'lastRowKey' is
+   * null/empty; otherwise returns true. I.e. at least one of the two inputs
+   * must be supplied for the request to be considered valid.
+   * 
+   * @param keys
+   *          input row keys
+   * @param lastRowKey
+   *          partial response key
+   * @return boolean true when the input is usable
+   */
+  @VisibleForTesting
+  boolean checkIfValidInput(List<String> keys, String lastRowKey) {
+    if (CollectionUtils.isEmpty(keys)
+        && StringUtils.isEmpty(lastRowKey)) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Executes the given Get request and returns the matching cells for the
+   * configured column family/qualifier.
+   * 
+   * @param table
+   *          hbase table
+   * @param get
+   *          Get
+   * @return List<Cell>
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeGetRequest(HTable table, Get get)
+      throws IOException {
+    LOGGER.info("Get :" + get.toString());
+    // NOTE(review): the 'table' parameter is immediately overwritten here, so
+    // the caller-supplied table is never used — and the table obtained below
+    // is not the instance the caller closes in its finally block, which looks
+    // like a resource leak; verify intent.
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+        ConfigurationUtil.getTableName());
+    Result result = table.get(get);
+    List<Cell> cells = result.getColumnCells(
+        ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+    return cells;
+  }
+
+  /**
+   * Executes the given Scan request and collects all matching cells for the
+   * configured column family/qualifier across every returned row.
+   * 
+   * @param table
+   *          hbase table
+   * @param scan
+   *          the scan
+   * @return the list of matching cells
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeScanRequest(HTable table, Scan scan)
+      throws IOException {
+    LOGGER.info("Scan :" + scan.toString());
+    // NOTE(review): as in executeGetRequest, the 'table' parameter is
+    // clobbered; also note the table name is read via the raw
+    // "hbase.table.name" property here while executeGetRequest uses
+    // ConfigurationUtil.getTableName() — these should agree.
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    // NOTE(review): resultScanner is never closed — ResultScanner is a
+    // resource and should be closed in a finally block to avoid leaking
+    // server-side scanner state.
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(
+          ConfigurationUtil.getColumnFamily(),
+          ConfigurationUtil.getColumnQualifier());
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    return scannedCells;
+  }
+
+  /**
+   * Creates a Get request for the given row key, restricted to the configured
+   * column family/qualifier, max versions, and optional time range.
+   * 
+   * @param key
+   *          the row key
+   * @param startTime
+   *          the start time (negative means unbounded)
+   * @param endTime
+   *          the end time (negative means unbounded)
+   * @return the configured Get
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Get createGetRequest(String key, long startTime, long endTime)
+      throws IOException {
+    Get get = new Get(Bytes.toBytes(key));
+    // set family name
+    get.addFamily(ConfigurationUtil.getColumnFamily());
+
+    // set column family, qualifier
+    get.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+
+    // set max versions
+    get.setMaxVersions(ConfigurationUtil.getMaxVersions());
+
+    // set time range
+    setTimeRangeOnGet(get, startTime, endTime);
+    return get;
+  }
+
+  /**
+   * Creates a Scan request bounded by the start/stop keys in 'keysMap',
+   * restricted to the configured column family/qualifier, with a max result
+   * size derived from the space remaining in the current response.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response accumulated so far (its size reduces the scan
+   *          budget)
+   * @param keysMap
+   *          map holding START_KEY and END_KEY
+   * @param startTime
+   *          the start time (negative means unbounded)
+   * @param endTime
+   *          the end time (negative means unbounded)
+   * @param maxResultSize
+   *          the max result size
+   * @return the configured Scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(PcapsResponse pcapsResponse,
+      Map<String, String> keysMap, long startTime, long endTime,
+      long maxResultSize) throws IOException {
+    Scan scan = new Scan();
+    // set column family, qualifier
+    scan.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+
+    // set start and stop keys
+    scan.setStartRow(keysMap.get(HBaseConfigConstants.START_KEY).getBytes());
+    scan.setStopRow(keysMap.get(HBaseConfigConstants.END_KEY).getBytes());
+
+    // set max results size : remaining size = max results size - ( current
+    // pcaps response size + possible maximum row size)
+    long remainingSize = maxResultSize
+        - (pcapsResponse.getResponseSize() + ConfigurationUtil.getMaxRowSize());
+
+    if (remainingSize > 0) {
+      scan.setMaxResultSize(remainingSize);
+    }
+    // set max versions
+    // NOTE(review): reads the raw "hbase.table.column.maxVersions" property
+    // while createGetRequest uses ConfigurationUtil.getMaxVersions(); these
+    // should be kept consistent.
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+
+    // set time range
+    setTimeRangeOnScan(scan, startTime, endTime);
+    return scan;
+  }
+
+  /**
+   * Applies the [startTime, endTime) range to the scan, after converting each
+   * bound to the table's data-creation time unit. A negative bound means
+   * "unbounded" on that side; when both bounds are negative no range is set.
+   * 
+   * NOTE(review): this logic is duplicated in setTimeRangeOnGet — consider a
+   * shared helper. Also the strict '<' assertion rejects startTime == endTime,
+   * which HBase's TimeRange would otherwise allow; confirm that is intended.
+   * 
+   * @param scan
+   *          the scan
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnScan(Scan scan, long startTime, long endTime)
+      throws IOException {
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+      scan.setTimeRange(startTime, endTime);
+    }
+  }
+
+  /**
+   * Applies the [startTime, endTime) range to the get, after converting each
+   * bound to the table's data-creation time unit. A negative bound means
+   * "unbounded" on that side; when both bounds are negative no range is set.
+   * (Mirror of setTimeRangeOnScan.)
+   * 
+   * @param get
+   *          the get
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnGet(Get get, long startTime, long endTime)
+      throws IOException {
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+      get.setTimeRange(startTime, endTime);
+    }
+  }
+
+  /**
+   * Builds the minimum padding token: a string of '0' repeated
+   * getAppendingTokenDigits() times (e.g. "00000").
+   * 
+   * @return the min limit for appending tokens
+   */
+  private String getMinLimitForAppendingTokens() {
+    int digits = ConfigurationUtil.getAppendingTokenDigits();
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < digits; i++) {
+      sbf.append("0");
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * Builds the maximum padding token: a string of '9' repeated
+   * getAppendingTokenDigits() times (e.g. "99999").
+   * 
+   * @return the max limit for appending tokens
+   */
+  private String getMaxLimitForAppendingTokens() {
+    int digits = ConfigurationUtil.getAppendingTokenDigits();
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < digits; i++) {
+      sbf.append("9");
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * Command-line entry point: fetches pcaps for the comma-separated keys in
+   * args[2] (optionally bounded by start/end times in args[3]/args[4]) and
+   * writes the merged result to the file named in args[1].
+   * 
+   * NOTE(review): the guard only requires args.length >= 2, but args[2] is
+   * read unconditionally below — invoking with exactly two arguments throws
+   * ArrayIndexOutOfBoundsException. The usage() text also describes a
+   * different argument list; both should be reconciled.
+   * 
+   * @param args
+   *          the arguments
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static void main(String[] args) throws IOException {
+    if (args == null || args.length < 2) {
+      usage();
+      return;
+    }
+    String outputFileName = null;
+    outputFileName = args[1];
+    List<String> keys = Arrays.asList(StringUtils.split(args[2], ","));
+    System.out.println("Geting keys " + keys);
+    long startTime = 0;
+    long endTime = Long.MAX_VALUE;
+    if (args.length > 3) {
+      startTime = Long.valueOf(args[3]);
+    }
+    if (args.length > 4) {
+      endTime = Long.valueOf(args[4]);
+    }
+    System.out.println("With start time " + startTime + " and end time "
+        + endTime);
+    PcapGetterHBaseImpl downloader = new PcapGetterHBaseImpl();
+    PcapsResponse pcaps = downloader.getPcaps(keys, null, startTime, endTime,
+        false, false, 6);
+    File file = new File(outputFileName);
+    // truncate any existing file before appending the merged pcap bytes
+    FileUtils.write(file, "", false);
+    FileUtils.writeByteArrayToFile(file, pcaps.getPcaps(), true);
+  }
+
+  /**
+   * Prints command-line usage. NOTE(review): the argument list shown here does
+   * not match what main() actually reads (output file, keys, start/end time).
+   */
+  private static void usage() {
+    System.out.println("java " + PcapGetterHBaseImpl.class.getName() // $codepro.audit.disable
+        // debuggingCode
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapHelper.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapHelper.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapHelper.java
new file mode 100644
index 0000000..5224945
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapHelper.java
@@ -0,0 +1,205 @@
+package com.opensoc.pcapservice;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.mortbay.log.Log;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * utility class which holds methods related to time conversions, building
+ * reverse keys.
+ */
+public class PcapHelper {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger.getLogger(PcapHelper.class);
+
+  /** Shared comparator instance; NOTE(review): could be declared final. */
+  private static CellTimestampComparator CELL_TIMESTAMP_COMPARATOR = new CellTimestampComparator();
+
+  /**
+   * The Enum TimeUnit. Represents the time unit the HBase table's data was
+   * written in (not java.util.concurrent.TimeUnit).
+   */
+  public enum TimeUnit {
+
+    /** The seconds. */
+    SECONDS,
+    /** The millis. */
+    MILLIS,
+    /** The micros. */
+    MICROS,
+    /** The unknown. */
+    UNKNOWN
+  };
+
+  /**
+   * Converts the given time to the 'hbase' data creation time unit, guessing
+   * the input's unit from its magnitude: <= 10 digits is treated as seconds,
+   * <= 13 digits as millis, <= 16 digits as micros; anything larger is
+   * returned unchanged.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the converted time
+   */
+  public static long convertToDataCreationTimeUnit(long inputTime) {
+    if (inputTime <= 9999999999L) {
+      return convertSecondsToDataCreationTimeUnit(inputTime); // input time unit
+                                                              // is in seconds
+    } else if (inputTime <= 9999999999999L) {
+      return convertMillisToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // is in millis
+    } else if (inputTime <= 9999999999999999L) {
+      return convertMicrosToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // it in micros
+    }
+    return inputTime; // input time unit is unknown
+  }
+
+  /**
+   * Returns the 'hbase' data creation time unit by reading
+   * 'hbase.table.data.time.unit' property in 'hbase-config' properties file; If
+   * none is mentioned in properties file, returns <code>TimeUnit.UNKNOWN</code>
+   * 
+   * NOTE(review): timeUnit.toString() is invoked in the debug log BEFORE the
+   * isNotEmpty check — if the property is absent this throws
+   * NullPointerException instead of returning UNKNOWN.
+   * 
+   * @return TimeUnit
+   */
+  @VisibleForTesting
+  public static TimeUnit getDataCreationTimeUnit() {
+    String timeUnit = ConfigurationUtil.getConfiguration().getString(
+        "hbase.table.data.time.unit");
+    LOGGER.debug("hbase.table.data.time.unit=" + timeUnit.toString());
+    if (StringUtils.isNotEmpty(timeUnit)) {
+      return TimeUnit.valueOf(timeUnit);
+    }
+    return TimeUnit.UNKNOWN;
+  }
+
+  /**
+   * Convert seconds to data creation time unit.
+   * 
+   * NOTE(review): the System.out.println calls in these converters look like
+   * leftover debugging; prefer LOGGER.debug.
+   * 
+   * @param inputTime
+   *          the input time in seconds
+   * @return the time in the table's data creation unit
+   */
+  @VisibleForTesting
+  public static long convertSecondsToDataCreationTimeUnit(long inputTime) {
+    System.out.println("convert Seconds To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000 * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Builds the reverseKey to fetch the pcaps in the reverse traffic
+   * (destination to source). On any parse failure a warning is logged and the
+   * (possibly empty) accumulated string is returned.
+   * 
+   * @param key
+   *          indicates hbase rowKey (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return String indicates the key in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
+   */
+  public static String reverseKey(String key) {
+    Assert.hasText(key, "key must not be null or empty");
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    StringBuffer sb = new StringBuffer();
+    try {
+      String[] tokens = key.split(regex);
+      Assert
+          .isTrue(
+              (tokens.length == 5 || tokens.length == 6 || tokens.length == 7),
+              "key is not in the format : 'srcAddr-dstAddr-protocol-srcPort-dstPort-{ipId-fragment identifier}'");
+      sb.append(tokens[1]).append(delimeter).append(tokens[0])
+          .append(delimeter).append(tokens[2]).append(delimeter)
+          .append(tokens[4]).append(delimeter).append(tokens[3]);
+    } catch (Exception e) {
+      Log.warn("Failed to reverse the key. Reverse scan won't be performed.", e);
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Builds the reverseKeys to fetch the pcaps in the reverse traffic
+   * (destination to source). If all keys in the input are not in the expected
+   * format, it returns an empty list;
+   * 
+   * @param keys
+   *          indicates list of hbase rowKeys (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return List<String> indicates the list of keys in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
+   */
+  public static List<String> reverseKey(List<String> keys) {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    List<String> reverseKeys = new ArrayList<String>();
+    for (String key : keys) {
+      if (key != null) {
+        String reverseKey = reverseKey(key);
+        if (StringUtils.isNotEmpty(reverseKey)) {
+          reverseKeys.add(reverseKey);
+        }
+      }
+    }
+    return reverseKeys;
+  }
+
+  /**
+   * Returns Comparator for sorting pcaps cells based on the timestamp (dsc).
+   * 
+   * @return CellTimestampComparator
+   */
+  public static CellTimestampComparator getCellTimestampComparator() {
+    return CELL_TIMESTAMP_COMPARATOR;
+  }
+
+  /**
+   * Convert millis to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time in milliseconds
+   * @return the time in the table's data creation unit
+   */
+  @VisibleForTesting
+  private static long convertMillisToDataCreationTimeUnit(long inputTime) {
+    System.out.println("convert Millis To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return (inputTime / 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Convert micros to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time in microseconds
+   * @return the time in the table's data creation unit
+   */
+  @VisibleForTesting
+  private static long convertMicrosToDataCreationTimeUnit(long inputTime) {
+    System.out.println("convert Micros To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime / (1000 * 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime / 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime;
+    }
+    return inputTime;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
new file mode 100644
index 0000000..55c6b78
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
@@ -0,0 +1,256 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.dataservices.auth.AuthTokenFilter;
+import com.opensoc.pcap.PcapUtils;
+
+/**
+ * REST endpoints for downloading pcaps from HBase by key, key range, or flow
+ * identifiers. Responses are streamed as application/octet-stream with a
+ * Content-Disposition attachment header; partial results carry the last row
+ * key in the 'lastRowKey' response header.
+ */
+@Path("/pcap/")
+public class PcapReceiverImplRestEasy {
+
+	/** The Constant LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(PcapReceiverImplRestEasy.class);
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_NAME. */
+	private static final String HEADER_CONTENT_DISPOSITION_NAME = "Content-Disposition";
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_VALUE. */
+	private static final String HEADER_CONTENT_DISPOSITION_VALUE = "attachment; filename=\"managed-threat.pcap\"";
+
+	/** partial response key header name. */
+	private static final String HEADER_PARTIAL_RESPONE_KEY = "lastRowKey";
+
+	/**
+	 * Fetches pcaps for the given keys.
+	 *
+	 * NOTE(review): Response.serverError().status(Status.NO_CONTENT) mixes
+	 * 5xx and 204 semantics — the status() call overrides serverError(), so
+	 * the error entity is sent with a 204; an explicit 400 would be clearer.
+	 * Also pcapResponse.toString() is logged before the null check below,
+	 * which would NPE if getPcaps ever returned null.
+	 */
+	@AuthTokenFilter
+	@GET
+	@Path("/pcapGetter/getPcapsByKeys")
+	public Response getPcapsByKeys(
+			@QueryParam("keys") List<String> keys,
+			@QueryParam("lastRowKey") String lastRowKey,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@QueryParam("includeDuplicateLastRow") boolean includeDuplicateLastRow,
+			@QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
+			@QueryParam("maxResponseSize") String maxResponseSize,
+			@Context HttpServletResponse response) throws IOException {
+		PcapsResponse pcapResponse = null;
+
+		LOGGER.debug( "/pcapGetter/getPcapsByKeys");
+		
+		if (keys == null || keys.size() == 0) {
+			LOGGER.debug( "no keys provided" );
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'keys' must not be null or empty").build();
+		}
+
+		try {
+			IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+			pcapResponse = pcapGetter.getPcaps(parseKeys(keys), lastRowKey,
+					startTime, endTime, includeReverseTraffic,
+					includeDuplicateLastRow,
+					ConfigurationUtil.validateMaxResultSize(maxResponseSize));
+			LOGGER.info("pcaps response in REST layer ="
+					+ pcapResponse.toString());
+
+			// return http status '204 No Content' if the pcaps response size is
+			// 0
+			if (pcapResponse == null || pcapResponse.getResponseSize() == 0) {
+
+				return Response.status(Response.Status.NO_CONTENT).build();
+			}
+
+			// return http status '206 Partial Content', the partial response
+			// file and
+			// 'lastRowKey' header , if the pcaps response status is 'PARTIAL'
+
+			response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+			if (pcapResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+
+				response.setHeader(HEADER_PARTIAL_RESPONE_KEY,
+						pcapResponse.getLastRowKey());
+
+				return Response
+						.ok(pcapResponse.getPcaps(),
+								MediaType.APPLICATION_OCTET_STREAM).status(206)
+						.build();
+
+			}
+
+		} catch (IOException e) {
+			LOGGER.error(
+					"Exception occurred while fetching Pcaps for the keys :"
+							+ keys.toString(), e);
+			throw e;
+		}
+
+		// return http status '200 OK' along with the complete pcaps response
+		// file,
+		// and headers
+		// return new ResponseEntity<byte[]>(pcapResponse.getPcaps(), headers,
+		// HttpStatus.OK);
+
+		return Response
+				.ok(pcapResponse.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+
+	}
+	
+	/**
+	 * Fetches pcaps for an explicit [startKey, endKey] row-key range.
+	 */
+	@AuthTokenFilter
+	@GET
+	@Path("/pcapGetter/getPcapsByKeyRange")
+
+	  public Response getPcapsByKeyRange(
+	      @QueryParam("startKey") String startKey,
+	      @QueryParam("endKey")String endKey,
+	      @QueryParam("maxResponseSize") String maxResponseSize,
+	      @DefaultValue("-1") @QueryParam("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam("endTime") long endTime, 
+	      @Context HttpServletResponse servlet_response) throws IOException {
+
+		if (startKey == null || startKey.isEmpty())
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'start key' must not be null or empty").build();
+		
+		if (endKey == null || endKey.isEmpty())
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'end key' must not be null or empty").build();
+		
+		
+	    byte[] response = null;
+	    try {
+	      IPcapScanner pcapScanner = PcapScannerHBaseImpl.getInstance();
+	      response = pcapScanner.getPcaps(startKey, endKey,
+	          ConfigurationUtil.validateMaxResultSize(maxResponseSize), startTime,
+	          endTime);
+	      if (response == null || response.length == 0) {
+	    	  
+	    	  return Response.status(Response.Status.NO_CONTENT).entity("No Data").build();
+	        
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error(
+	          "Exception occurred while fetching Pcaps for the key range : startKey="
+	              + startKey + ", endKey=" + endKey, e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    
+		return Response
+				.ok(response, MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+
+	  /*
+	   * (non-Javadoc)
+	   * 
+	   * @see
+	   * com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByIdentifiers(java.lang
+	   * .String, java.lang.String, java.lang.String, java.lang.String,
+	   * java.lang.String, long, long, boolean,
+	   * javax.servlet.http.HttpServletResponse)
+	   */
+	@AuthTokenFilter  
+	@GET
+	@Path("/pcapGetter/getPcapsByIdentifiers")
+
+	  public Response getPcapsByIdentifiers(
+	      @QueryParam ("srcIp") String srcIp, 
+	      @QueryParam ("dstIp") String dstIp,
+	      @QueryParam ("protocol") String protocol, 
+	      @QueryParam ("srcPort") String srcPort,
+	      @QueryParam ("dstPort") String dstPort,
+	      @DefaultValue("-1") @QueryParam ("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam ("endTime")long endTime,
+	      @DefaultValue("false") @QueryParam ("includeReverseTraffic") boolean includeReverseTraffic,
+	      @Context HttpServletResponse servlet_response)
+	      
+	      throws IOException {
+		
+		if (srcIp == null || srcIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcIp' must not be null or empty").build();
+		
+		if (dstIp == null || dstIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstIp' must not be null or empty").build();
+		
+		if (protocol == null || protocol.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'protocol' must not be null or empty").build();
+		
+		if (srcPort == null || srcPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcPort' must not be null or empty").build();
+		
+		if (dstPort == null || dstPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstPort' must not be null or empty").build();
+		
+	
+	    PcapsResponse response = null;
+	    try {
+	      String sessionKey = PcapUtils.getSessionKey(srcIp, dstIp, protocol,
+	          srcPort, dstPort);
+	      LOGGER.info("sessionKey =" + sessionKey);
+	      IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+	      response = pcapGetter.getPcaps(Arrays.asList(sessionKey), null,
+	          startTime, endTime, includeReverseTraffic, false,
+	          ConfigurationUtil.getDefaultResultSize());
+	      if (response == null || response.getResponseSize() == 0) {
+	         return Response.status(Response.Status.NO_CONTENT).build();
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error("Exception occurred while fetching Pcaps by identifiers :",
+	          e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    return Response
+				.ok(response.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+	/**
+	 * This method parses the each value in the List using delimiter ',' and
+	 * builds a new List;.
+	 * 
+	 * @param keys
+	 *            list of keys to be parsed
+	 * @return list of keys
+	 */
+	@VisibleForTesting
+	List<String> parseKeys(List<String> keys) {
+		// Assert.notEmpty(keys);
+		List<String> parsedKeys = new ArrayList<String>();
+		for (String key : keys) {
+			parsedKeys.addAll(Arrays.asList(StringUtils.split(
+					StringUtils.trim(key), ",")));
+		}
+		return parsedKeys;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
new file mode 100644
index 0000000..b1f0179
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
@@ -0,0 +1,302 @@
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.pcap.PcapMerger;
+
+/**
+ * Singleton class which integrates with HBase table and returns sorted pcaps
+ * based on the timestamp for the given range of keys. Creates HConnection if it
+ * is not already created and the same connection instance is being used for all
+ * requests
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapScannerHBaseImpl implements IPcapScanner {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapScannerHBaseImpl.class);
+
+  /** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+  private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+  /** The pcap scanner h base. */
+  private static IPcapScanner pcapScannerHBase = null;
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String, long, long, long)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey, long maxResultSize,
+      long startTime, long endTime) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    byte[] cf = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.family"));
+    byte[] cq = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.qualifier"));
+    // create scan request
+    Scan scan = createScanRequest(cf, cq, startKey, endKey, maxResultSize,
+        startTime, endTime);
+    List<byte[]> pcaps = new ArrayList<byte[]>();
+    HTable table = null;
+    try {
+      pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+    } catch (IOException e) {
+      LOGGER.error(
+          "Exception occurred while fetching Pcaps for the key range : startKey="
+              + startKey + ", endKey=" + endKey, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = getConnectionRetryLimit();
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the existing
+                                                      // connection and retry,
+                                                      // it will create a new
+                                                      // HConnection
+            pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              System.out.println("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      } else {
+        throw e;
+      }
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    byte[] response = baos.toByteArray();
+    return response;
+  }
+
+  /**
+   * Creates the scan request.
+   * 
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @param maxResultSize
+   *          the max result size
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @return the scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(byte[] cf, byte[] cq, String startKey, String endKey,
+      long maxResultSize, long startTime, long endTime) throws IOException {
+    Scan scan = new Scan();
+    scan.addColumn(cf, cq);
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+    scan.setStartRow(startKey.getBytes());
+    if (endKey != null) {
+      scan.setStopRow(endKey.getBytes());
+    }
+    scan.setMaxResultSize(maxResultSize);
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+    }
+    // create Scan request;
+    if (setTimeRange) {
+      scan.setTimeRange(startTime, endTime);
+    }
+    return scan;
+  }
+
+  /**
+   * Scan pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   * @param table
+   *          the table
+   * @param scan
+   *          the scan
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @return the list
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  List<byte[]> scanPcaps(List<byte[]> pcaps, HTable table, Scan scan,
+      byte[] cf, byte[] cq) throws IOException {
+    LOGGER.info("Scan =" + scan.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(cf, cq);
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    LOGGER.info("sorted cells :" + scannedCells.toString());
+    for (Cell sortedCell : scannedCells) {
+      pcaps.add(CellUtil.cloneValue(sortedCell));
+    }
+    return pcaps;
+  }
+
+  /**
+   * Gets the connection retry limit.
+   * 
+   * @return the connection retry limit
+   */
+  private int getConnectionRetryLimit() {
+    return ConfigurationUtil.getConfiguration().getInt(
+        "hbase.hconnection.retries.number", DEFAULT_HCONNECTION_RETRY_LIMIT);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    Assert.hasText(endKey, "endKey must no be null or empty");
+    return getPcaps(startKey, endKey, ConfigurationUtil.getDefaultResultSize(),
+        -1, -1);
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapScanner singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapScanner getInstance() throws IOException {
+    if (pcapScannerHBase == null) {
+      synchronized (PcapScannerHBaseImpl.class) {
+        if (pcapScannerHBase == null) {
+          pcapScannerHBase = new PcapScannerHBaseImpl();
+        }
+      }
+    }
+    return pcapScannerHBase;
+  }
+
+  /**
+   * Instantiates a new pcap scanner h base impl.
+   */
+  private PcapScannerHBaseImpl() {
+  }
+
+  /**
+   * The main method.
+   */
+  // public static void main(String[] args) throws IOException {
+  // if (args == null || args.length < 3) {
+  // usage();
+  // return;
+  // }
+  // String outputFileName = null;
+  // String startKey = null;
+  // String stopKey = null;
+  // outputFileName = args[0];
+  // startKey = args[1];
+  // if (args.length > 2) { // NOPMD by sheetal on 1/29/14 3:55 PM
+  // stopKey = args[2];
+  // }
+  // PcapScannerHBaseImpl downloader = new PcapScannerHBaseImpl();
+  // byte[] pcaps = downloader.getPcaps(startKey, stopKey, defaultResultSize, 0,
+  // Long.MAX_VALUE);
+  // File file = new File(outputFileName);
+  // FileUtils.write(file, "", false);
+  // ByteArrayOutputStream baos = new ByteArrayOutputStream(); //
+  // $codepro.audit.disable
+  // // closeWhereCreated
+  // PcapMerger.merge(baos, pcaps);
+  // FileUtils.writeByteArrayToFile(file, baos.toByteArray(), true);
+  // }
+
+  /**
+   * Usage.
+   */
+  @SuppressWarnings("unused")
+  private static void usage() {
+    System.out.println("java " + PcapScannerHBaseImpl.class.getName() // NOPMD
+                                                                      // by
+        // sheetal
+        // <!-- //
+        // $codepro.audit.disable
+        // debuggingCode
+        // -->
+        // on
+        // 1/29/14
+        // 3:55
+        // PM
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapsResponse.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
new file mode 100644
index 0000000..10af9e0
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
@@ -0,0 +1,153 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.opensoc.pcap.PcapMerger;
+
+
+
+/**
+ * Holds pcaps data, status and the partial response key.
+ * 
+ * @author Sayi
+ */
+public class PcapsResponse {
+
+  /**
+   * The Enum Status.
+   */
+  public enum Status {
+    
+    /** The partial. */
+    PARTIAL, 
+ /** The complete. */
+ COMPLETE
+  };
+
+  /** response of the processed keys. */
+  private List<byte[]> pcaps = new ArrayList<byte[]>();;
+
+  /** partial response key. */
+  private String lastRowKey;
+
+  /** The status. */
+  private Status status = Status.COMPLETE;
+
+  /**
+   * Sets the pcaps.
+   * 
+   * @param pcaps
+   *          the new pcaps
+   */
+  public void setPcaps(List<byte[]> pcaps) {
+    this.pcaps = pcaps;
+  }
+
+  /**
+   * Adds the pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   */
+  public void addPcaps(byte[] pcaps) {
+    this.pcaps.add(pcaps);
+  }
+
+  /**
+   * Gets the partial response key.
+   * 
+   * @return the partial response key
+   */
+  public String getLastRowKey() {
+    return lastRowKey;
+  }
+
+  /**
+   * Sets the partial response key.
+   * 
+   * @param lastRowKey
+   *          the last row key
+   */
+  public void setLastRowKey(String lastRowKey) {
+    this.lastRowKey = lastRowKey;
+  }
+
+  /**
+   * Gets the status.
+   * 
+   * @return the status
+   */
+  public Status getStatus() {
+    return status;
+  }
+
+  /**
+   * Sets the status.
+   * 
+   * @param status
+   *          the new status
+   */
+  public void setStatus(Status status) {
+    this.status = status;
+  }
+
+  /**
+   * Checks if is resonse size within limit.
+   * 
+   * @param maxResultSize
+   *          the max result size
+   * @return true, if is resonse size within limit
+   */
+  public boolean isResonseSizeWithinLimit(long maxResultSize) {
+    // System.out.println("isResonseSizeWithinLimit() : getResponseSize() < (input|default result size - maximum packet size ) ="+
+    // getResponseSize()+ " < " + ( maxResultSize
+    // -ConfigurationUtil.getMaxRowSize()));
+    return getResponseSize() < (maxResultSize - ConfigurationUtil
+        .getMaxRowSize());
+  }
+
+  /**
+   * Gets the response size.
+   * 
+   * @return the response size
+   */
+  public long getResponseSize() {
+    long responseSize = 0;
+    for (byte[] pcap : this.pcaps) {
+      responseSize = responseSize + pcap.length;
+    }
+    return responseSize;
+  }
+
+  /**
+   * Gets the pcaps.
+   * 
+   * @return the pcaps
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps() throws IOException {
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    return baos.toByteArray();
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "PcapsResponse [lastRowKey=" + lastRowKey
+        + ", status=" + status + ", pcapsSize="
+        + String.valueOf(getResponseSize()) + "]";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
new file mode 100644
index 0000000..651affe
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
@@ -0,0 +1,238 @@
+package com.opensoc.pcapservice;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * The Class RestTestingUtil.
+ */
+public class RestTestingUtil {
+  
+  /** The host name. */
+  public static String hostName = null;
+
+  /**
+   * Gets the pcaps by keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the pcaps by keys
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeys(String keys) {
+    System.out
+        .println("**********************getPcapsByKeys ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
+        + "&includeReverseTraffic={includeReverseTraffic}"
+        + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    String includeReverseTraffic = "false";
+
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("keys", keys);
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with reverse traffic
+    includeReverseTraffic = "true";
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3.with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 4.with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response4 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response4);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * Gets the pcaps by keys range.
+   * 
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @return the pcaps by keys range
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeysRange(String startKey, String endKey) {
+    System.out
+        .println("**********************getPcapsByKeysRange ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
+        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("startKey", startKey);
+    map.put("endKey", "endKey");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3. with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   */
+  public static void main(String[] args) {
+
+    /*
+     * Run this program with system properties
+     * 
+     * -DhostName=mon.hw.com:8090
+     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
+     * 1800000b-06-0050-5af6-64840-40785
+     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
+     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
+     */
+
+    hostName = System.getProperty("hostName");
+
+    String keys = System.getProperty("keys");
+
+    String statyKey = System.getProperty("startKey");
+    String endKey = System.getProperty("endKey");
+
+    getPcapsByKeys(keys);
+    getPcapsByKeysRange(statyKey, endKey);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
new file mode 100644
index 0000000..1fdb025
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
@@ -0,0 +1,26 @@
+package com.opensoc.pcapservice.rest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.core.Application;
+
+import com.opensoc.pcapservice.PcapReceiverImplRestEasy;
+
+public class JettyServiceRunner extends Application  {
+	
+
+	private static Set services = new HashSet(); 
+		
+	public  JettyServiceRunner() {     
+		// initialize restful services   
+		services.add(new PcapReceiverImplRestEasy());  
+	}
+	@Override
+	public  Set getSingletons() {
+		return services;
+	}  
+	public  static Set getServices() {  
+		return services;
+	} 
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/PcapService.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
new file mode 100644
index 0000000..5f47ead
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
@@ -0,0 +1,34 @@
+package com.opensoc.pcapservice.rest;
+
+import java.io.IOException;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher;
+
+import com.opensoc.helpers.services.PcapServiceCli;
+
+
+public class PcapService {
+
+	public static void main(String[] args) throws IOException {
+
+		PcapServiceCli cli = new PcapServiceCli(args);
+		cli.parse();
+		
+		Server server = new Server(cli.getPort());
+		ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+		context.setContextPath("/");
+		ServletHolder h = new ServletHolder(new HttpServletDispatcher());
+		h.setInitParameter("javax.ws.rs.Application", "com.opensoc.pcapservice.rest.JettyServiceRunner");
+		context.addServlet(h, "/*");
+		server.setHandler(context);
+		try {
+			server.start();
+			server.join();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/services/alerts/ElasticSearch_KafkaAlertsService.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/services/alerts/ElasticSearch_KafkaAlertsService.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/services/alerts/ElasticSearch_KafkaAlertsService.java
index a4a7fc0..69ec5bc 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/services/alerts/ElasticSearch_KafkaAlertsService.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/services/alerts/ElasticSearch_KafkaAlertsService.java
@@ -6,7 +6,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.opensoc.dataservices.common.OpenSOCService;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 
 @Singleton
 public class ElasticSearch_KafkaAlertsService implements OpenSOCService {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/resources/config-definition-hbase.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/resources/config-definition-hbase.xml b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/config-definition-hbase.xml
new file mode 100644
index 0000000..efe05e8
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/config-definition-hbase.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<configuration>
+	<header>
+		<result delimiterParsingDisabled="true" forceReloadCheck="true"></result>
+		<lookups>
+      		<lookup config-prefix="expr"
+              	config-class="org.apache.commons.configuration.interpol.ExprLookup">
+        		<variables>
+          			<variable name="System" value="Class:java.lang.System"/>
+          			<variable name="net" value="Class:java.net.InetAddress"/>
+          			<variable name="String" value="Class:org.apache.commons.lang.StringUtils"/>
+        		</variables>
+      		</lookup>
+    	</lookups>
+	</header>
+	<override>
+		<!-- 1. properties from 'hbase-config.properties' are loaded first; 
+				if a property is not present in this file, then it will search in the files in the order they are defined here.
+		     2. 'refreshDelay' indicates the minimum delay in milliseconds between checks to see if the underlying file is changed.
+		     3. 'config-optional' indicates this file is not required --> 
+		
+		<properties fileName="${expr:System.getProperty('configPath')+'/hbase-config.properties'}"  config-optional="true">
+			<reloadingStrategy refreshDelay="${expr:System.getProperty('configRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+	     </properties>
+		
+		<properties fileName="hbase-config-default.properties" config-optional="true">
+<!-- 					<reloadingStrategy refreshDelay="${expr:System.getProperty('defaultConfigRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+ -->	     </properties>
+		
+	</override>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-config-default.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-config-default.properties b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-config-default.properties
new file mode 100644
index 0000000..4ee56b6
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-config-default.properties
@@ -0,0 +1,40 @@
+#hbase zoo keeper configuration
+hbase.zookeeper.quorum=zkpr1,zkpr2,zkpr3
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=pcap
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 70
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request, applicable for only partial response scenario
+hbase.table.scan.include.duplicate.lastrow= true
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-site.xml
new file mode 100644
index 0000000..5c3c819
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/resources/hbase-site.xml
@@ -0,0 +1,127 @@
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java b/opensoc-streaming/OpenSOC-DataServices/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
new file mode 100644
index 0000000..c2a4bf2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
@@ -0,0 +1,92 @@
+package com.opensoc.pcapservice;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hbase.Cell;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.opensoc.pcapservice.CellTimestampComparator;
+
+/**
+ * The Class CellTimestampComparatorTest.
+ */
+public class CellTimestampComparatorTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_less.
+   */
+  @Test
+  public void test_less() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13845345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == -1);
+
+  }
+
+  /**
+   * Test_greater.
+   */
+  @Test
+  public void test_greater() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13745345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 1);
+
+  }
+
+  /**
+   * Test_equal.
+   */
+  @Test
+  public void test_equal() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 0);
+
+  }
+
+}


[10/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImplTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImplTest.java
deleted file mode 100644
index ea49d9c..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImplTest.java
+++ /dev/null
@@ -1,536 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.collections.ListUtils;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Scan;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.springframework.util.Assert;
-
-import com.cisco.opensoc.hbase.client.PcapGetterHBaseImpl;
-import com.cisco.opensoc.hbase.client.PcapsResponse;
-
-/**
- * The Class PcapGetterHBaseImplTest.
- */
-public class PcapGetterHBaseImplTest {
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test_get pcaps_with list.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @SuppressWarnings("unchecked")
-  @Test
-  public void test_getPcaps_withList() throws IOException {
-    // mocking
-    String[] keys = { "0a07002b-0a078039-06-1e8b-0087",
-        "0a070025-0a07807a-06-aab8-c360" };
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-
-    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
-    // Mockito.any(HTable.class), Mockito.any(Scan.class),
-    // Mockito.any(byte[].class), Mockito.any(byte[].class));
-    //
-    //
-    // actual call
-    // PcapsResponse response = spy.getPcaps(Arrays.asList(keys));
-
-    // verify
-    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
-  }
-
-  /**
-   * Test_get pcaps_with key.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @SuppressWarnings("unchecked")
-  @Test
-  public void test_getPcaps_withKey() throws IOException {
-    // mocking
-    String key = "0a07002b-0a078039-06-1e8b-0087";
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-
-    // //
-    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
-    // Mockito.any(HTable.class), Mockito.any(Scan.class),
-    // Mockito.any(byte[].class), Mockito.any(byte[].class));
-    //
-
-    // actual call
-    // PcapsResponse response = spy.getPcaps(key);
-
-    // verify
-    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
-  }
-
-  /**
-   * Test_get pcaps_with key and timestamps.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @SuppressWarnings("unchecked")
-  @Test
-  public void test_getPcaps_withKeyAndTimestamps() throws IOException {
-    // mocking
-    String key = "0a07002b-0a078039-06-1e8b-0087";
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-
-    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
-    // Mockito.any(HTable.class), Mockito.any(Scan.class),
-    // Mockito.any(byte[].class), Mockito.any(byte[].class));
-
-    // actual call
-    // PcapsResponse response = spy.getPcaps(key, startTime, endTime, false);
-
-    // verify
-    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
-  }
-
-  /**
-   * Test_get pcaps_with key_multiple pcaps.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @SuppressWarnings("unchecked")
-  @Test
-  public void test_getPcaps_withKey_multiplePcaps() throws IOException {
-    // mocking
-    String key = "0a07002b-0a078039-06-1e8b-0087";
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-    mockPcaps.add(getTestPcapBytes());
-
-    /*
-     * Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class
-     * ), Mockito.any(HTable.class), Mockito.any(Scan.class),
-     * Mockito.any(byte[].class), Mockito.any(byte[].class));
-     */
-    // actual call
-    // PcapsResponse response = spy.getPcaps(key);
-
-    // verify
-    // Assert.assertNotNull(response);
-    // Assert.assertTrue(response.getResponseSize() > mockPcaps.get(0).length);
-  }
-
-  /**
-   * Gets the test pcap bytes.
-   * 
-   * @return the test pcap bytes
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private byte[] getTestPcapBytes() throws IOException {
-    File fin = new File("src/test/resources/test-tcp-packet.pcap");
-    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
-    return pcapBytes;
-  }
-
-  /**
-   * Test_remove duplicates.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_removeDuplicates() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0019-caac");
-    keys.add("18800006-1800000b-06-0050-5af6");
-
-    List<String> deDupKeys = pcapGetter.removeDuplicateKeys(keys);
-    Assert.isTrue(deDupKeys.size() == 3);
-    List<String> testKeys = new ArrayList<String>();
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0019-caac");
-
-    ListUtils.isEqualList(deDupKeys, testKeys);
-  }
-
-  /**
-   * Test_sort keys by asc order_with out reverse traffic.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_sortKeysByAscOrder_withOutReverseTraffic()
-      throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-06-0019-caac");
-
-    List<String> result = pcapGetter.sortKeysByAscOrder(keys, false);
-
-    List<String> testKeys = new ArrayList<String>();
-    testKeys.add("18800006-1800000b-06-0019-caac");
-    testKeys.add("18800006-1800000b-06-0050-5af6");
-    testKeys.add("18800006-1800000b-11-0035-3810");
-
-    Assert.isTrue(ListUtils.isEqualList(result, testKeys));
-  }
-
-  /**
-   * Test_sort keys by asc order_with reverse traffic.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_sortKeysByAscOrder_withReverseTraffic() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-    keys.add("18800006-1800000b-11-0035-3812");
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-11-0035-3811");
-
-    List<String> result = pcapGetter.sortKeysByAscOrder(keys, true);
-    Assert.isTrue(result.size() == 6);
-  }
-
-  /**
-   * Test_sort keys by asc order_get unprocessed sublist of keys.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys()
-      throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-06-0019-caac");
-    System.out.println("original keys =" + keys.toString());
-
-    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
-    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
-
-    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
-        sortedKeys, "18800006-1800000b-06-0019-caac-65140-40815");
-    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
-    Assert.isTrue(unprocessedKeys1.size() == 2);
-
-    List<String> unprocessedKeys2 = pcapGetter.getUnprocessedSublistOfKeys(
-        sortedKeys, "18800006-1800000b-06-0050-5af6-65140-40815");
-    // System.out.println("unprocessedKeys2 ="+unprocessedKeys2);
-    Assert.isTrue(unprocessedKeys2.size() == 1);
-
-    List<String> unprocessedKeys3 = pcapGetter.getUnprocessedSublistOfKeys(
-        sortedKeys, "18800006-1800000b-11-0035-3810-6514040815");
-    // System.out.println("unprocessedKeys3 ="+unprocessedKeys3);
-    Assert.isTrue(unprocessedKeys3.size() == 0);
-
-  }
-
-  /**
-   * Test_sort keys by asc order_get unprocessed sublist of keys_with out match.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys_withOutMatch()
-      throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-06-0019-caac");
-    System.out.println("original keys =" + keys.toString());
-
-    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
-    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
-
-    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
-        sortedKeys, "18800006-1800000b-11-89-455-65140-40815");
-    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
-    Assert.isTrue(unprocessedKeys1.size() == 3);
-  }
-
-  /**
-   * Test_create start and stop row keys.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createStartAndStopRowKeys() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String key = "18800006-1800000b-11-0035-3810";
-    Map<String, String> map = pcapGetter.createStartAndStopRowKeys(key, false,
-        false);
-    System.out.println("map =" + map.toString());
-
-    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
-    Map<String, String> map1 = pcapGetter.createStartAndStopRowKeys(
-        lastRowKey, true, false);
-    System.out.println("map1 =" + map1.toString());
-
-    String lastRowKey2 = "18800006-1800000b-11-0035-3810-23234-32423";
-    Map<String, String> map2 = pcapGetter.createStartAndStopRowKeys(
-        lastRowKey2, true, true);
-    System.out.println("map2 =" + map2.toString());
-
-  }
-
-  /**
-   * Test_check if valid input_valid.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_checkIfValidInput_valid() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    List<String> keys = new ArrayList<String>();
-    keys.add("18800006-1800000b-11-0035-3810");
-    keys.add("18800006-1800000b-06-0050-5af6");
-    keys.add("18800006-1800000b-06-0019-caac");
-
-    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
-
-    boolean response = pcapGetter.checkIfValidInput(keys, lastRowKey);
-    Assert.isTrue(response);
-
-  }
-
-  /**
-   * Test_check if valid input_in valid.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_checkIfValidInput_inValid() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    @SuppressWarnings("unchecked")
-    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
-        null);
-    Assert.isTrue(!response);
-
-  }
-
-  /**
-   * Test_check if valid input_valid_mixed.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_checkIfValidInput_valid_mixed() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
-    @SuppressWarnings("unchecked")
-    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
-        lastRowKey);
-    Assert.isTrue(response);
-  }
-
-  /**
-   * Test_create get request.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createGetRequest() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String key = "18800006-1800000b-11-0035-3810-23234-324230";
-
-    long startTime = 139812323L; // in seconds
-    long endTime = 139923424L; // in seconds
-
-    Get get = pcapGetter.createGetRequest(key, startTime, endTime);
-    Assert.notNull(get);
-
-    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
-    // compare in micros as the data creation time unit is set to Micros in
-    // properties file.
-    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 * 1000);
-    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 * 1000);
-  }
-
-  /**
-   * Test_create get request_default time range.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createGetRequest_defaultTimeRange() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String key = "18800006-1800000b-11-0035-3810-23234-324230";
-
-    Get get = pcapGetter.createGetRequest(key, -1, -1);
-    Assert.notNull(get);
-
-    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
-    Assert.isTrue(get.getTimeRange().getMin() == 0);
-  }
-
-  /**
-   * Test_create get request_with start time.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createGetRequest_withStartTime() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String key = "18800006-1800000b-11-0035-3810-23234-324230";
-
-    long startTime = 139812323L; // in seconds
-
-    Get get = pcapGetter.createGetRequest(key, startTime, -1);
-    Assert.notNull(get);
-
-    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
-    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 * 1000);
-    Assert.isTrue(get.getTimeRange().getMax() == Long.valueOf(Long.MAX_VALUE));
-  }
-
-  /**
-   * Test_create get request_with end time.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createGetRequest_withEndTime() throws IOException {
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-    String key = "18800006-1800000b-11-0035-3810-23234-324230";
-
-    long endTime = 139923424L; // in seconds
-
-    Get get = pcapGetter.createGetRequest(key, -1, endTime);
-    Assert.notNull(get);
-
-    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
-    Assert.isTrue(get.getTimeRange().getMin() == 0);
-    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 * 1000);
-  }
-
-  /**
-   * Test_create scan request.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_createScanRequest() throws IOException {
-    // mocking
-    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
-        .getInstance();
-
-    PcapsResponse pcapsResponse = new PcapsResponse();
-
-    Map<String, String> keysMap = new HashMap<String, String>();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087-00000-00000";
-    String endKey = "0a070025-0a07807a-06-aab8-c360-99999-99999";
-    keysMap.put("startKey", startKey);
-    keysMap.put("endKey", endKey);
-
-    long startTime = 139812323L; // in seconds
-    long endTime = 139923424L; // in seconds
-    long maxResultSize = 673424;
-
-    // actual call
-    Scan scan = pcapGetter.createScanRequest(pcapsResponse, keysMap, startTime,
-        endTime, maxResultSize);
-
-    // verify time range
-    Assert.isTrue(scan.getTimeRange().getMin() == startTime * 1000 * 1000); // compare
-                                                                            // in
-                                                                            // micros
-    Assert.isTrue(scan.getTimeRange().getMax() == endTime * 1000 * 1000); // compare
-                                                                          // in
-                                                                          // micros
-
-    // verify start and stop rows
-    Assert.isTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
-    Assert.isTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapHelperTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapHelperTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapHelperTest.java
deleted file mode 100644
index 88392d1..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapHelperTest.java
+++ /dev/null
@@ -1,321 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.eclipse.jdt.internal.core.Assert;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-import com.cisco.opensoc.hbase.client.PcapHelper;
-import com.cisco.opensoc.hbase.client.PcapHelper.TimeUnit;
-
-// TODO: Auto-generated Javadoc
-/**
- * The Class PcapHelperTest.
- * 
- * @author Sayi
- */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest(PcapHelper.class)
-public class PcapHelperTest {
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-    PowerMockito.spy(PcapHelper.class);
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in SECONDS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_seconds() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.SECONDS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222L; // input time in seconds
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222L == time);
-  }
-
-  /**
-   * Input time is in MILLIS and data creation time is in SECONDS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_millis_seconds() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.SECONDS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222333L; // input time in millis
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222L == time);
-  }
-
-  /**
-   * Input time is in MICROS and data creation time is in SECONDS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_micros_seconds() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.SECONDS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222333444L; // input time in micros
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MILLIS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_millis() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MILLIS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222L; // input time in seconds
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222000L == time);
-  }
-
-  /**
-   * Input time is in MILLIS and data creation time is in MILLIS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_millis_millis() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MILLIS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 111112222233L; // input time in millis
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(111112222233L == time);
-  }
-
-  /**
-   * Input time is in MICROS and data creation time is in MILLIS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_micros_millis() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MILLIS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 111112222233344L; // input time in micros
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(111112222233L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_micros() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222L; // input time in seconds
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222000000L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_micros_random() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 13388; // input time in seconds
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(13388000000L == time);
-  }
-
-  /**
-   * Input time is in MILLIS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_millis_micros() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 111112222233L; // input time in millis
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(111112222233000L == time);
-  }
-
-  /**
-   * Input time is in MICROS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_micros_micros() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1111122222334444L; // input time in micros
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1111122222334444L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_micros_0() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 0; // input time in micros
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(0 == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_micros_1() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = 1; // input time in micros
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(1000000L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_seconds_micros_decimal() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long inputTime = 13; // input time in seconds (double to long type casting)
-    long time = PcapHelper.convertSecondsToDataCreationTimeUnit(inputTime);
-
-    Assert.isTrue(13000000L == time);
-  }
-
-  /**
-   * Input time is in SECONDS and data creation time is in MICROS.
-   */
-  @Test
-  public void test_convertToDataCreationTimeUnit_() {
-    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
-        TimeUnit.MICROS);
-    PowerMockito.verifyNoMoreInteractions();
-
-    long endTime = (long) 111.333; // input time in seconds (double to long type
-                                   // casting)
-    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
-
-    Assert.isTrue(111000000L == time);
-  }
-
-  /**
-   * Test_get data creation time unit.
-   */
-  @Test
-  public void test_getDataCreationTimeUnit() {
-    TimeUnit dataCreationTimeUnit = PcapHelper.getDataCreationTimeUnit();
-    Assert.isTrue(TimeUnit.MICROS == dataCreationTimeUnit);
-  }
-
-  /**
-   * Test_reverse key_valid.
-   */
-  @Test
-  public void test_reverseKey_valid() {
-    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092";
-    String reversekey = PcapHelper.reverseKey(key);
-    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
-        .equals(reversekey));
-  }
-
-  /**
-   * Test_reverse key_valid_with fragment.
-   */
-  @Test
-  public void test_reverseKey_valid_withFragment() {
-    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId";
-    String reversekey = PcapHelper.reverseKey(key);
-    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
-        .equals(reversekey));
-  }
-
-  /**
-   * Test_reverse key_in valid.
-   */
-  @Test
-  public void test_reverseKey_inValid() {
-    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-ipId-fragmentId-extra";
-    String reversekey = PcapHelper.reverseKey(key);
-    Assert.isTrue("".equals(reversekey));
-  }
-
-  /**
-   * Test_reverse key_as list.
-   */
-  @Test
-  public void test_reverseKey_asList() {
-    String[] keys = {
-        "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId",
-        "162.242.152.24-162.242.153.12-UDP-38190-9092" };
-
-    List<String> reverseKeys = PcapHelper.reverseKey(Arrays.asList(keys));
-
-    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
-        .equals(reverseKeys.get(0)));
-    Assert.isTrue("162.242.153.12-162.242.152.24-UDP-9092-38190"
-        .equals(reverseKeys.get(1)));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapReceiverImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapReceiverImplTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapReceiverImplTest.java
deleted file mode 100644
index f59bea6..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapReceiverImplTest.java
+++ /dev/null
@@ -1,232 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.mockito.Mockito;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.util.Assert;
-
-import com.cisco.opensoc.hbase.client.PcapGetterHBaseImpl;
-import com.cisco.opensoc.hbase.client.PcapReceiverImpl;
-import com.cisco.opensoc.hbase.client.PcapsResponse;
-
-// TODO: Auto-generated Javadoc
-/**
- * The Class PcapReceiverImplTest.
- * 
- * @author Sayi
- */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest(PcapGetterHBaseImpl.class)
-public class PcapReceiverImplTest {
-
-  /** The pcap receiver. */
-  PcapReceiverImpl pcapReceiver = new PcapReceiverImpl();
-
-  /** The exception. */
-  @Rule
-  public ExpectedException exception = ExpectedException.none();
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test parse keys_single.
-   */
-  @Test
-  public void testParseKeys_single() {
-    String[] keysArr = { "234234234,565675675" };
-    List<String> keysList = Arrays.asList(keysArr);
-    List<String> parsedKeys = pcapReceiver.parseKeys(keysList);
-    Assert.isTrue(parsedKeys.size() == 2);
-  }
-
-  /**
-   * Test parse keys_multiple.
-   */
-  @Test
-  public void testParseKeys_multiple() {
-    String[] keysArr = { "234234234,565675675", "675757,234242" };
-    List<String> keysList = Arrays.asList(keysArr);
-    List<String> parsedKeys = pcapReceiver.parseKeys(keysList);
-    Assert.isTrue(parsedKeys.size() == 4);
-  }
-
-  /**
-   * Test parse keys_empty.
-   */
-  @Test
-  public void testParseKeys_empty() {
-    exception.expect(IllegalArgumentException.class);
-    pcapReceiver.parseKeys(Collections.<String> emptyList());
-  }
-
-  /**
-   * Test parse keys_null.
-   */
-  @Test
-  public void testParseKeys_null() {
-    exception.expect(IllegalArgumentException.class);
-    pcapReceiver.parseKeys(null);
-  }
-
-  /**
-   * Test_get pcaps by keys_complete response.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_getPcapsByKeys_completeResponse() throws IOException {
-    // mocking
-    String[] keysArry = { "0a07002b-0a078039-06-1e8b-0087",
-        "0a070025-0a07807a-06-aab8-c360" };
-    List<String> keys = Arrays.asList(keysArry);
-    String lastRowKey = null;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-    PcapsResponse response = new PcapsResponse();
-    response.setStatus(PcapsResponse.Status.COMPLETE);
-    List<byte[]> pcaps = new ArrayList<byte[]>();
-    byte[] pcap = { 12, 123 };
-    pcaps.add(pcap);
-    response.setPcaps(pcaps);
-
-    PcapGetterHBaseImpl pcapGetter = Mockito.mock(PcapGetterHBaseImpl.class);
-
-    PowerMockito.mockStatic(PcapGetterHBaseImpl.class);
-    PowerMockito.when(PcapGetterHBaseImpl.getInstance()).thenReturn(pcapGetter);
-    PowerMockito.when(
-        pcapGetter.getPcaps(keys, lastRowKey, startTime, endTime,
-            false, false, 6291456)).thenReturn(response);
-
-    PcapReceiverImpl restImpl = new PcapReceiverImpl();
-
-    // actual call
-    ResponseEntity<byte[]> result = restImpl.getPcapsByKeys(keys,
-        lastRowKey, startTime, endTime, false, false, null);
-
-    // verify
-    Assert.notNull(result);
-    Assert.notNull(result.getBody());
-    Assert.isTrue(result.getStatusCode() == HttpStatus.OK);
-    Assert.isTrue(result.getHeaders().size() == 1); // 'Content-Disposition'
-  }
-
-  /**
-   * Test_get pcaps by keys_partial response.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_getPcapsByKeys_partialResponse() throws IOException {
-    // mocking
-    String[] keysArry = { "0a07002b-0a078039-06-1e8b-0087",
-        "0a070025-0a07807a-06-aab8-c360" };
-    List<String> keys = Arrays.asList(keysArry);
-    String lastRowKey = null;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-    PcapsResponse response = new PcapsResponse();
-    response.setStatus(PcapsResponse.Status.PARTIAL);
-    List<byte[]> pcaps = new ArrayList<byte[]>();
-    byte[] pcap = { 12, 123 };
-    pcaps.add(pcap);
-    response.setPcaps(pcaps);
-
-    PcapGetterHBaseImpl pcapGetter = Mockito.mock(PcapGetterHBaseImpl.class);
-
-    PowerMockito.mockStatic(PcapGetterHBaseImpl.class);
-    PowerMockito.when(PcapGetterHBaseImpl.getInstance()).thenReturn(pcapGetter);
-    PowerMockito.when(
-        pcapGetter.getPcaps(keys, lastRowKey, startTime, endTime,
-            false, false, 6291456)).thenReturn(response);
-
-    PcapReceiverImpl restImpl = new PcapReceiverImpl();
-
-    // actual call
-    ResponseEntity<byte[]> result = restImpl.getPcapsByKeys(keys,
-        lastRowKey, startTime, endTime, false, false, null);
-
-    // verify
-    Assert.notNull(result);
-    Assert.notNull(result.getBody());
-    Assert.isTrue(result.getStatusCode() == HttpStatus.PARTIAL_CONTENT);
-    Assert.isTrue(result.getHeaders().size() == 2); // 'lastRowKey',
-                                                    // 'Content-Disposition'
-  }
-
-  /**
-   * Test_get pcaps by keys_partial no content.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_getPcapsByKeys_partialNoContent() throws IOException {
-    // mocking
-    String[] keysArry = { "0a07002b-0a078039-06-1e8b-0087",
-        "0a070025-0a07807a-06-aab8-c360" };
-    List<String> keys = Arrays.asList(keysArry);
-    String lastRowKey = null;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-    PcapsResponse response = new PcapsResponse();
-
-    PcapGetterHBaseImpl pcapGetter = Mockito.mock(PcapGetterHBaseImpl.class);
-
-    PowerMockito.mockStatic(PcapGetterHBaseImpl.class);
-    PowerMockito.when(PcapGetterHBaseImpl.getInstance()).thenReturn(pcapGetter);
-    PowerMockito.when(
-        pcapGetter.getPcaps(keys, lastRowKey, startTime, endTime,
-            false, false, 6291456)).thenReturn(response);
-
-    PcapReceiverImpl restImpl = new PcapReceiverImpl();
-
-    // actual call
-    ResponseEntity<byte[]> result = restImpl.getPcapsByKeys(keys,
-        lastRowKey, startTime, endTime, false, false, null);
-
-    // verify
-    Assert.notNull(result);
-    Assert.isNull(result.getBody());
-    Assert.isTrue(result.getStatusCode() == HttpStatus.NO_CONTENT);
-    Assert.isTrue(result.getHeaders().isEmpty());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImplTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImplTest.java
deleted file mode 100644
index 46f365a..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImplTest.java
+++ /dev/null
@@ -1,234 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import junit.framework.Assert;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scan;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-import com.cisco.opensoc.hbase.client.PcapScannerHBaseImpl;
-
-// TODO: Auto-generated Javadoc
-/**
- * The Class PcapScannerHBaseImplTest.
- */
-public class PcapScannerHBaseImplTest {
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test_create scan request.
-   * 
-   * @throws IOException
-   *           the IO exception
-   */
-  @Test
-  public void test_createScanRequest() throws IOException {
-    // mocking
-    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
-        .getInstance();
-    byte[] cf = "cf".getBytes();
-    byte[] cq = "pcap".getBytes();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087";
-    String endKey = "0a070025-0a07807a-06-aab8-c360";
-    long maxResultSize = 60;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-
-    // actual call
-    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
-        maxResultSize, -1, -1);
-
-    // verify
-    Assert.assertTrue(scan.getTimeRange().getMin() == 0);
-    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
-    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
-  }
-
-  /**
-   * Test_create scan request_with timestamps.
-   * 
-   * @throws IOException
-   *           the IO exception
-   */
-  @Test
-  public void test_createScanRequest_withTimestamps() throws IOException {
-    // mocking
-    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
-        .getInstance();
-    byte[] cf = "cf".getBytes();
-    byte[] cq = "pcap".getBytes();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087";
-    String endKey = "0a070025-0a07807a-06-aab8-c360";
-    long maxResultSize = 60;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-
-    // actual call
-    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
-        maxResultSize, startTime, endTime);
-
-    // verify
-    Assert.assertTrue(scan.getTimeRange().getMin() == 1376782349234555L);
-    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
-    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
-  }
-
-  /**
-   * Test_get pcaps_with all arguments.
-   * 
-   * @throws IOException
-   *           the IO exception
-   */
-  @SuppressWarnings({ "unchecked", "unused" })
-  @Test
-  public void test_getPcaps_withAllArguments() throws IOException {
-    // mocking
-    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
-        .getInstance();
-    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
-    byte[] cf = "cf".getBytes();
-    byte[] cq = "pcap".getBytes();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087";
-    String endKey = "0a070025-0a07807a-06-aab8-c360";
-    long maxResultSize = 60;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-
-    Mockito
-        .doReturn(mockPcaps)
-        .when(spy)
-        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
-            Mockito.any(Scan.class), Mockito.any(byte[].class),
-            Mockito.any(byte[].class));
-
-    // actual call
-    byte[] response = spy.getPcaps(startKey, endKey, maxResultSize, startTime,
-        endTime);
-
-    // verify
-    Assert.assertTrue(response.length == mockPcaps.get(0).length);
-  }
-
-  /**
-   * Test_get pcaps_with minimal arguments.
-   * 
-   * @throws IOException
-   *           the IO exception
-   */
-  @SuppressWarnings({ "unchecked", "unused" })
-  @Test
-  public void test_getPcaps_withMinimalArguments() throws IOException {
-    // mocking
-    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
-        .getInstance();
-    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
-    byte[] cf = "cf".getBytes();
-    byte[] cq = "pcap".getBytes();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087";
-    String endKey = "0a070025-0a07807a-06-aab8-c360";
-    long maxResultSize = 60;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-
-    Mockito
-        .doReturn(mockPcaps)
-        .when(spy)
-        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
-            Mockito.any(Scan.class), Mockito.any(byte[].class),
-            Mockito.any(byte[].class));
-
-    // actual call
-    byte[] response = spy.getPcaps(startKey, endKey);
-
-    // verify
-    Assert.assertTrue(response.length == mockPcaps.get(0).length);
-  }
-
-  /**
-   * Test_get pcaps_multiple pcaps.
-   * 
-   * @throws IOException
-   *           the IO exception
-   */
-  @SuppressWarnings({ "unchecked", "unused" })
-  @Test
-  public void test_getPcaps_multiplePcaps() throws IOException {
-    // mocking
-    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
-        .getInstance();
-    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
-    byte[] cf = "cf".getBytes();
-    byte[] cq = "pcap".getBytes();
-    String startKey = "0a07002b-0a078039-06-1e8b-0087";
-    String endKey = "0a070025-0a07807a-06-aab8-c360";
-    long maxResultSize = 60;
-    long startTime = 1376782349234555L;
-    long endTime = 1396782349234555L;
-
-    List<byte[]> mockPcaps = new ArrayList<byte[]>();
-    mockPcaps.add(getTestPcapBytes());
-    mockPcaps.add(getTestPcapBytes());
-
-    Mockito
-        .doReturn(mockPcaps)
-        .when(spy)
-        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
-            Mockito.any(Scan.class), Mockito.any(byte[].class),
-            Mockito.any(byte[].class));
-
-    // actual call
-    byte[] response = spy.getPcaps(startKey, endKey);
-
-    // verify
-    Assert.assertNotNull(response);
-    Assert.assertTrue(response.length > mockPcaps.get(0).length);
-  }
-
-  /**
-   * Gets the test pcap bytes.
-   * 
-   * @return the test pcap bytes
-   * @throws IOException
-   *           the IO exception
-   */
-  private byte[] getTestPcapBytes() throws IOException {
-    File fin = new File("src/test/resources/test-tcp-packet.pcap");
-    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
-    return pcapBytes;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/hbase-config.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/hbase-config.properties b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/hbase-config.properties
deleted file mode 100644
index 0efd799..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/hbase-config.properties
+++ /dev/null
@@ -1,40 +0,0 @@
-#hbase zoo keeper configuration
-hbase.zookeeper.quorum=dn1.hw.com,dn2.hw.com,dn3.hw.com
-hbase.zookeeper.clientPort=2181
-hbase.client.retries.number=1
-zookeeper.session.timeout=60000
-zookeeper.recovery.retry=0
-
-#hbase table configuration
-hbase.table.name=pcap
-hbase.table.column.family=t
-hbase.table.column.qualifier=pcap
-hbase.table.column.maxVersions=5
-
-# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
-hbase.scan.result.size.unit=MB
-hbase.scan.default.result.size=6
-hbase.scan.max.result.size=60
-
-# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
-hbase.table.data.time.unit=MICROS
-
-#number of retries in case of ZooKeeper or HBase server down
-hbase.hconnection.retries.number=3
-
-#configuration for including pcaps in the reverse traffic
-pcaps.include.reverse.traffic = false
-
-#maximum table row size in KB or MB 
-hbase.table.row.size.unit = KB
-hbase.table.max.row.size = 0.07
-
-# tokens of row key configuration
-hbase.table.row.key.tokens=7
-rest.api.input.key.min.tokens=5
-
-# whether or not to include the last row from the previous request, applicable for only partial response scenario
-hbase.table.scan.include.duplicate.lastrow= true;
-
-#number of digits for appending tokens of the row key
-hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/test-tcp-packet.pcap
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/test-tcp-packet.pcap b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/test-tcp-packet.pcap
deleted file mode 100644
index 25d47da..0000000
Binary files a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/resources/test-tcp-packet.pcap and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/.pmd
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/.pmd b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/.pmd
deleted file mode 100644
index 8a17775..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/.pmd
+++ /dev/null
@@ -1,1190 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<pmd>
-    <useProjectRuleSet>false</useProjectRuleSet>
-    <ruleSetFile>.ruleset</ruleSetFile>
-    <rules>
-        <rule>
-            <name>IfStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>IfElseStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>WhileLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>NullAssignment</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>OnlyOneReturn</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentInOperand</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AtLeastOneConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DontImportSun</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousOctalEscape</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperInConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryParentheses</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DefaultPackage</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanInversion</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DataflowAnomalyAnalysis</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFinalLocalVariable</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingShortType</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingVolatile</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingNativeCode</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAccessibilityAlteration</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotCallGarbageCollectionExplicitly</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>OneDeclarationPerLine</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidPrefixingMethodParameters</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidLiteralsInIfCondition</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UseObjectForClearerAPI</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UseConcurrentHashMap</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedPrivateField</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedLocalVariable</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedPrivateMethod</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedFormalParameter</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedModifier</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>MethodReturnsInternalArray</name>
-            <ruleset>Security Code Guidelines</ruleset>
-        </rule>
-        <rule>
-            <name>ArrayIsStoredDirectly</name>
-            <ruleset>Security Code Guidelines</ruleset>
-        </rule>
-        <rule>
-            <name>ProperCloneImplementation</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>CloneThrowsCloneNotSupportedException</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>CloneMethodMustImplementCloneable</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitStaticSuite</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitSpelling</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitAssertionsShouldIncludeMessage</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitTestsShouldIncludeAssert</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>TestClassWithoutTestCases</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryBooleanAssertion</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertEqualsInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertSameInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertNullInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanAssertion</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitTestContainsTooManyAsserts</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertTrueInsteadOfAssertEquals</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>CommentRequired</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>CommentSize</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>CommentContent</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>ShortVariable</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>LongVariable</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ShortMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>VariableNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MethodNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ClassNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractNaming</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDollarSigns</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MethodWithSameNameAsEnclosingClass</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousHashcodeMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousConstantFieldName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousEqualsMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFieldNameMatchingTypeName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFieldNameMatchingMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>NoPackage</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>PackageCase</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MisleadingVariableName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanGetMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ShortClassName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>GenericsNaming</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>DuplicateImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>DontImportJavaLang</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>ImportFromSamePackage</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyStaticImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryFullyQualifiedName</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceVectorWithList</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceHashtableWithMap</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceEnumerationWithIterator</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidEnumAsIdentifier</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAssertAsIdentifier</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>IntegerInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ByteInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ShortInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>LongInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseBeforeAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseAfterAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseTestAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4SuitesShouldUseSuiteAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitUseExpected</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryParentheses</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryBlock</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>DontNestJsfInJstlIteration</name>
-            <ruleset>Basic JSF</ruleset>
-        </rule>
-        <rule>
-            <name>MistypedCDATASection</name>
-            <ruleset>Basic XML</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperFirst</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperLast</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotHardCodeSDCard</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>JumbledIncrementer</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopShouldBeWhileLoop</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>OverrideBothEqualsAndHashcode</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DoubleCheckedLocking</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ReturnFromFinallyBlock</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>UnconditionalIfStatement</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanInstantiation</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CollapsibleIfStatements</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ClassCastExceptionWithToArray</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDecimalLiteralsInBigDecimalConstructor</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>MisplacedNullCheck</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThreadGroup</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BrokenNullCheck</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BigIntegerInstantiation</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingOctalValues</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingHardCodedIP</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CheckResultSet</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidMultipleUnaryOperators</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ExtendsObject</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CheckSkipResult</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidBranchingStatementAsLastInLoop</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DontCallThreadRun</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DontUseFloatTypeForLoopIndices</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyCatchBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyIfStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyWhileStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyTryBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyFinallyBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySwitchStatements</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySynchronizedBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementNotInLoop</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStaticInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>CouplingBetweenObjects</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveImports</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LooseCoupling</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LoosePackageCoupling</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LawOfDemeter</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryConversionTemporary</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryReturn</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryFinalModifier</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOverridingMethod</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOperationOnImmutable</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedNullCheckInEquals</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessParentheses</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>NoLongScripts</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoScriptlets</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoInlineStyleInformation</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoClassAttribute</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoJspForward</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>IframeMissingSrcAttribute</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoHtmlComments</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>DuplicateJspImports</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>JspEncoding</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoInlineScript</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingThrowable</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>SignatureDeclareThrowsException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>ExceptionAsFlowControl</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingNPE</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingRawExceptionTypes</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingNullPointerException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidRethrowingException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotExtendJavaLangError</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotThrowExceptionInFinally</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingNewInstanceOfSameException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingGenericException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidLosingExceptionInformation</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>UseCorrectExceptionLogging</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>ProperLogger</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>GuardDebugLogging</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>LocalVariableCouldBeFinal</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>MethodArgumentCouldBeFinal</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidInstantiatingObjectsInLoops</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseArrayListInsteadOfVector</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyStartsWith</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseStringBufferForStringAppends</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseArraysAsList</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidArrayLoops</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryWrapperObjectCreation</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AddEmptyString</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>RedundantFieldInitializer</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>PrematureDeclaration</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseProperClassLoader</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>MDBAndSessionBeanNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>RemoteSessionInterfaceNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>LocalInterfaceSessionNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>LocalHomeNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>RemoteInterfaceNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotCallSystemExit</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>StaticEJBFieldShouldBeFinal</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotUseThreads</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDuplicateLiterals</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringInstantiation</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringToString</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InefficientStringBuffering</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryCaseChange</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseStringBufferLength</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>AppendCharacterWithChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>ConsecutiveLiteralAppends</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseIndexOfChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InefficientEmptyStringCheck</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InsufficientStringBufferDeclaration</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UselessStringValueOf</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringBufferInstantiationWithChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseEqualsToCompareStrings</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidStringBufferField</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>MoreThanOneLogger</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>LoggerIsNotStaticFinal</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>SystemPrintln</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidPrintStackTrace</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>UseConcatOnce</name>
-            <ruleset>XPath in XSL</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAxisNavigation</name>
-            <ruleset>XPath in XSL</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentInOperand</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>UnreachableCode</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>InnaccurateNumericLiteral</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>ConsistentReturn</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>ScopeForInVariable</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>EqualComparison</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>GlobalVariable</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidTrailingComma</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>IfStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>WhileLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>IfElseStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyFinalizer</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeOnlyCallsSuperFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeOverloaded</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeDoesNotCallSuperFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeShouldBeProtected</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCallingFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>UseSingleton</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanReturns</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanExpressions</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SwitchStmtsShouldHaveDefault</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDeeplyNestedIfStmts</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidReassigningParameters</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SwitchDensity</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ConstructorCallsOverridableMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AccessorClassGeneration</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>FinalFieldCouldBeStatic</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>CloseResource</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonStaticInitializer</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>DefaultLabelNotLastInSwitchStmt</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonCaseLabelInSwitchStatement</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>OptimizableToArrayCall</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>BadComparison</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>EqualsNull</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ConfusingTernary</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>InstantiationToGetClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>IdempotentOperations</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimpleDateFormatNeedsLocale</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ImmutableField</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseLocaleWithCaseConversions</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidProtectedFieldInFinalClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentToNonFinalStatic</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>MissingStaticMethodInNonInstantiatableClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidSynchronizedAtMethodLevel</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>MissingBreakInSwitch</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseNotifyAllInsteadOfNotify</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidInstanceofChecksInCatchClause</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractClassWithoutAbstractMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyConditional</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>CompareObjectsWithEquals</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>PositionLiteralsFirstInComparisons</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryLocalBeforeReturn</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonThreadSafeSingleton</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UncommentedEmptyMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UncommentedEmptyConstructor</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidConstantsInterface</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UnsynchronizedStaticDateFormatter</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>PreserveStackTrace</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseCollectionIsEmpty</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ClassWithOnlyPrivateConstructorsShouldBeFinal</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyMethodInAbstractClassShouldBeAbstract</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SingularField</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ReturnEmptyArrayRatherThanNull</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractClassWithoutAnyMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>TooFewBranchesForASwitchStatement</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>LogicInversion</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseVarargs</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>FieldDeclarationsShouldBeAtStartOfClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>GodClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NPathComplexity</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveMethodLength</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveParameterList</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveClassLength</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>CyclomaticComplexity</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessivePublicCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyFields</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssMethodCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssTypeCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssConstructorCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyMethods</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>BeanMembersShouldSerialize</name>
-            <ruleset>JavaBeans</ruleset>
-        </rule>
-        <rule>
-            <name>MissingSerialVersionUID</name>
-            <ruleset>JavaBeans</ruleset>
-        </rule>
-    </rules>
-    <includeDerivedFiles>false</includeDerivedFiles>
-    <violationsAsErrors>true</violationsAsErrors>
-    <fullBuildEnabled>true</fullBuildEnabled>
-</pmd>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/README.txt
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/README.txt b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/README.txt
deleted file mode 100644
index f4203f9..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/README.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-'rest' module contains the web layer configuration for REST API which communicates with HBase and fetches pcaps.
-Spring frameworks 'org.springframework.web.servlet.DispatcherServlet' is configured to intercept all requests (/*) and the 
-application context is built using the configuration file 'ipcap-config.xml'.
-
-REST APIs :
-1. http://{hostname:port}//cisco-rest/pcapGetter/getPcapsByKeys?<query params here. Refer documentation below>
-2. http://mon.hw.com:8090/cisco-rest-0.0.5-SNAPSHOT/pcapGetter/getPcapsByKeyRange?<query params here. Refer documentation below>
-3. http://mon.hw.com:8090/cisco-rest-0.0.5-SNAPSHOT/pcapGetter/getPcapsByIdentifiers?<query params here. Refer documentation below>
-
-Refer the wiki documentation for further details : https://hwcsco.atlassian.net/wiki/pages/viewpage.action?pageId=5242892	

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/pom.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/pom.xml
deleted file mode 100644
index bf5f5db..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/pom.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<parent>
-		<groupId>com.cisco.opensoc</groupId>
-		<artifactId>opensoc</artifactId>
-		<version>0.1.3-SNAPSHOT</version>
-		<relativePath>../../../pom.xml</relativePath>
-	</parent>
-	<artifactId>opensoc-rest-service</artifactId>
-	<packaging>war</packaging>
-
-
-	<dependencies>
-		<dependency>
-			<groupId>com.cisco.opensoc</groupId>
-			<artifactId>opensoc-hbase</artifactId>
-			<version>${project.parent.version}</version>
-		</dependency>
-	
-		<dependency>
-			<groupId>org.springframework.integration</groupId>
-			<artifactId>spring-integration-http</artifactId>
-			<version>${spring.integration.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.springframework</groupId>
-			<artifactId>spring-webmvc</artifactId>
-			<version>${spring.version}</version>
-		</dependency>
-		
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-			<version>${logger.version}</version>
-			<exclusions>
-				<exclusion>
-					<groupId>com.sun.jmx</groupId>
-					<artifactId>jmxri</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.sun.jdmk</groupId>
-					<artifactId>jmxtools</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>javax.jms</groupId>
-					<artifactId>jms</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-	</dependencies>
-</project>
\ No newline at end of file



[11/26] incubator-metron git commit: replace opensoc-streaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImpl.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImpl.java
deleted file mode 100644
index 9d85639..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapGetterHBaseImpl.java
+++ /dev/null
@@ -1,790 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.NoServerForRegionException;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Logger;
-import org.springframework.util.Assert;
-import org.springframework.util.CollectionUtils;
-
-import com.google.common.annotations.VisibleForTesting;
-
-/**
- * Singleton class which integrates with HBase table and returns pcaps sorted by
- * timestamp(dsc) for the given list of keys. Creates HConnection if it is not
- * already created and the same connection instance is being used for all
- * requests
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public class PcapGetterHBaseImpl implements IPcapGetter {
-
-  /** The pcap getter h base. */
-  private static IPcapGetter pcapGetterHBase = null;
-
-  /** The Constant LOG. */
-  private static final Logger LOGGER = Logger
-      .getLogger(PcapGetterHBaseImpl.class);
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List,
-   * java.lang.String, long, long, boolean, boolean, long)
-   */
-  @Override
-  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
-      long startTime, long endTime, boolean includeReverseTraffic,
-      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
-    Assert
-        .isTrue(
-            checkIfValidInput(keys, lastRowKey),
-            "No valid input. One of the value must be present from {keys, lastRowKey}");
-    LOGGER.info(" keys=" + keys.toString() + ";  lastRowKey="
-        + lastRowKey);
-
-    PcapsResponse pcapsResponse = new PcapsResponse();
-    // 1. Process partial response key
-    if (StringUtils.isNotEmpty(lastRowKey)) {
-      pcapsResponse = processKey(pcapsResponse, lastRowKey, startTime,
-          endTime, true, includeDuplicateLastRow, maxResultSize);
-      // LOGGER.debug("after scanning lastRowKey=" +
-      // pcapsResponse.toString()+"*********************************************************************");
-      if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
-        return pcapsResponse;
-      }
-    }
-    // 2. Process input keys
-    List<String> sortedKeys = sortKeysByAscOrder(keys, includeReverseTraffic);
-    List<String> unprocessedKeys = new ArrayList<String>();
-    unprocessedKeys.addAll(sortedKeys);
-    if (StringUtils.isNotEmpty(lastRowKey)) {
-      unprocessedKeys.clear();
-      unprocessedKeys = getUnprocessedSublistOfKeys(sortedKeys,
-          lastRowKey);
-    }
-    LOGGER.info("unprocessedKeys in getPcaps" + unprocessedKeys.toString());
-    if (!CollectionUtils.isEmpty(unprocessedKeys)) {
-      for (int i = 0; i < unprocessedKeys.size(); i++) {
-        pcapsResponse = processKey(pcapsResponse, unprocessedKeys.get(i),
-            startTime, endTime, false, includeDuplicateLastRow, maxResultSize);
-        // LOGGER.debug("after scanning input unprocessedKeys.get(" + i + ") ="
-        // +
-        // pcapsResponse.toString()+"*********************************************************************");
-        if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
-          return pcapsResponse;
-        }
-      }
-    }
-    return pcapsResponse;
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String, long,
-   * long, boolean)
-   */
-  @Override
-  public PcapsResponse getPcaps(String key, long startTime, long endTime,
-      boolean includeReverseTraffic) throws IOException {
-    Assert.hasText(key, "key must not be null or empty");
-    return getPcaps(Arrays.asList(key), null, startTime, endTime,
-        includeReverseTraffic, false, ConfigurationUtil.getDefaultResultSize());
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List)
-   */
-  @Override
-  public PcapsResponse getPcaps(List<String> keys) throws IOException {
-    Assert.notEmpty(keys, "'keys' must not be null or empty");
-    return getPcaps(keys, null, -1, -1,
-        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
-        ConfigurationUtil.getDefaultResultSize());
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String)
-   */
-  @Override
-  public PcapsResponse getPcaps(String key) throws IOException {
-    Assert.hasText(key, "key must not be null or empty");
-    return getPcaps(Arrays.asList(key), null, -1, -1,
-        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
-        ConfigurationUtil.getDefaultResultSize());
-  }
-
-  /**
-   * Always returns the singleton instance.
-   * 
-   * @return IPcapGetter singleton instance
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public static IPcapGetter getInstance() throws IOException {
-    if (pcapGetterHBase == null) {
-      synchronized (PcapGetterHBaseImpl.class) {
-        if (pcapGetterHBase == null) {
-          pcapGetterHBase = new PcapGetterHBaseImpl();
-        }
-      }
-    }
-    return pcapGetterHBase;
-  }
-
-  /**
-   * Instantiates a new pcap getter h base impl.
-   */
-  private PcapGetterHBaseImpl() {
-  }
-
-  /**
-   * Adds reverse keys to the list if the flag 'includeReverseTraffic' is set to
-   * true; removes duplicates and sorts the list by ascending order;.
-   * 
-   * @param keys
-   *          input keys
-   * @param includeReverseTraffic
-   *          flag whether or not to include reverse traffic
-   * @return List<String>
-   */
-  @VisibleForTesting
-  List<String> sortKeysByAscOrder(List<String> keys,
-      boolean includeReverseTraffic) {
-    Assert.notEmpty(keys, "'keys' must not be null");
-    if (includeReverseTraffic) {
-      keys.addAll(PcapHelper.reverseKey(keys));
-    }
-    List<String> deDupKeys = removeDuplicateKeys(keys);
-    Collections.sort(deDupKeys);
-    return deDupKeys;
-  }
-
-  /**
-   * Removes the duplicate keys.
-   * 
-   * @param keys
-   *          the keys
-   * @return the list
-   */
-  @VisibleForTesting
-  List<String> removeDuplicateKeys(List<String> keys) {
-    Set<String> set = new HashSet<String>(keys);
-    return new ArrayList<String>(set);
-  }
-
-  /**
-   * <p>
-   * Returns the sublist starting from the element after the lastRowKey
-   * to the last element in the list; if the 'lastRowKey' is not matched
-   * the complete list will be returned.
-   * </p>
-   * 
-   * <pre>
-   * Eg :
-   *  keys = [18800006-1800000b-06-0019-caac, 18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
-   *  lastRowKey = "18800006-1800000b-06-0019-caac-65140-40815"
-   *  and the response from this method [18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
-   * </pre>
-   * 
-   * @param keys
-   *          keys
-   * @param lastRowKey
-   *          last row key of the previous partial response
-   * @return List<String>
-   */
-  @VisibleForTesting
-  List<String> getUnprocessedSublistOfKeys(List<String> keys,
-      String lastRowKey) {
-    Assert.notEmpty(keys, "'keys' must not be null");
-    Assert.hasText(lastRowKey, "'lastRowKey' must not be null");
-    String partialKey = getTokens(lastRowKey, 5);
-    int startIndex = 0;
-    for (int i = 0; i < keys.size(); i++) {
-      if (partialKey.equals(keys.get(i))) {
-        startIndex = i + 1;
-        break;
-      }
-    }
-    List<String> unprocessedKeys = keys.subList(startIndex, keys.size());
-    return unprocessedKeys;
-  }
-
-  /**
-   * Returns the first 'noOfTokens' tokens from the given key; token delimiter
-   * "-";.
-   * 
-   * @param key
-   *          given key
-   * @param noOfTokens
-   *          number of tokens to retrieve
-   * @return the tokens
-   */
-  @VisibleForTesting
-  String getTokens(String key, int noOfTokens) {
-    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
-    String regex = "\\" + delimeter;
-    String[] keyTokens = key.split(regex);
-    Assert.isTrue(noOfTokens < keyTokens.length,
-        "Invalid value for 'noOfTokens'");
-    StringBuffer sbf = new StringBuffer();
-    for (int i = 0; i < noOfTokens; i++) {
-      sbf.append(keyTokens[i]);
-      if (i != (noOfTokens - 1)) {
-        sbf.append(HBaseConfigConstants.PCAP_KEY_DELIMETER);
-      }
-
-    }
-    return sbf.toString();
-  }
-
-  /**
-   * Process key.
-   * 
-   * @param pcapsResponse
-   *          the pcaps response
-   * @param key
-   *          the key
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @param isPartialResponse
-   *          the is partial response
-   * @param includeDuplicateLastRow
-   *          the include duplicate last row
-   * @param maxResultSize
-   *          the max result size
-   * @return the pcaps response
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @VisibleForTesting
-  PcapsResponse processKey(PcapsResponse pcapsResponse, String key,
-      long startTime, long endTime, boolean isPartialResponse,
-      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
-    HTable table = null;
-    Scan scan = null;
-    List<Cell> scannedCells = null;
-    try {
-      // 1. Create start and stop row for the key;
-      Map<String, String> keysMap = createStartAndStopRowKeys(key,
-          isPartialResponse, includeDuplicateLastRow);
-
-      // 2. if the input key contains all fragments (7) and it is not part
-      // of previous partial response (isPartialResponse),
-      // 'keysMap' will be null; do a Get; currently not doing any
-      // response size related checks for Get;
-      // by default all cells from a specific row are sorted by timestamp
-      if (keysMap == null) {
-        Get get = createGetRequest(key, startTime, endTime);
-        List<Cell> cells = executeGetRequest(table, get);
-        for (Cell cell : cells) {
-          pcapsResponse.addPcaps(CellUtil.cloneValue(cell));
-        }
-        return pcapsResponse;
-      }
-      // 3. Create and execute Scan request
-      scan = createScanRequest(pcapsResponse, keysMap, startTime, endTime,
-          maxResultSize);
-      scannedCells = executeScanRequest(table, scan);
-      LOGGER.info("scannedCells size :" + scannedCells.size());
-      addToResponse(pcapsResponse, scannedCells, maxResultSize);
-
-    } catch (IOException e) {
-      LOGGER.error("Exception occurred while fetching Pcaps for the keys :"
-          + key, e);
-      if (e instanceof ZooKeeperConnectionException
-          || e instanceof MasterNotRunningException
-          || e instanceof NoServerForRegionException) {
-        int maxRetryLimit = ConfigurationUtil.getConnectionRetryLimit();
-        System.out.println("maxRetryLimit =" + maxRetryLimit);
-        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
-          System.out.println("attempting  =" + attempt);
-          try {
-            HBaseConfigurationUtil.closeConnection(); // closing the
-            // existing
-            // connection
-            // and retry,
-            // it will
-            // create a new
-            // HConnection
-            scannedCells = executeScanRequest(table, scan);
-            addToResponse(pcapsResponse, scannedCells, maxResultSize);
-            break;
-          } catch (IOException ie) {
-            if (attempt == maxRetryLimit) {
-              LOGGER.error("Throwing the exception after retrying "
-                  + maxRetryLimit + " times.");
-              throw e;
-            }
-          }
-        }
-      }
-
-    } finally {
-      if (table != null) {
-        table.close();
-      }
-    }
-    return pcapsResponse;
-  }
-
-  /**
-   * Adds the to response.
-   * 
-   * @param pcapsResponse
-   *          the pcaps response
-   * @param scannedCells
-   *          the scanned cells
-   * @param maxResultSize
-   *          the max result size
-   */
-  private void addToResponse(PcapsResponse pcapsResponse,
-      List<Cell> scannedCells, long maxResultSize) {
-    String lastKeyFromCurrentScan = null;
-    if (scannedCells != null && scannedCells.size() > 0) {
-      lastKeyFromCurrentScan = new String(CellUtil.cloneRow(scannedCells
-          .get(scannedCells.size() - 1)));
-    }
-    // 4. calculate the response size
-    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
-    for (Cell sortedCell : scannedCells) {
-      pcapsResponse.addPcaps(CellUtil.cloneValue(sortedCell));
-    }
-    if (!pcapsResponse.isResonseSizeWithinLimit(maxResultSize)) {
-      pcapsResponse.setStatus(PcapsResponse.Status.PARTIAL); // response size
-                                                             // reached
-      pcapsResponse.setLastRowKey(new String(lastKeyFromCurrentScan));
-    }
-  }
-
-  /**
-   * Builds start and stop row keys according to the following logic : 1.
-   * Creates tokens out of 'key' using pcap_id delimiter ('-') 2. if the input
-   * 'key' contains (assume : configuredTokensInRowKey=7 and
-   * minimumTokensIninputKey=5): a). 5 tokens
-   * ("srcIp-dstIp-protocol-srcPort-dstPort") startKey =
-   * "srcIp-dstIp-protocol-srcPort-dstPort-00000-00000" stopKey =
-   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999" b). 6 tokens
-   * ("srcIp-dstIp-protocol-srcPort-dstPort-id1") startKey =
-   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-00000" stopKey =
-   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-99999"
-   * 
-   * c). 7 tokens ("srcIp-dstIp-protocol-srcPort-dstPort-id1-id2") 1>. if the
-   * key is NOT part of the partial response from previous request, return
-   * 'null' 2>. if the key is part of partial response from previous request
-   * startKey = "srcIp-dstIp-protocol-srcPort-dstPort-id1-(id2+1)"; 1 is added
-   * to exclude this key as it was included in the previous request stopKey =
-   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999"
-   * 
-   * @param key
-   *          the key
-   * @param isLastRowKey
-   *          if the key is part of partial response
-   * @param includeDuplicateLastRow
-   *          the include duplicate last row
-   * @return Map<String, String>
-   */
-  @VisibleForTesting
-  Map<String, String> createStartAndStopRowKeys(String key,
-      boolean isLastRowKey, boolean includeDuplicateLastRow) {
-    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
-    String regex = "\\" + delimeter;
-    String[] keyTokens = key.split(regex);
-
-    String startKey = null;
-    String endKey = null;
-    Map<String, String> map = new HashMap<String, String>();
-
-    int configuredTokensInRowKey = ConfigurationUtil
-        .getConfiguredTokensInRowkey();
-    int minimumTokensIninputKey = ConfigurationUtil
-        .getMinimumTokensInInputkey();
-    Assert
-        .isTrue(
-            minimumTokensIninputKey <= configuredTokensInRowKey,
-            "tokens in the input key (separated by '-'), must be less than or equal to the tokens used in hbase table row key ");
-    // in case if the input key contains 'configuredTokensInRowKey' tokens and
-    // it is NOT a
-    // partial response key, do a Get instead of Scan
-    if (keyTokens.length == configuredTokensInRowKey) {
-      if (!isLastRowKey) {
-        return null;
-      }
-      // it is a partial response key; 'startKey' is same as input partial
-      // response key; 'endKey' can be built by replacing
-      // (configuredTokensInRowKey - minimumTokensIninputKey) tokens
-      // of input partial response key with '99999'
-      if (keyTokens.length == minimumTokensIninputKey) {
-        return null;
-      }
-      int appendingTokenSlots = configuredTokensInRowKey
-          - minimumTokensIninputKey;
-      if (appendingTokenSlots > 0) {
-        String partialKey = getTokens(key, minimumTokensIninputKey);
-        StringBuffer sbfStartNew = new StringBuffer(partialKey);
-        StringBuffer sbfEndNew = new StringBuffer(partialKey);
-        for (int i = 0; i < appendingTokenSlots; i++) {
-          if (i == (appendingTokenSlots - 1)) {
-            if (!includeDuplicateLastRow) {
-              sbfStartNew
-                  .append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
-                  .append(
-                      Integer.valueOf(keyTokens[minimumTokensIninputKey + i]) + 1);
-            } else {
-              sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
-                  .append(keyTokens[minimumTokensIninputKey + i]);
-            }
-          } else {
-            sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
-                keyTokens[minimumTokensIninputKey + i]);
-          }
-          sbfEndNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
-              getMaxLimitForAppendingTokens());
-        }
-        startKey = sbfStartNew.toString();
-        endKey = sbfEndNew.toString();
-      }
-    } else {
-      StringBuffer sbfStart = new StringBuffer(key);
-      StringBuffer sbfEnd = new StringBuffer(key);
-      for (int i = keyTokens.length; i < configuredTokensInRowKey; i++) {
-        sbfStart.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
-            getMinLimitForAppendingTokens());
-        sbfEnd.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
-            getMaxLimitForAppendingTokens());
-      }
-      startKey = sbfStart.toString();
-      endKey = sbfEnd.toString();
-    }
-    map.put(HBaseConfigConstants.START_KEY, startKey);
-    map.put(HBaseConfigConstants.END_KEY, endKey);
-
-    return map;
-  }
-
-  /**
-   * Returns false if keys is empty or null AND lastRowKey is null or
-   * empty; otherwise returns true;.
-   * 
-   * @param keys
-   *          input row keys
-   * @param lastRowKey
-   *          partial response key
-   * @return boolean
-   */
-  @VisibleForTesting
-  boolean checkIfValidInput(List<String> keys, String lastRowKey) {
-    if (CollectionUtils.isEmpty(keys)
-        && StringUtils.isEmpty(lastRowKey)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Executes the given Get request.
-   * 
-   * @param table
-   *          hbase table
-   * @param get
-   *          Get
-   * @return List<Cell>
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private List<Cell> executeGetRequest(HTable table, Get get)
-      throws IOException {
-    LOGGER.info("Get :" + get.toString());
-    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
-        ConfigurationUtil.getTableName());
-    Result result = table.get(get);
-    List<Cell> cells = result.getColumnCells(
-        ConfigurationUtil.getColumnFamily(),
-        ConfigurationUtil.getColumnQualifier());
-    return cells;
-  }
-
-  /**
-   * Execute scan request.
-   * 
-   * @param table
-   *          hbase table
-   * @param scan
-   *          the scan
-   * @return the list
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private List<Cell> executeScanRequest(HTable table, Scan scan)
-      throws IOException {
-    LOGGER.info("Scan :" + scan.toString());
-    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
-    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
-    ResultScanner resultScanner = table.getScanner(scan);
-    List<Cell> scannedCells = new ArrayList<Cell>();
-    for (Result result = resultScanner.next(); result != null; result = resultScanner
-        .next()) {
-      List<Cell> cells = result.getColumnCells(
-          ConfigurationUtil.getColumnFamily(),
-          ConfigurationUtil.getColumnQualifier());
-      if (cells != null) {
-        for (Cell cell : cells) {
-          scannedCells.add(cell);
-        }
-      }
-    }
-    return scannedCells;
-  }
-
-  /**
-   * Creates the get request.
-   * 
-   * @param key
-   *          the key
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @return the gets the
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @VisibleForTesting
-  Get createGetRequest(String key, long startTime, long endTime)
-      throws IOException {
-    Get get = new Get(Bytes.toBytes(key));
-    // set family name
-    get.addFamily(ConfigurationUtil.getColumnFamily());
-
-    // set column family, qualifier
-    get.addColumn(ConfigurationUtil.getColumnFamily(),
-        ConfigurationUtil.getColumnQualifier());
-
-    // set max versions
-    get.setMaxVersions(ConfigurationUtil.getMaxVersions());
-
-    // set time range
-    setTimeRangeOnGet(get, startTime, endTime);
-    return get;
-  }
-
-  /**
-   * Creates the scan request.
-   * 
-   * @param pcapsResponse
-   *          the pcaps response
-   * @param keysMap
-   *          the keys map
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @param maxResultSize
-   *          the max result size
-   * @return the scan
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @VisibleForTesting
-  Scan createScanRequest(PcapsResponse pcapsResponse,
-      Map<String, String> keysMap, long startTime, long endTime,
-      long maxResultSize) throws IOException {
-    Scan scan = new Scan();
-    // set column family, qualifier
-    scan.addColumn(ConfigurationUtil.getColumnFamily(),
-        ConfigurationUtil.getColumnQualifier());
-
-    // set start and stop keys
-    scan.setStartRow(keysMap.get(HBaseConfigConstants.START_KEY).getBytes());
-    scan.setStopRow(keysMap.get(HBaseConfigConstants.END_KEY).getBytes());
-
-    // set max results size : remaining size = max results size - ( current
-    // pcaps response size + possible maximum row size)
-    long remainingSize = maxResultSize
-        - (pcapsResponse.getResponseSize() + ConfigurationUtil.getMaxRowSize());
-
-    if (remainingSize > 0) {
-      scan.setMaxResultSize(remainingSize);
-    }
-    // set max versions
-    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
-        "hbase.table.column.maxVersions"));
-
-    // set time range
-    setTimeRangeOnScan(scan, startTime, endTime);
-    return scan;
-  }
-
-  /**
-   * Sets the time range on scan.
-   * 
-   * @param scan
-   *          the scan
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private void setTimeRangeOnScan(Scan scan, long startTime, long endTime)
-      throws IOException {
-    boolean setTimeRange = true;
-    if (startTime < 0 && endTime < 0) {
-      setTimeRange = false;
-    }
-    if (setTimeRange) {
-      if (startTime < 0) {
-        startTime = 0;
-      } else {
-        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
-      }
-      if (endTime < 0) {
-        endTime = Long.MAX_VALUE;
-      } else {
-        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
-      }
-      Assert.isTrue(startTime < endTime,
-          "startTime value must be less than endTime value");
-      scan.setTimeRange(startTime, endTime);
-    }
-  }
-
-  /**
-   * Sets the time range on get.
-   * 
-   * @param get
-   *          the get
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private void setTimeRangeOnGet(Get get, long startTime, long endTime)
-      throws IOException {
-    boolean setTimeRange = true;
-    if (startTime < 0 && endTime < 0) {
-      setTimeRange = false;
-    }
-    if (setTimeRange) {
-      if (startTime < 0) {
-        startTime = 0;
-      } else {
-        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
-      }
-      if (endTime < 0) {
-        endTime = Long.MAX_VALUE;
-      } else {
-        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
-      }
-      Assert.isTrue(startTime < endTime,
-          "startTime value must be less than endTime value");
-      get.setTimeRange(startTime, endTime);
-    }
-  }
-
-  /**
-   * Gets the min limit for appending tokens.
-   * 
-   * @return the min limit for appending tokens
-   */
-  private String getMinLimitForAppendingTokens() {
-    int digits = ConfigurationUtil.getAppendingTokenDigits();
-    StringBuffer sbf = new StringBuffer();
-    for (int i = 0; i < digits; i++) {
-      sbf.append("0");
-    }
-    return sbf.toString();
-  }
-
-  /**
-   * Gets the max limit for appending tokens.
-   * 
-   * @return the max limit for appending tokens
-   */
-  private String getMaxLimitForAppendingTokens() {
-    int digits = ConfigurationUtil.getAppendingTokenDigits();
-    StringBuffer sbf = new StringBuffer();
-    for (int i = 0; i < digits; i++) {
-      sbf.append("9");
-    }
-    return sbf.toString();
-  }
-
-  /**
-   * The main method.
-   * 
-   * @param args
-   *          the arguments
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public static void main(String[] args) throws IOException {
-    if (args == null || args.length < 2) {
-      usage();
-      return;
-    }
-    String outputFileName = null;
-    outputFileName = args[1];
-    List<String> keys = Arrays.asList(StringUtils.split(args[2], ","));
-    System.out.println("Geting keys " + keys);
-    long startTime = 0;
-    long endTime = Long.MAX_VALUE;
-    if (args.length > 3) {
-      startTime = Long.valueOf(args[3]);
-    }
-    if (args.length > 4) {
-      endTime = Long.valueOf(args[4]);
-    }
-    System.out.println("With start time " + startTime + " and end time "
-        + endTime);
-    PcapGetterHBaseImpl downloader = new PcapGetterHBaseImpl();
-    PcapsResponse pcaps = downloader.getPcaps(keys, null, startTime, endTime,
-        false, false, 6);
-    File file = new File(outputFileName);
-    FileUtils.write(file, "", false);
-    FileUtils.writeByteArrayToFile(file, pcaps.getPcaps(), true);
-  }
-
-  /**
-   * Usage.
-   */
-  private static void usage() {
-    System.out.println("java " + PcapGetterHBaseImpl.class.getName() // $codepro.audit.disable
-        // debuggingCode
-        + " <zk quorum> <output file> <start key> [stop key]");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapHelper.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapHelper.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapHelper.java
deleted file mode 100644
index 469974f..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapHelper.java
+++ /dev/null
@@ -1,205 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.mortbay.log.Log;
-import org.springframework.util.Assert;
-
-import com.google.common.annotations.VisibleForTesting;
-
-/**
- * utility class which holds methods related to time conversions, building
- * reverse keys.
- */
-public class PcapHelper {
-
-  /** The Constant LOGGER. */
-  private static final Logger LOGGER = Logger.getLogger(PcapHelper.class);
-
-  /** The cell timestamp comparator. */
-  private static CellTimestampComparator CELL_TIMESTAMP_COMPARATOR = new CellTimestampComparator();
-
-  /**
-   * The Enum TimeUnit.
-   */
-  public enum TimeUnit {
-
-    /** The seconds. */
-    SECONDS,
-    /** The millis. */
-    MILLIS,
-    /** The micros. */
-    MICROS,
-    /** The unknown. */
-    UNKNOWN
-  };
-
-  /**
-   * Converts the given time to the 'hbase' data creation time unit.
-   * 
-   * @param inputTime
-   *          the input time
-   * @return the long
-   */
-  public static long convertToDataCreationTimeUnit(long inputTime) {
-    if (inputTime <= 9999999999L) {
-      return convertSecondsToDataCreationTimeUnit(inputTime); // input time unit
-                                                              // is in seconds
-    } else if (inputTime <= 9999999999999L) {
-      return convertMillisToDataCreationTimeUnit(inputTime); // input time unit
-                                                             // is in millis
-    } else if (inputTime <= 9999999999999999L) {
-      return convertMicrosToDataCreationTimeUnit(inputTime); // input time unit
-                                                             // it in micros
-    }
-    return inputTime; // input time unit is unknown
-  }
-
-  /**
-   * Returns the 'hbase' data creation time unit by reading
-   * 'hbase.table.data.time.unit' property in 'hbase-config' properties file; If
-   * none is mentioned in properties file, returns <code>TimeUnit.UNKNOWN</code>
-   * 
-   * @return TimeUnit
-   */
-  @VisibleForTesting
-  public static TimeUnit getDataCreationTimeUnit() {
-    String timeUnit = ConfigurationUtil.getConfiguration().getString(
-        "hbase.table.data.time.unit");
-    LOGGER.debug("hbase.table.data.time.unit=" + timeUnit.toString());
-    if (StringUtils.isNotEmpty(timeUnit)) {
-      return TimeUnit.valueOf(timeUnit);
-    }
-    return TimeUnit.UNKNOWN;
-  }
-
-  /**
-   * Convert seconds to data creation time unit.
-   * 
-   * @param inputTime
-   *          the input time
-   * @return the long
-   */
-  @VisibleForTesting
-  public static long convertSecondsToDataCreationTimeUnit(long inputTime) {
-    System.out.println("convert Seconds To DataCreation TimeUnit");
-    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
-    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
-      return inputTime;
-    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
-      return inputTime * 1000;
-    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
-      return inputTime * 1000 * 1000;
-    }
-    return inputTime;
-  }
-
-  /**
-   * Builds the reverseKey to fetch the pcaps in the reverse traffic
-   * (destination to source).
-   * 
-   * @param key
-   *          indicates hbase rowKey (partial or full) in the format
-   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
-   * @return String indicates the key in the format
-   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
-   */
-  public static String reverseKey(String key) {
-    Assert.hasText(key, "key must not be null or empty");
-    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
-    String regex = "\\" + delimeter;
-    StringBuffer sb = new StringBuffer();
-    try {
-      String[] tokens = key.split(regex);
-      Assert
-          .isTrue(
-              (tokens.length == 5 || tokens.length == 6 || tokens.length == 7),
-              "key is not in the format : 'srcAddr-dstAddr-protocol-srcPort-dstPort-{ipId-fragment identifier}'");
-      sb.append(tokens[1]).append(delimeter).append(tokens[0])
-          .append(delimeter).append(tokens[2]).append(delimeter)
-          .append(tokens[4]).append(delimeter).append(tokens[3]);
-    } catch (Exception e) {
-      Log.warn("Failed to reverse the key. Reverse scan won't be performed.", e);
-    }
-    return sb.toString();
-  }
-
-  /**
-   * Builds the reverseKeys to fetch the pcaps in the reverse traffic
-   * (destination to source). If all keys in the input are not in the expected
-   * format, it returns an empty list;
-   * 
-   * @param keys
-   *          indicates list of hbase rowKeys (partial or full) in the format
-   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
-   * @return List<String> indicates the list of keys in the format
-   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
-   */
-  public static List<String> reverseKey(List<String> keys) {
-    Assert.notEmpty(keys, "'keys' must not be null or empty");
-    List<String> reverseKeys = new ArrayList<String>();
-    for (String key : keys) {
-      if (key != null) {
-        String reverseKey = reverseKey(key);
-        if (StringUtils.isNotEmpty(reverseKey)) {
-          reverseKeys.add(reverseKey);
-        }
-      }
-    }
-    return reverseKeys;
-  }
-
-  /**
-   * Returns Comparator for sorting pcaps cells based on the timestamp (dsc).
-   * 
-   * @return CellTimestampComparator
-   */
-  public static CellTimestampComparator getCellTimestampComparator() {
-    return CELL_TIMESTAMP_COMPARATOR;
-  }
-
-  /**
-   * Convert millis to data creation time unit.
-   * 
-   * @param inputTime
-   *          the input time
-   * @return the long
-   */
-  @VisibleForTesting
-  private static long convertMillisToDataCreationTimeUnit(long inputTime) {
-    System.out.println("convert Millis To DataCreation TimeUnit");
-    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
-    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
-      return (inputTime / 1000);
-    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
-      return inputTime;
-    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
-      return inputTime * 1000;
-    }
-    return inputTime;
-  }
-
-  /**
-   * Convert micros to data creation time unit.
-   * 
-   * @param inputTime
-   *          the input time
-   * @return the long
-   */
-  @VisibleForTesting
-  private static long convertMicrosToDataCreationTimeUnit(long inputTime) {
-    System.out.println("convert Micros To DataCreation TimeUnit");
-    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
-    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
-      return inputTime / (1000 * 1000);
-    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
-      return inputTime / 1000;
-    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
-      return inputTime;
-    }
-    return inputTime;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapReceiverImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapReceiverImpl.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapReceiverImpl.java
deleted file mode 100644
index f6eeab2..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapReceiverImpl.java
+++ /dev/null
@@ -1,212 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-import org.springframework.stereotype.Controller;
-import org.springframework.util.Assert;
-import org.springframework.util.LinkedMultiValueMap;
-import org.springframework.util.MultiValueMap;
-import org.springframework.web.bind.annotation.RequestMapping;
-import org.springframework.web.bind.annotation.RequestParam;
-
-import com.cisco.opensoc.pcap.parsing.PcapUtils;
-import com.google.common.annotations.VisibleForTesting;
-
-/**
- * Single point of entry for all REST calls. Exposes methods to fetch pcaps for
- * the given list of keys or range of keys and optional start time and end time.
- * If the caller doesn't provide start time and end time, all pcaps from
- * beginning of the time to until now are returned.
- * 
- * @author Sayi
- * 
- */
-@Controller
-public class PcapReceiverImpl implements IPcapReceiver {
-
-  /** The Constant LOGGER. */
-  private static final Logger LOGGER = Logger.getLogger(PcapReceiverImpl.class);
-
-  /** The Constant HEADER_CONTENT_DISPOSITION_NAME. */
-  private static final String HEADER_CONTENT_DISPOSITION_NAME = "Content-Disposition";
-
-  /** The Constant HEADER_CONTENT_DISPOSITION_VALUE. */
-  private static final String HEADER_CONTENT_DISPOSITION_VALUE = "attachment; filename=\"managed-threat.pcap\"";
-
-  /** partial response key header name. */
-  private static final String HEADER_PARTIAL_RESPONE_KEY = "lastRowKey";
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByKeys(java.util.List,
-   * java.lang.String, long, long, boolean, boolean,
-   * javax.servlet.http.HttpServletResponse)
-   */
-  @Override
-  @RequestMapping(value = "/pcapGetter/getPcapsByKeys", produces = "application/octet-stream")
-  public ResponseEntity<byte[]> getPcapsByKeys(
-      @RequestParam(required = false) List<String> keys,
-      @RequestParam(required = false) String lastRowKey,
-      @RequestParam(defaultValue = "-1") long startTime,
-      @RequestParam(defaultValue = "-1") long endTime,
-      @RequestParam(required = false) boolean includeDuplicateLastRow,
-      @RequestParam(defaultValue = "false") boolean includeReverseTraffic,
-      @RequestParam(required = false) String maxResponseSize)
-      throws IOException {
-    Assert.notEmpty(keys, "'keys' must not be null or empty");
-    PcapsResponse pcapResponse = null;
-    MultiValueMap<String, String> headers = new LinkedMultiValueMap<String, String>();
-    try {
-      IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
-      pcapResponse = pcapGetter.getPcaps(parseKeys(keys), lastRowKey,
-          startTime, endTime, includeReverseTraffic, includeDuplicateLastRow,
-          ConfigurationUtil.validateMaxResultSize(maxResponseSize));
-      LOGGER.info("pcaps response in REST layer =" + pcapResponse.toString());
-
-      // return http status '204 No Content' if the pcaps response size is 0
-      if (pcapResponse == null || pcapResponse.getResponseSize() == 0) {
-        return new ResponseEntity<byte[]>(HttpStatus.NO_CONTENT);
-      }
-
-      // return http status '206 Partial Content', the partial response file and
-      // 'lastRowKey' header , if the pcaps response status is 'PARTIAL'
-      headers.add(HEADER_CONTENT_DISPOSITION_NAME,
-          HEADER_CONTENT_DISPOSITION_VALUE);
-      if (pcapResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
-        headers.add(HEADER_PARTIAL_RESPONE_KEY,
-            pcapResponse.getLastRowKey());
-        return new ResponseEntity<byte[]>(pcapResponse.getPcaps(), headers,
-            HttpStatus.PARTIAL_CONTENT);
-      }
-
-    } catch (IOException e) {
-      LOGGER.error("Exception occurred while fetching Pcaps for the keys :"
-          + keys.toString(), e);
-      throw e;
-    }
-
-    // return http status '200 OK' along with the complete pcaps response file,
-    // and headers
-    return new ResponseEntity<byte[]>(pcapResponse.getPcaps(), headers,
-        HttpStatus.OK);
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see
-   * com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByKeyRange(java.lang.String
-   * , java.lang.String, java.lang.String, long, long,
-   * javax.servlet.http.HttpServletResponse)
-   */
-  @Override
-  @RequestMapping(value = "/pcapGetter/getPcapsByKeyRange", produces = "application/octet-stream")
-  public ResponseEntity<byte[]> getPcapsByKeyRange(
-      @RequestParam String startKey,
-      @RequestParam(required = false) String endKey,
-      @RequestParam(required = false) String maxResponseSize,
-      @RequestParam(defaultValue = "-1") long startTime,
-      @RequestParam(defaultValue = "-1") long endTime) throws IOException {
-    Assert.hasText(startKey, "'startKey' must not be null or empty");
-    MultiValueMap<String, String> headers = new LinkedMultiValueMap<String, String>();
-    byte[] response = null;
-    try {
-      IPcapScanner pcapScanner = PcapScannerHBaseImpl.getInstance();
-      response = pcapScanner.getPcaps(startKey, endKey,
-          ConfigurationUtil.validateMaxResultSize(maxResponseSize), startTime,
-          endTime);
-      if (response == null || response.length == 0) {
-        return new ResponseEntity<byte[]>(HttpStatus.NO_CONTENT);
-      }
-      headers.add(HEADER_CONTENT_DISPOSITION_NAME,
-          HEADER_CONTENT_DISPOSITION_VALUE);
-
-    } catch (IOException e) {
-      LOGGER.error(
-          "Exception occurred while fetching Pcaps for the key range : startKey="
-              + startKey + ", endKey=" + endKey, e);
-      throw e;
-    }
-    // return http status '200 OK' along with the complete pcaps response file,
-    // and headers
-    return new ResponseEntity<byte[]>(response, headers, HttpStatus.OK);
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see
-   * com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByIdentifiers(java.lang
-   * .String, java.lang.String, java.lang.String, java.lang.String,
-   * java.lang.String, long, long, boolean,
-   * javax.servlet.http.HttpServletResponse)
-   */
-  @Override
-  @RequestMapping(value = "/pcapGetter/getPcapsByIdentifiers", produces = "application/octet-stream")
-  public ResponseEntity<byte[]> getPcapsByIdentifiers(
-      @RequestParam String srcIp, @RequestParam String dstIp,
-      @RequestParam String protocol, @RequestParam String srcPort,
-      @RequestParam String dstPort,
-      @RequestParam(defaultValue = "-1") long startTime,
-      @RequestParam(defaultValue = "-1") long endTime,
-      @RequestParam(defaultValue = "false") boolean includeReverseTraffic)
-      throws IOException {
-    Assert.hasText(srcIp, "'srcIp' must not be null or empty");
-    Assert.hasText(dstIp, "'dstIp' must not be null or empty");
-    Assert.hasText(protocol, "'protocol' must not be null or empty");
-    Assert.hasText(srcPort, "'srcPort' must not be null or empty");
-    Assert.hasText(dstPort, "'dstPort' must not be null or empty");
-    MultiValueMap<String, String> headers = new LinkedMultiValueMap<String, String>();
-    PcapsResponse response = null;
-    try {
-      String sessionKey = PcapUtils.getSessionKey(srcIp, dstIp, protocol,
-          srcPort, dstPort);
-      LOGGER.info("sessionKey =" + sessionKey);
-      IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
-      response = pcapGetter.getPcaps(Arrays.asList(sessionKey), null,
-          startTime, endTime, includeReverseTraffic, false,
-          ConfigurationUtil.getDefaultResultSize());
-      if (response == null || response.getResponseSize() == 0) {
-        return new ResponseEntity<byte[]>(HttpStatus.NO_CONTENT);
-      }
-      headers.add(HEADER_CONTENT_DISPOSITION_NAME,
-          HEADER_CONTENT_DISPOSITION_VALUE);
-
-    } catch (IOException e) {
-      LOGGER.error("Exception occurred while fetching Pcaps by identifiers :",
-          e);
-      throw e;
-    }
-    // return http status '200 OK' along with the complete pcaps response file,
-    // and headers
-    return new ResponseEntity<byte[]>(response.getPcaps(), headers,
-        HttpStatus.OK);
-  }
-
-  /**
-   * This method parses the each value in the List using delimiter ',' and
-   * builds a new List;.
-   * 
-   * @param keys
-   *          list of keys to be parsed
-   * @return list of keys
-   */
-  @VisibleForTesting
-  List<String> parseKeys(List<String> keys) {
-    Assert.notEmpty(keys);
-    List<String> parsedKeys = new ArrayList<String>();
-    for (String key : keys) {
-      parsedKeys.addAll(Arrays.asList(StringUtils.split(StringUtils.trim(key),
-          ",")));
-    }
-    return parsedKeys;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImpl.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImpl.java
deleted file mode 100644
index 5e0649e..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapScannerHBaseImpl.java
+++ /dev/null
@@ -1,302 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.NoServerForRegionException;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Logger;
-import org.springframework.util.Assert;
-
-import com.cisco.opensoc.pcap.parsing.PcapMerger;
-import com.google.common.annotations.VisibleForTesting;
-
-/**
- * Singleton class which integrates with HBase table and returns sorted pcaps
- * based on the timestamp for the given range of keys. Creates HConnection if it
- * is not already created and the same connection instance is being used for all
- * requests
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public class PcapScannerHBaseImpl implements IPcapScanner {
-
-  /** The Constant LOGGER. */
-  private static final Logger LOGGER = Logger
-      .getLogger(PcapScannerHBaseImpl.class);
-
-  /** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
-  private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
-
-  /** The pcap scanner h base. */
-  private static IPcapScanner pcapScannerHBase = null;
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
-   * java.lang.String, long, long, long)
-   */
-  @Override
-  public byte[] getPcaps(String startKey, String endKey, long maxResultSize,
-      long startTime, long endTime) throws IOException {
-    Assert.hasText(startKey, "startKey must no be null or empty");
-    byte[] cf = Bytes.toBytes(ConfigurationUtil.getConfiguration()
-        .getString("hbase.table.column.family"));
-    byte[] cq = Bytes.toBytes(ConfigurationUtil.getConfiguration()
-        .getString("hbase.table.column.qualifier"));
-    // create scan request
-    Scan scan = createScanRequest(cf, cq, startKey, endKey, maxResultSize,
-        startTime, endTime);
-    List<byte[]> pcaps = new ArrayList<byte[]>();
-    HTable table = null;
-    try {
-      pcaps = scanPcaps(pcaps, table, scan, cf, cq);
-    } catch (IOException e) {
-      LOGGER.error(
-          "Exception occurred while fetching Pcaps for the key range : startKey="
-              + startKey + ", endKey=" + endKey, e);
-      if (e instanceof ZooKeeperConnectionException
-          || e instanceof MasterNotRunningException
-          || e instanceof NoServerForRegionException) {
-        int maxRetryLimit = getConnectionRetryLimit();
-        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
-          try {
-            HBaseConfigurationUtil.closeConnection(); // closing the existing
-                                                      // connection and retry,
-                                                      // it will create a new
-                                                      // HConnection
-            pcaps = scanPcaps(pcaps, table, scan, cf, cq);
-            break;
-          } catch (IOException ie) {
-            if (attempt == maxRetryLimit) {
-              System.out.println("Throwing the exception after retrying "
-                  + maxRetryLimit + " times.");
-              throw e;
-            }
-          }
-        }
-      } else {
-        throw e;
-      }
-    } finally {
-      if (table != null) {
-        table.close();
-      }
-    }
-    if (pcaps.size() == 1) {
-      return pcaps.get(0);
-    }
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PcapMerger.merge(baos, pcaps);
-    byte[] response = baos.toByteArray();
-    return response;
-  }
-
-  /**
-   * Creates the scan request.
-   * 
-   * @param cf
-   *          the cf
-   * @param cq
-   *          the cq
-   * @param startKey
-   *          the start key
-   * @param endKey
-   *          the end key
-   * @param maxResultSize
-   *          the max result size
-   * @param startTime
-   *          the start time
-   * @param endTime
-   *          the end time
-   * @return the scan
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @VisibleForTesting
-  Scan createScanRequest(byte[] cf, byte[] cq, String startKey, String endKey,
-      long maxResultSize, long startTime, long endTime) throws IOException {
-    Scan scan = new Scan();
-    scan.addColumn(cf, cq);
-    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
-        "hbase.table.column.maxVersions"));
-    scan.setStartRow(startKey.getBytes());
-    if (endKey != null) {
-      scan.setStopRow(endKey.getBytes());
-    }
-    scan.setMaxResultSize(maxResultSize);
-    boolean setTimeRange = true;
-    if (startTime < 0 && endTime < 0) {
-      setTimeRange = false;
-    }
-    if (setTimeRange) {
-      if (startTime < 0) {
-        startTime = 0;
-      } else {
-        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
-      }
-      if (endTime < 0) {
-        endTime = Long.MAX_VALUE;
-      } else {
-        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
-      }
-      Assert.isTrue(startTime < endTime,
-          "startTime value must be less than endTime value");
-    }
-    // create Scan request;
-    if (setTimeRange) {
-      scan.setTimeRange(startTime, endTime);
-    }
-    return scan;
-  }
-
-  /**
-   * Scan pcaps.
-   * 
-   * @param pcaps
-   *          the pcaps
-   * @param table
-   *          the table
-   * @param scan
-   *          the scan
-   * @param cf
-   *          the cf
-   * @param cq
-   *          the cq
-   * @return the list
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @VisibleForTesting
-  List<byte[]> scanPcaps(List<byte[]> pcaps, HTable table, Scan scan,
-      byte[] cf, byte[] cq) throws IOException {
-    LOGGER.info("Scan =" + scan.toString());
-    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
-    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
-    ResultScanner resultScanner = table.getScanner(scan);
-    List<Cell> scannedCells = new ArrayList<Cell>();
-    for (Result result = resultScanner.next(); result != null; result = resultScanner
-        .next()) {
-      List<Cell> cells = result.getColumnCells(cf, cq);
-      if (cells != null) {
-        for (Cell cell : cells) {
-          scannedCells.add(cell);
-        }
-      }
-    }
-    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
-    LOGGER.info("sorted cells :" + scannedCells.toString());
-    for (Cell sortedCell : scannedCells) {
-      pcaps.add(CellUtil.cloneValue(sortedCell));
-    }
-    return pcaps;
-  }
-
-  /**
-   * Gets the connection retry limit.
-   * 
-   * @return the connection retry limit
-   */
-  private int getConnectionRetryLimit() {
-    return ConfigurationUtil.getConfiguration().getInt(
-        "hbase.hconnection.retries.number", DEFAULT_HCONNECTION_RETRY_LIMIT);
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
-   * java.lang.String)
-   */
-  @Override
-  public byte[] getPcaps(String startKey, String endKey) throws IOException {
-    Assert.hasText(startKey, "startKey must no be null or empty");
-    Assert.hasText(endKey, "endKey must no be null or empty");
-    return getPcaps(startKey, endKey, ConfigurationUtil.getDefaultResultSize(),
-        -1, -1);
-  }
-
-  /**
-   * Always returns the singleton instance.
-   * 
-   * @return IPcapScanner singleton instance
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public static IPcapScanner getInstance() throws IOException {
-    if (pcapScannerHBase == null) {
-      synchronized (PcapScannerHBaseImpl.class) {
-        if (pcapScannerHBase == null) {
-          pcapScannerHBase = new PcapScannerHBaseImpl();
-        }
-      }
-    }
-    return pcapScannerHBase;
-  }
-
-  /**
-   * Instantiates a new pcap scanner h base impl.
-   */
-  private PcapScannerHBaseImpl() {
-  }
-
-  /**
-   * The main method.
-   */
-  // public static void main(String[] args) throws IOException {
-  // if (args == null || args.length < 3) {
-  // usage();
-  // return;
-  // }
-  // String outputFileName = null;
-  // String startKey = null;
-  // String stopKey = null;
-  // outputFileName = args[0];
-  // startKey = args[1];
-  // if (args.length > 2) { // NOPMD by sheetal on 1/29/14 3:55 PM
-  // stopKey = args[2];
-  // }
-  // PcapScannerHBaseImpl downloader = new PcapScannerHBaseImpl();
-  // byte[] pcaps = downloader.getPcaps(startKey, stopKey, defaultResultSize, 0,
-  // Long.MAX_VALUE);
-  // File file = new File(outputFileName);
-  // FileUtils.write(file, "", false);
-  // ByteArrayOutputStream baos = new ByteArrayOutputStream(); //
-  // $codepro.audit.disable
-  // // closeWhereCreated
-  // PcapMerger.merge(baos, pcaps);
-  // FileUtils.writeByteArrayToFile(file, baos.toByteArray(), true);
-  // }
-
-  /**
-   * Usage.
-   */
-  @SuppressWarnings("unused")
-  private static void usage() {
-    System.out.println("java " + PcapScannerHBaseImpl.class.getName() // NOPMD
-                                                                      // by
-        // sheetal
-        // <!-- //
-        // $codepro.audit.disable
-        // debuggingCode
-        // -->
-        // on
-        // 1/29/14
-        // 3:55
-        // PM
-        + " <zk quorum> <output file> <start key> [stop key]");
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapsResponse.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapsResponse.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapsResponse.java
deleted file mode 100644
index a8c8d1b..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/PcapsResponse.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import com.cisco.opensoc.pcap.parsing.PcapMerger;
-
-/**
- * Holds pcaps data, status and the partial response key.
- * 
- * @author Sayi
- */
-public class PcapsResponse {
-
-  /**
-   * The Enum Status.
-   */
-  public enum Status {
-    
-    /** The partial. */
-    PARTIAL, 
- /** The complete. */
- COMPLETE
-  };
-
-  /** response of the processed keys. */
-  private List<byte[]> pcaps = new ArrayList<byte[]>();;
-
-  /** partial response key. */
-  private String lastRowKey;
-
-  /** The status. */
-  private Status status = Status.COMPLETE;
-
-  /**
-   * Sets the pcaps.
-   * 
-   * @param pcaps
-   *          the new pcaps
-   */
-  public void setPcaps(List<byte[]> pcaps) {
-    this.pcaps = pcaps;
-  }
-
-  /**
-   * Adds the pcaps.
-   * 
-   * @param pcaps
-   *          the pcaps
-   */
-  public void addPcaps(byte[] pcaps) {
-    this.pcaps.add(pcaps);
-  }
-
-  /**
-   * Gets the partial response key.
-   * 
-   * @return the partial response key
-   */
-  public String getLastRowKey() {
-    return lastRowKey;
-  }
-
-  /**
-   * Sets the partial response key.
-   * 
-   * @param lastRowKey
-   *          the last row key
-   */
-  public void setLastRowKey(String lastRowKey) {
-    this.lastRowKey = lastRowKey;
-  }
-
-  /**
-   * Gets the status.
-   * 
-   * @return the status
-   */
-  public Status getStatus() {
-    return status;
-  }
-
-  /**
-   * Sets the status.
-   * 
-   * @param status
-   *          the new status
-   */
-  public void setStatus(Status status) {
-    this.status = status;
-  }
-
-  /**
-   * Checks if is resonse size within limit.
-   * 
-   * @param maxResultSize
-   *          the max result size
-   * @return true, if is resonse size within limit
-   */
-  public boolean isResonseSizeWithinLimit(long maxResultSize) {
-    // System.out.println("isResonseSizeWithinLimit() : getResponseSize() < (input|default result size - maximum packet size ) ="+
-    // getResponseSize()+ " < " + ( maxResultSize
-    // -ConfigurationUtil.getMaxRowSize()));
-    return getResponseSize() < (maxResultSize - ConfigurationUtil
-        .getMaxRowSize());
-  }
-
-  /**
-   * Gets the response size.
-   * 
-   * @return the response size
-   */
-  public long getResponseSize() {
-    long responseSize = 0;
-    for (byte[] pcap : this.pcaps) {
-      responseSize = responseSize + pcap.length;
-    }
-    return responseSize;
-  }
-
-  /**
-   * Gets the pcaps.
-   * 
-   * @return the pcaps
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public byte[] getPcaps() throws IOException {
-    if (pcaps.size() == 1) {
-      return pcaps.get(0);
-    }
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    PcapMerger.merge(baos, pcaps);
-    return baos.toByteArray();
-  }
-
-  /* (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
-  @Override
-  public String toString() {
-    return "PcapsResponse [lastRowKey=" + lastRowKey
-        + ", status=" + status + ", pcapsSize="
-        + String.valueOf(getResponseSize()) + "]";
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/RestTestingUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/RestTestingUtil.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/RestTestingUtil.java
deleted file mode 100644
index f8e82d3..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/RestTestingUtil.java
+++ /dev/null
@@ -1,238 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.springframework.http.HttpEntity;
-import org.springframework.http.HttpHeaders;
-import org.springframework.http.HttpMethod;
-import org.springframework.http.MediaType;
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.client.RestTemplate;
-
-/**
- * The Class RestTestingUtil.
- */
-public class RestTestingUtil {
-  
-  /** The host name. */
-  public static String hostName = null;
-
-  /**
-   * Gets the pcaps by keys.
-   * 
-   * @param keys
-   *          the keys
-   * @return the pcaps by keys
-   */
-  @SuppressWarnings("unchecked")
-  private static void getPcapsByKeys(String keys) {
-    System.out
-        .println("**********************getPcapsByKeys ******************************************************************************************");
-    // 1.
-    String url = "http://" + hostName
-        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
-        + "&includeReverseTraffic={includeReverseTraffic}"
-        + "&startTime={startTime}" + "&endTime={endTime}"
-        + "&maxResponseSize={maxResponseSize}";
-    // default values
-    String startTime = "-1";
-    String endTime = "-1";
-    String maxResponseSize = "6";
-    String includeReverseTraffic = "false";
-
-    @SuppressWarnings("rawtypes")
-    Map map = new HashMap();
-    map.put("keys", keys);
-    map.put("includeReverseTraffic", includeReverseTraffic);
-    map.put("startTime", startTime);
-    map.put("endTime", endTime);
-    map.put("maxResponseSize", maxResponseSize);
-
-    RestTemplate template = new RestTemplate();
-
-    // set headers and entity to send
-    HttpHeaders headers = new HttpHeaders();
-    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
-    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
-
-    // 1.
-    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
-            response1);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-    // 2. with reverse traffic
-    includeReverseTraffic = "true";
-    map.put("includeReverseTraffic", includeReverseTraffic);
-    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
-            response2);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-    // 3.with time range
-    startTime = System.getProperty("startTime", "-1");
-    endTime = System.getProperty("endTime", "-1");
-    map.put("startTime", startTime);
-    map.put("endTime", endTime);
-    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
-            response3);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-    // 4.with maxResponseSize
-    maxResponseSize = System.getProperty("maxResponseSize", "6");
-    map.put("maxResponseSize", maxResponseSize);
-    ResponseEntity<byte[]> response4 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
-            response4);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-  }
-
-  /**
-   * Gets the pcaps by keys range.
-   * 
-   * @param startKey
-   *          the start key
-   * @param endKey
-   *          the end key
-   * @return the pcaps by keys range
-   */
-  @SuppressWarnings("unchecked")
-  private static void getPcapsByKeysRange(String startKey, String endKey) {
-    System.out
-        .println("**********************getPcapsByKeysRange ******************************************************************************************");
-    // 1.
-    String url = "http://" + hostName
-        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
-        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
-        + "&maxResponseSize={maxResponseSize}";
-    // default values
-    String startTime = "-1";
-    String endTime = "-1";
-    String maxResponseSize = "6";
-    @SuppressWarnings("rawtypes")
-    Map map = new HashMap();
-    map.put("startKey", startKey);
-    map.put("endKey", "endKey");
-    map.put("startTime", startTime);
-    map.put("endTime", endTime);
-    map.put("maxResponseSize", maxResponseSize);
-
-    RestTemplate template = new RestTemplate();
-
-    // set headers and entity to send
-    HttpHeaders headers = new HttpHeaders();
-    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
-    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
-
-    // 1.
-    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            startKey, endKey, startTime, endTime, maxResponseSize, response1);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-    // 2. with time range
-    startTime = System.getProperty("startTime", "-1");
-    endTime = System.getProperty("endTime", "-1");
-    map.put("startTime", startTime);
-    map.put("endTime", endTime);
-    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            startKey, endKey, startTime, endTime, maxResponseSize, response2);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-    // 3. with maxResponseSize
-    maxResponseSize = System.getProperty("maxResponseSize", "6");
-    map.put("maxResponseSize", maxResponseSize);
-    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
-        requestEntity, byte[].class, map);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out
-        .format(
-            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
-            startKey, endKey, startTime, endTime, maxResponseSize, response3);
-    System.out
-        .println("----------------------------------------------------------------------------------------------------");
-    System.out.println();
-
-  }
-
-  /**
-   * The main method.
-   * 
-   * @param args
-   *          the arguments
-   */
-  public static void main(String[] args) {
-
-    /*
-     * Run this program with system properties
-     * 
-     * -DhostName=mon.hw.com:8090
-     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
-     * 1800000b-06-0050-5af6-64840-40785
-     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
-     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
-     */
-
-    hostName = System.getProperty("hostName");
-
-    String keys = System.getProperty("keys");
-
-    String statyKey = System.getProperty("startKey");
-    String endKey = System.getProperty("endKey");
-
-    getPcapsByKeys(keys);
-    getPcapsByKeysRange(statyKey, endKey);
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/config-definition-hbase.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/config-definition-hbase.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/config-definition-hbase.xml
deleted file mode 100644
index efe05e8..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/config-definition-hbase.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="ISO-8859-1" ?>
-
-<configuration>
-	<header>
-		<result delimiterParsingDisabled="true" forceReloadCheck="true"></result>
-		<lookups>
-      		<lookup config-prefix="expr"
-              	config-class="org.apache.commons.configuration.interpol.ExprLookup">
-        		<variables>
-          			<variable name="System" value="Class:java.lang.System"/>
-          			<variable name="net" value="Class:java.net.InetAddress"/>
-          			<variable name="String" value="Class:org.apache.commons.lang.StringUtils"/>
-        		</variables>
-      		</lookup>
-    	</lookups>
-	</header>
-	<override>
-		<!-- 1. properties from 'hbae-config.properties' are loaded first; 
-				if a property is not present in this file, then it will search in the files in the order they are defined here.
-		     2. 'refreshDelay' indicates the minimum delay in milliseconds between checks to see if the underlying file is changed.
-		     3. 'config-optional' indicates this file is not required --> 
-		
-		<properties fileName="${expr:System.getProperty('configPath')+'/hbase-config.properties'}"  config-optional="true">
-			<reloadingStrategy refreshDelay="${expr:System.getProperty('configRefreshDelay')}"
-	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
-	     </properties>
-		
-		<properties fileName="hbase-config-default.properties" config-optional="true">
-<!-- 					<reloadingStrategy refreshDelay="${expr:System.getProperty('defaultConfigRefreshDelay')}"
-	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
- -->	     </properties>
-		
-	</override>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/hbase-config-default.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/hbase-config-default.properties b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/hbase-config-default.properties
deleted file mode 100644
index e9924ee..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/hbase-config-default.properties
+++ /dev/null
@@ -1,40 +0,0 @@
-#hbase zoo keeper configuration
-hbase.zookeeper.quorum=zkpr1,zkpr2,zkpr3
-hbase.zookeeper.clientPort=2181
-hbase.client.retries.number=1
-zookeeper.session.timeout=60000
-zookeeper.recovery.retry=0
-
-#hbase table configuration
-hbase.table.name=pcap
-hbase.table.column.family=t
-hbase.table.column.qualifier=pcap
-hbase.table.column.maxVersions=5
-
-# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
-hbase.scan.result.size.unit=MB
-hbase.scan.default.result.size=6
-hbase.scan.max.result.size=60
-
-# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
-hbase.table.data.time.unit=MICROS
-
-#number of retries in case of ZooKeeper or HBase server down
-hbase.hconnection.retries.number=3
-
-#configuration for including pcaps in the reverse traffic
-pcaps.include.reverse.traffic = false
-
-#maximum table row size in KB or MB 
-hbase.table.row.size.unit = KB
-hbase.table.max.row.size = 70
-
-# tokens of row key configuration
-hbase.table.row.key.tokens=7
-rest.api.input.key.min.tokens=5
-
-# whether or not to include the last row from the previous request, applicable for only partial response scenario
-hbase.table.scan.include.duplicate.lastrow= true;
-
-#number of digits for appending tokens of the row key
-hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/log4j.properties b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/log4j.properties
deleted file mode 100644
index 0b6ca10..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Root logger option
-log4j.rootLogger=TRACE,file,stdout
-
-# Direct log messages to a log file
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.File=/var/log/hbase/cisco-hbase.log
-log4j.appender.file.MaxFileSize=1MB
-log4j.appender.file.MaxBackupIndex=1
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-
-
-# Direct log messages to console
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.Target=System.out
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-
-log4j.logger.backtype.storm=DEBUG
-log4j.logger.clojure.tools=DEBUG
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/CellTimestampComparatorTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/CellTimestampComparatorTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/CellTimestampComparatorTest.java
deleted file mode 100644
index 639af33..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/CellTimestampComparatorTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import junit.framework.Assert;
-
-import org.apache.hadoop.hbase.Cell;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-import com.cisco.opensoc.hbase.client.CellTimestampComparator;
-
-/**
- * The Class CellTimestampComparatorTest.
- */
-public class CellTimestampComparatorTest {
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test_less.
-   */
-  @Test
-  public void test_less() {
-    // mocking
-    Cell cell1 = Mockito.mock(Cell.class);
-    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
-    Cell cell2 = Mockito.mock(Cell.class);
-    Mockito.when(cell2.getTimestamp()).thenReturn(13845345808L);
-
-    CellTimestampComparator comparator = new CellTimestampComparator();
-
-    // actual call and verify
-    Assert.assertTrue(comparator.compare(cell1, cell2) == -1);
-
-  }
-
-  /**
-   * Test_greater.
-   */
-  @Test
-  public void test_greater() {
-    // mocking
-    Cell cell1 = Mockito.mock(Cell.class);
-    Mockito.when(cell1.getTimestamp()).thenReturn(13745345808L);
-    Cell cell2 = Mockito.mock(Cell.class);
-    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
-
-    CellTimestampComparator comparator = new CellTimestampComparator();
-
-    // actual call and verify
-    Assert.assertTrue(comparator.compare(cell1, cell2) == 1);
-
-  }
-
-  /**
-   * Test_equal.
-   */
-  @Test
-  public void test_equal() {
-    // mocking
-    Cell cell1 = Mockito.mock(Cell.class);
-    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
-    Cell cell2 = Mockito.mock(Cell.class);
-    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
-
-    CellTimestampComparator comparator = new CellTimestampComparator();
-
-    // actual call and verify
-    Assert.assertTrue(comparator.compare(cell1, cell2) == 0);
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/ConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/ConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/ConfigurationUtilTest.java
deleted file mode 100644
index 48f3973..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/ConfigurationUtilTest.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import org.eclipse.jdt.internal.core.Assert;
-import org.junit.Test;
-
-import com.cisco.opensoc.hbase.client.ConfigurationUtil;
-import com.cisco.opensoc.hbase.client.ConfigurationUtil.SizeUnit;
-
-/**
- * The Class ConfigurationUtilTest.
- */
-public class ConfigurationUtilTest {
-
-  /**
-   * Test_get max allowable result size in bytes.
-   */
-  @Test
-  public void test_getMaxAllowableResultSizeInBytes() {
-    long result = ConfigurationUtil.getMaxResultSize();
-    Assert.isTrue(result == 62914560);
-  }
-
-  /**
-   * Test_get max allowable results size unit.
-   */
-  @Test
-  public void test_getMaxAllowableResultsSizeUnit() {
-    SizeUnit result = ConfigurationUtil.getResultSizeUnit();
-    Assert.isTrue(SizeUnit.MB == result);
-  }
-
-  /**
-   * Test_get max row size in bytes.
-   */
-  @Test
-  public void test_getMaxRowSizeInBytes() {
-    long result = ConfigurationUtil.getMaxRowSize();
-    Assert.isTrue(result == 71680);
-  }
-
-  /**
-   * Test_get max row size unit.
-   */
-  @Test
-  public void test_getMaxRowSizeUnit() {
-    SizeUnit result = ConfigurationUtil.getRowSizeUnit();
-    Assert.isTrue(SizeUnit.KB == result);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtilTest.java
deleted file mode 100644
index e8ec8f9..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtilTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import org.springframework.util.Assert;
-
-import com.cisco.opensoc.hbase.client.HBaseConfigurationUtil;
-
-/**
- * The Class HBaseConfigurationUtilTest.
- */
-public class HBaseConfigurationUtilTest {
-
-  /**
-   * Sets the up.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @Before
-  public void setUp() throws Exception {
-  }
-
-  /**
-   * Tear down.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  @After
-  public void tearDown() throws Exception {
-  }
-
-  /**
-   * Test_read.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  @Test
-  public void test_read() throws IOException {
-    Configuration configuration = HBaseConfigurationUtil.read();
-    Assert.isTrue(configuration != null, "Configuration must not be null");
-    Assert.isTrue(configuration.get("hbase.client.retries.number").equals("1"),
-        "value must be equal");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseIntegrationTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseIntegrationTest.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseIntegrationTest.java
deleted file mode 100644
index 3eb2bb0..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/test/java/com/cisco/opensoc/hbase/client/HBaseIntegrationTest.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * The Class HBaseIntegrationTest.
- * 
- * @author Sayi
- */
-public class HBaseIntegrationTest {
-
-  /** The test util. */
-  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
-
-  /** The test table. */
-  private HTable testTable;
-
-  /**
-   * Inits the cluster.
-   * 
-   * @throws Exception
-   *           the exception
-   */
-  void initCluster() throws Exception {
-    // testUtil.getConfiguration().addResource("hbase-site-local.xml");
-    // testUtil.getConfiguration().reloadConfiguration();
-    // start mini hbase cluster
-    testUtil.startMiniCluster(1);
-    // create tables
-    createTable();
-
-  }
-
-  /**
-   * Creates the table.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private void createTable() throws IOException {
-    testTable = testUtil.createTable("test_pcaps_local", "cf");
-    System.out.println("after 'test_pcaps_local' table creation ");
-    // create put
-    Put put = new Put(Bytes.toBytes("1111")); // row key =1111
-    put.add(Bytes.toBytes("cf"), Bytes.toBytes("packet"),
-        Bytes.toBytes("aaaaaaaa"));
-    testTable.put(put);
-    System.out.println("after testTable.put(put)");
-
-  }
-
-  /**
-   * The main method.
-   * 
-   * @param args
-   *          the arguments
-   * @throws Exception
-   *           the exception
-   */
-  public static void main(String[] args) throws Exception {
-    // HBaseIntegrationTest test = new HBaseIntegrationTest();
-    // test.initCluster();
-
-  }
-
-}


[16/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseParserTest.log
new file mode 100644
index 0000000..809501b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseParserTest.log
@@ -0,0 +1,308 @@
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,BYODRegistration=Unknown\,FeedService=false\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,TimeToProfile=19\,StaticGroupAssignment=false\,NmapSubnetScanID=0\,DeviceRegistrationStatus=NotRegistered\,PortalUser=, EndpointSourceEvent=SNMPQuery Probe, EndpointIdentityGroup=Profile
 d, ProfilerServer=stage-pdp01.cisco.com,
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024856 1 0 2014-08-07 00:45:43.786 -07:00 0000288543 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,BYODRegistration=Unknown\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,TimeToProfile=19\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,UpdateTime=0\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,FeedService=false\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,NmapScanCount=0\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceE
 vent=SNMPQuery Probe, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com,
+Aug  6 20:00:52 10.42.7.64 Aug  7 03:20:05 npf-sjca-pdp02 CISE_Profiler 0000373185 1 0 2014-08-07 03:20:05.549 -07:00 0011310202 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407394245820\,PolicyVersion=403\,Identi
 tyGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 21:00:48 10.42.7.64 Aug  7 04:20:00 npf-sjca-pdp02 CISE_Profiler 0000373902 1 0 2014-08-07 04:20:00.983 -07:00 0011322557 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407406806572\,PolicyVersion=403\,Iden
 tityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 22:22:50 10.42.7.64 Aug  7 05:42:03 npf-sjca-pdp02 CISE_Profiler 0000374846 1 0 2014-08-07 05:42:03.617 -07:00 0011340138 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=10, EndpointMacAddress=68:A8:6D:4E:0D:86, EndpointMatchedPolicy=Apple-Device, EndpointOUI=Apple, EndpointPolicy=Apple-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,host-name=PEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407415322895\,TimeToProfile=717\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,PolicyVersion=403\,IdentityGroupID=abbbcac0-89e6-11e1-bf14-005056aa4dd7\,Total Certainty Factor=10\,ciaddr=0.0.0.0\,FeedService=false\,dhcp-parameter-request-list=1\, 3\, 6\, 15\, 119\, 95\, 252\, 44\, 46\,MatchedPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceEvent=DHCP Probe, EndpointIdentityGroup
 =Apple-Device, ProfilerServer=npf.example.com,
+Aug  6 23:30:10 10.42.7.64 Aug  7 06:49:23 npf-sjca-pdp02 CISE_Profiler 0000375603 1 0 2014-08-07 06:49:23.920 -07:00 0011353768 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407410402099\,PolicyVersion=403\,Identi
 tyGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 23:30:48 10.42.7.64 Aug  7 06:50:01 npf-sjca-pdp02 CISE_Profiler 0000375611 1 0 2014-08-07 06:50:01.377 -07:00 0011353875 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=50, EndpointIPAddress=10.34.92.103, EndpointMacAddress=3C:A9:F4:29:FC:3C, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.34.76.212, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-29-fc-3c\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406109860322\,L4_DST_PORT=50428\,TimeToProfile=7\,Framed-IP-Address=10.34.92.103\,LastNmapScanTime=1380758278898\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=140668603
 4558\,PolicyVersion=403\,IdentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=50\,operating-system=Microsoft Windows Vista SP0 - SP2\, Server 2008\, or Windows 7 Ultimate\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1373657280926\,NmapScanCount=3\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 23:32:52 10.42.7.64 Aug  7 06:52:05 npf-sjca-pdp02 CISE_Profiler 0000375636 1 0 2014-08-07 06:52:05.272 -07:00 0011354313 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.143, EndpointMacAddress=E8:2A:EA:23:5E:3D, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=e8-2a-ea-23-5e-3d\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=ANOY-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406114784910\,TimeToProfile=7\,Framed-IP-Address=10.56.129.143\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407395211208\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1405408515121\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 16:40:52 10.42.7.64 Aug  7 00:00:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370855 1 0 2014-08-07 00:00:04.527 -07:00 0011266584 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270932, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056EF53E323F4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:40:57 10.42.7.63 Aug  7 00:00:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001969834 1 0 2014-08-07 00:00:09.568 -07:00 0098648519 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084839, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4A53E323F9, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:41:24 10.34.84.145 Aug  7 00:00:36 stage-pdp01 CISE_Failed_Attempts 0000024616 1 0 2014-08-07 00:00:36.332 -07:00 0000287007 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19317, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:26 10.34.84.145 Aug  7 00:00:38 stage-pdp01 CISE_Failed_Attempts 0000024617 1 0 2014-08-07 00:00:38.336 -07:00 0000287011 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19318, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:28 10.34.84.145 Aug  7 00:00:40 stage-pdp01 CISE_Failed_Attempts 0000024618 1 0 2014-08-07 00:00:40.336 -07:00 0000287015 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19319, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:30 10.34.84.145 Aug  7 00:00:42 stage-pdp01 CISE_Failed_Attempts 0000024619 1 0 2014-08-07 00:00:42.340 -07:00 0000287019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19320, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:32 10.34.84.145 Aug  7 00:00:44 stage-pdp01 CISE_Failed_Attempts 0000024620 1 0 2014-08-07 00:00:44.340 -07:00 0000287023 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19321, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:34 10.34.84.145 Aug  7 00:00:46 stage-pdp01 CISE_Failed_Attempts 0000024621 1 0 2014-08-07 00:00:46.344 -07:00 0000287027 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19322, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:42:02 10.42.7.64 Aug  7 00:01:14 npf-sjca-pdp02 CISE_Failed_Attempts 0000370865 1 0 2014-08-07 00:01:14.610 -07:00 0011266810 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270940, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F053E3243A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:42:07 10.42.7.63 Aug  7 00:01:19 npf-sjca-pdp01 CISE_Failed_Attempts 0001969923 1 0 2014-08-07 00:01:19.665 -07:00 0098652715 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084986, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4B53E3243F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:42:12 10.42.7.64 Aug  7 00:01:24 npf-sjca-pdp02 CISE_Failed_Attempts 0000370867 1 0 2014-08-07 00:01:24.701 -07:00 0011266815 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270941, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F153E32444, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:42:17 10.42.7.63 Aug  7 00:01:29 npf-sjca-pdp01 CISE_Failed_Attempts 0001969935 1 0 2014-08-07 00:01:29.746 -07:00 0098653362 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085007, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4C53E32449, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:43:22 10.42.7.64 Aug  7 00:02:34 npf-sjca-pdp02 CISE_Failed_Attempts 0000370885 1 0 2014-08-07 00:02:34.792 -07:00 0011267367 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270956, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F353E3248A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:43:27 10.42.7.63 Aug  7 00:02:39 npf-sjca-pdp01 CISE_Failed_Attempts 0001970043 1 0 2014-08-07 00:02:39.808 -07:00 0098657578 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085161, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4D53E3248F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:43:56 10.42.7.64 Aug  7 00:03:08 npf-sjca-pdp02 CISE_Failed_Attempts 0000370897 1 0 2014-08-07 00:03:08.902 -07:00 0011267657 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=yshchory, Protocol=Radius, RequestLatency=49, NetworkDeviceName=NTN-WLC1, User-Name=yshchory, NAS-IP-Address=10.56.129.4, NAS-Port=1, Framed-IP-Address=10.56.129.141, Class=CACS:0a388104000045cd53e2be75:npf-sjca-pdp02/195481465/270958, Called-Station-ID=6c-41-6a-5f-6e-c0, Calling-Station-ID=90-18-7c-7b-59-01, NAS-Identifier=ntn01-11a-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=2359603, Acct-Output-Octets=26928466, Acct-Session-Id=53e2be78/90:18:7c:7b:59:01/13844, Acct-Authentic=RADIUS, Acct-Session-Time=1466, Acct-Input-Packets=14866, Acct-Output-Packets=23043, und
 efined-52=
+Aug  6 16:44:01 10.42.7.63 Aug  7 00:03:13 npf-sjca-pdp01 CISE_Failed_Attempts 0001970072 1 0 2014-08-07 00:03:13.112 -07:00 0098658804 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=133, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.63, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=10.56.111.14, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=4, Acct-Input-Octets=225395, Acct-Output-Octets=761436, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=43, Acct-Input-Packets=1163, Acct-Output-Packets=1080, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-pair
 =audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp01/195491152/2085221, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=42, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
+Aug  6 16:44:32 10.42.7.64 Aug  7 00:03:44 npf-sjca-pdp02 CISE_Failed_Attempts 0000370899 1 0 2014-08-07 00:03:44.851 -07:00 0011267663 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270963, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F453E324D0, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:44:36 10.34.84.145 Aug  7 00:03:48 stage-pdp01 CISE_Failed_Attempts 0000024632 1 0 2014-08-07 00:03:48.375 -07:00 0000287084 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19329, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:37 10.42.7.63 Aug  7 00:03:49 npf-sjca-pdp01 CISE_Failed_Attempts 0001970128 1 0 2014-08-07 00:03:49.893 -07:00 0098661643 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085307, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4E53E324D5, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:44:38 10.34.84.145 Aug  7 00:03:50 stage-pdp01 CISE_Failed_Attempts 0000024633 1 0 2014-08-07 00:03:50.379 -07:00 0000287088 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19330, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:40 10.34.84.145 Aug  7 00:03:52 stage-pdp01 CISE_Failed_Attempts 0000024634 1 0 2014-08-07 00:03:52.379 -07:00 0000287092 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19331, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:42 10.34.84.145 Aug  7 00:03:54 stage-pdp01 CISE_Failed_Attempts 0000024635 1 0 2014-08-07 00:03:54.387 -07:00 0000287096 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19332, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:42 10.42.7.64 Aug  7 00:03:54 npf-sjca-pdp02 CISE_Failed_Attempts 0000370903 1 0 2014-08-07 00:03:54.924 -07:00 0011267670 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270964, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F553E324DA, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:44:44 10.34.84.145 Aug  7 00:03:56 stage-pdp01 CISE_Failed_Attempts 0000024636 1 0 2014-08-07 00:03:56.386 -07:00 0000287100 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19333, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:46 10.34.84.145 Aug  7 00:03:58 stage-pdp01 CISE_Failed_Attempts 0000024637 1 0 2014-08-07 00:03:58.390 -07:00 0000287104 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19334, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:47 10.42.7.63 Aug  7 00:03:59 npf-sjca-pdp01 CISE_Failed_Attempts 0001970140 1 0 2014-08-07 00:03:59.951 -07:00 0098662310 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085331, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4F53E324DF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:44:48 10.42.7.64 Aug  7 00:04:00 npf-sjca-pdp02 CISE_Failed_Attempts 0000370905 1 0 2014-08-07 00:04:00.526 -07:00 0011267674 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.64, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=169.254.53.87, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=1458615, Acct-Output-Octets=3836368, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=95, Acct-Input-Packets=4505, Acct-Output-Packets=5619, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-p
 air=audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp02/195481465/270965, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=52, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
+Aug  6 16:45:52 10.42.7.64 Aug  7 00:05:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370920 1 0 2014-08-07 00:05:04.969 -07:00 0011267987 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270977, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F653E32520, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:45:58 10.42.7.63 Aug  7 00:05:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001970212 1 0 2014-08-07 00:05:09.998 -07:00 0098665518 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085460, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5053E32525, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:03 10.42.7.64 Aug  7 00:06:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000370931 1 0 2014-08-07 00:06:15.016 -07:00 0011268196 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270985, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F753E32567, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:47:08 10.42.7.63 Aug  7 00:06:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970324 1 0 2014-08-07 00:06:20.055 -07:00 0098669942 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085599, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5153E3256C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:13 10.42.7.64 Aug  7 00:06:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000370934 1 0 2014-08-07 00:06:25.097 -07:00 0011268209 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270987, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F853E32571, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:47:18 10.42.7.63 Aug  7 00:06:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970335 1 0 2014-08-07 00:06:30.119 -07:00 0098670037 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085618, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5253E32576, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:48 10.34.84.145 Aug  7 00:07:00 stage-pdp01 CISE_Failed_Attempts 0000024649 1 0 2014-08-07 00:07:00.418 -07:00 0000287210 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19342, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:50 10.34.84.145 Aug  7 00:07:02 stage-pdp01 CISE_Failed_Attempts 0000024650 1 0 2014-08-07 00:07:02.421 -07:00 0000287214 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19343, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:52 10.34.84.145 Aug  7 00:07:04 stage-pdp01 CISE_Failed_Attempts 0000024651 1 0 2014-08-07 00:07:04.425 -07:00 0000287218 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19344, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:54 10.34.84.145 Aug  7 00:07:06 stage-pdp01 CISE_Failed_Attempts 0000024652 1 0 2014-08-07 00:07:06.429 -07:00 0000287222 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19345, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:56 10.34.84.145 Aug  7 00:07:08 stage-pdp01 CISE_Failed_Attempts 0000024653 1 0 2014-08-07 00:07:08.429 -07:00 0000287226 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19346, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:58 10.34.84.145 Aug  7 00:07:10 stage-pdp01 CISE_Failed_Attempts 0000024654 1 0 2014-08-07 00:07:10.433 -07:00 0000287230 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19347, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:48:23 10.42.7.64 Aug  7 00:07:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000370955 1 0 2014-08-07 00:07:35.138 -07:00 0011268472 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271001, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F953E325B7, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:48:28 10.42.7.63 Aug  7 00:07:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970420 1 0 2014-08-07 00:07:40.178 -07:00 0098673462 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085757, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5353E325BC, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:49:33 10.42.7.64 Aug  7 00:08:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000370984 1 0 2014-08-07 00:08:45.219 -07:00 0011269071 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271016, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FB53E325FD, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:49:38 10.42.7.63 Aug  7 00:08:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970519 1 0 2014-08-07 00:08:50.259 -07:00 0098677825 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085892, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5453E32602, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:49:43 10.42.7.64 Aug  7 00:08:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000370986 1 0 2014-08-07 00:08:55.298 -07:00 0011269076 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271017, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FC53E32607, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:49:48 10.42.7.63 Aug  7 00:09:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970524 1 0 2014-08-07 00:09:00.330 -07:00 0098678019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085909, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5553E3260C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:50:53 10.42.7.64 Aug  7 00:10:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000370999 1 0 2014-08-07 00:10:05.339 -07:00 0011269371 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271027, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FD53E3264D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:50:58 10.42.7.63 Aug  7 00:10:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970625 1 0 2014-08-07 00:10:10.388 -07:00 0098682297 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086061, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5653E32652, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:51:00 10.34.84.145 Aug  7 00:10:12 stage-pdp01 CISE_Failed_Attempts 0000024661 1 0 2014-08-07 00:10:12.492 -07:00 0000287258 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19354, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:02 10.34.84.145 Aug  7 00:10:14 stage-pdp01 CISE_Failed_Attempts 0000024662 1 0 2014-08-07 00:10:14.496 -07:00 0000287262 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19355, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:04 10.34.84.145 Aug  7 00:10:16 stage-pdp01 CISE_Failed_Attempts 0000024663 1 0 2014-08-07 00:10:16.496 -07:00 0000287266 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19356, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:06 10.34.84.145 Aug  7 00:10:18 stage-pdp01 CISE_Failed_Attempts 0000024664 1 0 2014-08-07 00:10:18.500 -07:00 0000287270 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19357, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:08 10.34.84.145 Aug  7 00:10:20 stage-pdp01 CISE_Failed_Attempts 0000024665 1 0 2014-08-07 00:10:20.504 -07:00 0000287274 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19358, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:10 10.34.84.145 Aug  7 00:10:22 stage-pdp01 CISE_Failed_Attempts 0000024667 1 0 2014-08-07 00:10:22.507 -07:00 0000287279 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19359, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:52:03 10.42.7.64 Aug  7 00:11:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371005 1 0 2014-08-07 00:11:15.432 -07:00 0011269421 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271031, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FE53E32693, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:52:08 10.42.7.63 Aug  7 00:11:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970691 1 0 2014-08-07 00:11:20.468 -07:00 0098685176 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086181, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5753E32698, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:52:13 10.42.7.64 Aug  7 00:11:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371007 1 0 2014-08-07 00:11:25.515 -07:00 0011269426 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271032, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FF53E3269D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:52:18 10.42.7.63 Aug  7 00:11:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970708 1 0 2014-08-07 00:11:30.551 -07:00 0098685669 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=8, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086202, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5853E326A2, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:53:23 10.42.7.64 Aug  7 00:12:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371016 1 0 2014-08-07 00:12:35.547 -07:00 0011269586 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271040, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570053E326E3, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:53:28 10.42.7.63 Aug  7 00:12:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970802 1 0 2014-08-07 00:12:40.596 -07:00 0098689883 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086334, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5953E326E8, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:54:12 10.34.84.145 Aug  7 00:13:24 stage-pdp01 CISE_Failed_Attempts 0000024680 1 0 2014-08-07 00:13:24.527 -07:00 0000287388 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19368, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:14 10.34.84.145 Aug  7 00:13:26 stage-pdp01 CISE_Failed_Attempts 0000024681 1 0 2014-08-07 00:13:26.531 -07:00 0000287392 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19369, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:16 10.34.84.145 Aug  7 00:13:28 stage-pdp01 CISE_Failed_Attempts 0000024682 1 0 2014-08-07 00:13:28.534 -07:00 0000287396 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19370, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:18 10.34.84.145 Aug  7 00:13:30 stage-pdp01 CISE_Failed_Attempts 0000024683 1 0 2014-08-07 00:13:30.538 -07:00 0000287400 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19371, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:20 10.34.84.145 Aug  7 00:13:32 stage-pdp01 CISE_Failed_Attempts 0000024684 1 0 2014-08-07 00:13:32.538 -07:00 0000287404 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19372, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:22 10.34.84.145 Aug  7 00:13:34 stage-pdp01 CISE_Failed_Attempts 0000024685 1 0 2014-08-07 00:13:34.542 -07:00 0000287408 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19373, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:54:33 10.42.7.64 Aug  7 00:13:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000371020 1 0 2014-08-07 00:13:45.628 -07:00 0011269631 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271044, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570153E32729, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:54:38 10.42.7.63 Aug  7 00:13:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970913 1 0 2014-08-07 00:13:50.668 -07:00 0098695334 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086486, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5A53E3272E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:54:43 10.42.7.64 Aug  7 00:13:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000371025 1 0 2014-08-07 00:13:55.694 -07:00 0011269740 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271048, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570253E32733, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:54:48 10.42.7.63 Aug  7 00:14:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970924 1 0 2014-08-07 00:14:00.705 -07:00 0098695591 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086505, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5B53E32738, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:55:53 10.42.7.64 Aug  7 00:15:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000371036 1 0 2014-08-07 00:15:05.742 -07:00 0011270054 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271057, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570353E32779, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:55:58 10.42.7.63 Aug  7 00:15:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970997 1 0 2014-08-07 00:15:10.772 -07:00 0098698954 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086621, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5C53E3277E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:57:03 10.42.7.64 Aug  7 00:16:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371051 1 0 2014-08-07 00:16:15.827 -07:00 0011270497 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271067, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570453E327BF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:57:08 10.42.7.63 Aug  7 00:16:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001971096 1 0 2014-08-07 00:16:20.857 -07:00 0098703837 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086806, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5D53E327C4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:57:24 10.34.84.145 Aug  7 00:16:36 stage-pdp01 CISE_Failed_Attempts 0000024697 1 0 2014-08-07 00:16:36.602 -07:00 0000287553 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19384, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:57:26 10.34.84.145 Aug  7 00:16:38 stage-pdp01 CISE_Failed_Attempts 0000024698 1 0 2014-08-07 00:16:38.605 -07:00 0000287557 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19385, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:57:28 10.34.84.145 Aug  7 00:16:40 stage-pdp01 CISE_Failed_Attempts 0000024699 1 0 2014-08-07 00:16:40.609 -07:00 0000287561 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19386, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:57:30 10.34.84.145 Aug  7 00:16:42 stage-pdp01 CISE_Failed_Attempts 0000024700 1 0 2014-08-07 00:16:42.613 -07:00 0000287565 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19387, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:57:32 10.34.84.145 Aug  7 00:16:44 stage-pdp01 CISE_Failed_Attempts 0000024701 1 0 2014-08-07 00:16:44.613 -07:00 0000287569 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19388, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:57:34 10.34.84.145 Aug  7 00:16:46 stage-pdp01 CISE_Failed_Attempts 0000024702 1 0 2014-08-07 00:16:46.617 -07:00 0000287573 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19389, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:58:03 10.42.7.64 Aug  7 00:17:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371063 1 0 2014-08-07 00:17:15.966 -07:00 0011270832 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.34.76.212, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=hslai, Protocol=Radius, RequestLatency=25, NetworkDeviceName=sjcm-00a-npf-wlc1, User-Name=hslai, NAS-IP-Address=10.34.76.212, NAS-Port=1, Framed-IP-Address=10.34.94.11, Class=CACS:0a224cd40002fdf953e327f2:npf-sjca-pdp02/195481465/271072, Called-Station-ID=88-43-e1-62-1d-20, Calling-Station-ID=24-a2-e1-3b-4b-cb, NAS-Identifier=sjcm-00a-npf-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=5198, Acct-Output-Octets=4093, Acct-Session-Id=53e327f2/24:a2:e1:3b:4b:cb/174403, Acct-Authentic=RADIUS, Acct-Session-Time=9, Acct-Input-Packets=37, Acct-Output-Packets=13, undefined-52
 =
+Aug  6 16:58:13 10.42.7.64 Aug  7 00:17:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371065 1 0 2014-08-07 00:17:25.902 -07:00 0011270838 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271076, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570553E32805, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:58:18 10.42.7.63 Aug  7 00:17:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001971204 1 0 2014-08-07 00:17:30.916 -07:00 0098707928 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086981, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5E53E3280A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:59:23 10.42.7.64 Aug  7 00:18:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371070 1 0 2014-08-07 00:18:35.942 -07:00 0011271044 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271081, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570653E3284B, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:59:28 10.42.7.64 Aug  7 00:18:40 npf-sjca-pdp02 CISE_Failed_Attempts 0000371072 1 0 2014-08-07 00:18:40.669 -07:00 0011271053 5400 NOTICE Failed-Attempt: Authentication failed, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=istern, Protocol=Radius, RequestLatency=12, NetworkDeviceName=NTN-WLC1, User-Name=istern, NAS-IP-Address=10.56.129.4, NAS-Port=1, Service-Type=Framed, Framed-MTU=1300, State=37CPMSessionID=0a388104000045de53e2c750\;41SessionID=npf-sjca-pdp02/195481465/271077\;, Called-Station-ID=70-10-5c-f3-2f-80:alpha_example, Calling-Station-ID=f0-27-65-48-8c-8f, NAS-Identifier=ntn01-11a-wlc1, NAS-Port-Type=Wireless - IEEE 802.11, Tunnel-Type=(tag=0) VLAN, Tunnel-Medium-Type=(tag=0) 802, Tunnel-Private-Group-ID=(tag=0) 604, undefined-89=
+Aug  6 16:59:28 10.42.7.63 Aug  7 00:18:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001971282 1 0 2014-08-07 00:18:40.981 -07:00 0098711291 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087140, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5F53E32850, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 17:00:33 10.42.7.64 Aug  7 00:19:46 npf-sjca-pdp02 CISE_Failed_Attempts 0000371080 1 0 2014-08-07 00:19:46.020 -07:00 0011271232 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271087, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570753E32892, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 17:00:36 10.34.84.145 Aug  7 00:19:48 stage-pdp01 CISE_Failed_Attempts 0000024712 1 0 2014-08-07 00:19:48.660 -07:00 0000287604 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19396, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:00:38 10.34.84.145 Aug  7 00:19:50 stage-pdp01 CISE_Failed_Attempts 0000024713 1 0 2014-08-07 00:19:50.664 -07:00 0000287608 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19397, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:00:39 10.42.7.63 Aug  7 00:19:51 npf-sjca-pdp01 CISE_Failed_Attempts 0001971393 1 0 2014-08-07 00:19:51.042 -07:00 0098716185 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087311, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D6053E32897, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 17:00:40 10.34.84.145 Aug  7 00:19:52 stage-pdp01 CISE_Failed_Attempts 0000024714 1 0 2014-08-07 00:19:52.664 -07:00 0000287612 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19398, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:00:42 10.34.84.145 Aug  7 00:19:54 stage-pdp01 CISE_Failed_Attempts 0000024715 1 0 2014-08-07 00:19:54.668 -07:00 0000287616 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19399, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:00:44 10.34.84.145 Aug  7 00:19:56 stage-pdp01 CISE_Failed_Attempts 0000024716 1 0 2014-08-07 00:19:56.672 -07:00 0000287620 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19400, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:00:46 10.34.84.145 Aug  7 00:19:58 stage-pdp01 CISE_Failed_Attempts 0000024717 1 0 2014-08-07 00:19:58.675 -07:00 0000287624 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19401, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 17:01:44 10.42.7.64 Aug  7 00:20:56 npf-sjca-pdp02 CISE_Failed_Attempts 0000371095 1 0 2014

<TRUNCATED>


[17/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patters/sourcefire
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patters/sourcefire b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patters/sourcefire
deleted file mode 100644
index 672f684..0000000
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patters/sourcefire
+++ /dev/null
@@ -1,30 +0,0 @@
-POSINT \b(?:[1-9][0-9]*)\b
-NONNEGINT \b(?:[0-9]+)\b
-WORD \b\w+\b
-NOTSPACE \S+
-SPACE \s*
-DATA .*?
-GREEDYDATA .*
-QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
-UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
-
-# Networking
-MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
-CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
-WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
-COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
-IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5
 ]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
-IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
-IP (?:%{IPV6}|%{IPV4})
-HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
-HOST %{HOSTNAME}
-IPORHOST (?:%{HOSTNAME}|%{IP})
-HOSTPORT %{IPORHOST}:%{POSINT}
-
-#Sourcefire Logs
-protocol \{[a-zA-Z0-9]+\}
-ip_src_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
-ip_dst_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
-ip_src_port [0-9]+
-ip_dst_port [0-9]+
-SOURCEFIRE %{GREEDYDATA}%{protocol}\s%{ip_src_addr}\:%{ip_src_port}\s->\s%{ip_dst_addr}\:%{ip_dst_port}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicBroParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicBroParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicBroParserTest.java
new file mode 100644
index 0000000..e581299
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicBroParserTest.java
@@ -0,0 +1,103 @@
+package com.opensoc.parsing.test;
+
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import com.opensoc.parsing.parsers.BasicBroParser;
+
+public class BasicBroParserTest extends TestCase {
+
+	/**
+	 * The parser.
+	 */
+	private BasicBroParser broParser = null;
+	private JSONParser jsonParser = null;
+
+	/**
+	 * Constructs a new <code>BasicBroParserTest</code> instance.
+	 * 
+	 * @throws Exception
+	 */
+	public BasicBroParserTest() throws Exception {
+		broParser = new BasicBroParser();
+		jsonParser = new JSONParser();		
+	}
+
+	@SuppressWarnings("rawtypes")
+	public void testHttpBroMessage() throws ParseException {
+		String rawMessage = "{\"http\":{\"ts\":1402307733473,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}";
+		
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+		
+		JSONObject broJson = broParser.parse(rawMessage.getBytes());
+		assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("id.resp_h").toString());
+		assertEquals(broJson.get("ip_src_port").toString(), rawJson.get("id.orig_p").toString());
+		assertEquals(broJson.get("ip_dst_port").toString(), rawJson.get("id.resp_p").toString());
+		assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+		
+		assertEquals(broJson.get("uid").toString(), rawJson.get("uid").toString());
+		assertEquals(broJson.get("method").toString(), rawJson.get("method").toString());
+		assertEquals(broJson.get("host").toString(), rawJson.get("host").toString());
+		assertEquals(broJson.get("resp_mime_types").toString(), rawJson.get("resp_mime_types").toString());
+	}
+	
+	@SuppressWarnings("rawtypes")
+	public void testDnsBroMessage() throws ParseException {
+		String rawMessage = "{\"dns\":{\"ts\":1402308259609,\"uid\":\"CuJT272SKaJSuqO0Ia\",\"id.orig_h\":\"10.122.196.204\",\"id.orig_p\":33976,\"id.resp_h\":\"144.254.71.184\",\"id.resp_p\":53,\"proto\":\"udp\",\"trans_id\":62418,\"query\":\"www.cisco.com\",\"qclass\":1,\"qclass_name\":\"C_INTERNET\",\"qtype\":28,\"qtype_name\":\"AAAA\",\"rcode\":0,\"rcode_name\":\"NOERROR\",\"AA\":true,\"TC\":false,\"RD\":true,\"RA\":true,\"Z\":0,\"answers\":[\"www.cisco.com.akadns.net\",\"origin-www.cisco.com\",\"2001:420:1201:2::a\"],\"TTLs\":[3600.0,289.0,14.0],\"rejected\":false}}";
+		
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+		
+		JSONObject broJson = broParser.parse(rawMessage.getBytes());
+		assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("id.resp_h").toString());
+		assertEquals(broJson.get("ip_src_port").toString(), rawJson.get("id.orig_p").toString());
+		assertEquals(broJson.get("ip_dst_port").toString(), rawJson.get("id.resp_p").toString());
+		assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+		
+		assertEquals(broJson.get("qtype").toString(), rawJson.get("qtype").toString());
+		assertEquals(broJson.get("trans_id").toString(), rawJson.get("trans_id").toString());
+	}
+	
+	@SuppressWarnings("rawtypes")
+	public void testFilesBroMessage() throws ParseException {
+		String rawMessage = "{\"files\":{\"analyzers\": [\"X509\",\"MD5\",\"SHA1\"],\"conn_uids\":[\"C4tygJ3qxJBEJEBCeh\"],\"depth\": 0,\"duration\": 0.0,\"fuid\":\"FZEBC33VySG0nHSoO9\",\"is_orig\": false,\"local_orig\": false,\"md5\": \"eba37166385e3ef42464ed9752e99f1b\",\"missing_bytes\": 0,\"overflow_bytes\": 0,\"rx_hosts\": [\"10.220.15.205\"],\"seen_bytes\": 1136,\"sha1\": \"73e42686657aece354fbf685712361658f2f4357\",\"source\": \"SSL\",\"timedout\": false,\"ts\": \"1425845251334\",\"tx_hosts\": [\"68.171.237.7\"]}}";
+		
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+		
+		JSONObject broJson = broParser.parse(rawMessage.getBytes());
+		assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		assertEquals(broJson.get("ip_src_addr").toString(), ((JSONArray)rawJson.get("tx_hosts")).get(0).toString());
+		assertEquals(broJson.get("ip_dst_addr").toString(), ((JSONArray)rawJson.get("rx_hosts")).get(0).toString());
+		assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+		
+		assertEquals(broJson.get("fuid").toString(), rawJson.get("fuid").toString());
+		assertEquals(broJson.get("md5").toString(), rawJson.get("md5").toString());
+		assertEquals(broJson.get("analyzers").toString(), rawJson.get("analyzers").toString());
+	}
+	
+	@SuppressWarnings("rawtypes")
+	public void testProtocolKeyCleanedUp() throws ParseException {
+		String rawMessage = "{\"ht*tp\":{\"ts\":1402307733473,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}";
+		
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+		
+		JSONObject broJson = broParser.parse(rawMessage.getBytes());
+		
+		assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		assertTrue(broJson.get("original_string").toString().startsWith("HTTP"));
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicFireEyeParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicFireEyeParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicFireEyeParserTest.java
new file mode 100644
index 0000000..463890b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicFireEyeParserTest.java
@@ -0,0 +1,141 @@
+/**
+ * 
+ */
+package com.opensoc.parsing.test;
+
+
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import com.opensoc.parsing.parsers.BasicFireEyeParser;
+import com.opensoc.test.AbstractConfigTest;
+
+/**
+ * <ul>
+ * <li>Title: Test For BasicFireEyeParser</li>
+ * <li>Description: </li>
+ * <li>Created: July 8, 2014</li>
+ * </ul>
+ * @version $Revision: 1.0 $
+ */
+public class BasicFireEyeParserTest extends AbstractConfigTest
+{
+   /**
+    * The inputStrings.
+    */
+    private static String[] inputStrings;
+ 
+   /**
+    * The parser.
+    */
+    private BasicFireEyeParser parser=null;
+
+	
+   /**
+    * Constructs a new <code>BasicFireEyeParserTest</code> instance.
+    * @throws Exception
+    */ 
+    public BasicFireEyeParserTest() throws Exception {
+        super();
+    }
+
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public void setUp() throws Exception {
+        super.setUp("com.opensoc.parsing.test.BasicFireEyeParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        parser = new BasicFireEyeParser();  
+	}
+
+	/**
+	 * 	
+	 * 	
+	 * @throws java.lang.Exception
+	 */
+	public void tearDown() throws Exception {
+		parser = null;
+        setInputStrings(null);		
+	}
+
+	/**
+	 * Test method for {@link com.opensoc.parsing.parsers.BasicFireEyeParser#parse(java.lang.String)}.
+	 */
+	@SuppressWarnings({ "rawtypes"})
+	public void testParse() {
+		for (String inputString : getInputStrings()) {
+			JSONObject parsed = parser.parse(inputString.getBytes());
+			assertNotNull(parsed);
+		
+			JSONParser parser = new JSONParser();
+
+			Map json=null;
+			try {
+				json = (Map) parser.parse(parsed.toJSONString());
+			} catch (ParseException e) {
+				e.printStackTrace();
+			}
+			Iterator iter = json.entrySet().iterator();
+			
+			assertNotNull(json);
+			assertFalse(json.isEmpty());
+			
+
+			while (iter.hasNext()) {
+				Map.Entry entry = (Map.Entry) iter.next();
+				String key = (String) entry.getKey();
+				String value = (String) json.get(key).toString();
+				assertNotNull(value);
+			}
+		}
+	}
+
+	/**
+	 * Returns Input String
+	 */
+	public static String[] getInputStrings() {
+		return inputStrings;
+	}
+		
+	/**
+	 * Sets FireEye Input String
+	 */	
+	public static void setInputStrings(String[] strings) {
+		BasicFireEyeParserTest.inputStrings = strings;
+	}
+	
+    /**
+     * Returns the parser.
+     * @return the parser.
+     */
+    public BasicFireEyeParser getParser() {
+        return parser;
+    }
+
+    /**
+     * Sets the parser.
+     * @param parser the parser.
+     */
+     public void setParser(BasicFireEyeParser parser) {
+    
+        this.parser = parser;
+     }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicIseParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicIseParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicIseParserTest.java
index 5d58cc5..1a872c2 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicIseParserTest.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicIseParserTest.java
@@ -16,22 +16,17 @@
  */
 package com.opensoc.parsing.test;
 
-import java.io.BufferedReader;
-import java.io.FileReader;
 import java.io.IOException;
 import java.net.URL;
-
-import junit.framework.TestCase;
+import java.util.Map;
 
 import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.github.fge.jsonschema.main.JsonValidator;
 import com.opensoc.parsing.parsers.BasicIseParser;
+import com.opensoc.test.AbstractSchemaTest;
+
 
 /**
  * <ul>
@@ -42,11 +37,18 @@ import com.opensoc.parsing.parsers.BasicIseParser;
  * 
  * @version $Revision: 1.1 $
  */
-public class BasicIseParserTest extends TestCase {
-	private static String rawMessage = "";
 
-	private static BasicIseParser iseParser = null;
-	private static String schema_string;
+public class BasicIseParserTest extends AbstractSchemaTest {
+    /**
+     * The inputStrings.
+     */
+     private static String[] inputStrings;   
+
+	 /**
+	 * The parser.
+	 */
+	private static BasicIseParser parser = null;
+
 
 	/**
 	 * Constructs a new <code>BasicIseParserTest</code> instance.
@@ -63,8 +65,6 @@ public class BasicIseParserTest extends TestCase {
 	 * @throws java.lang.Exception
 	 */
 	protected static void setUpBeforeClass() throws Exception {
-		setRawMessage("Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\\,PostureApplicable=Yes\\,PolicyVersion=402\\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\\,Total Certainty Factor=10\\,BYODRegistration=Unknown\\,FeedService=false\\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\\,FirstCollection=1407397543718\\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\\,TimeToProfile=19\\,StaticGroupAssignment=false\\,NmapSubnetScanID=0\\,DeviceRegistrationStatus=NotRegistered\\,PortalUser=, EndpointSourceEvent=SNMPQuery Prob
 e, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com,");
-
 	}
 
 	/**
@@ -72,7 +72,6 @@ public class BasicIseParserTest extends TestCase {
 	 * @throws java.lang.Exception
 	 */
 	protected static void tearDownAfterClass() throws Exception {
-		setRawMessage("");
 	}
 
 	/*
@@ -82,14 +81,13 @@ public class BasicIseParserTest extends TestCase {
 	 */
 
 	protected void setUp() throws Exception {
-		super.setUp();
-		assertNotNull(getRawMessage());
-		BasicIseParserTest.setIseParser(new BasicIseParser());
+        super.setUp("com.opensoc.parsing.test.BasicLancopeParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        BasicIseParserTest.setIseParser(new BasicIseParser());
 		
 		URL schema_url = getClass().getClassLoader().getResource(
 				"TestSchemas/IseSchema.json");
-		
-		 schema_string = readSchemaFromFile(schema_url);
+		 super.setSchemaJsonString(super.readSchemaFromFile(schema_url));
 	}
 
 	/*
@@ -110,44 +108,21 @@ public class BasicIseParserTest extends TestCase {
 	 * @throws Exception
 	 */
 	public void testParse() throws ParseException, IOException, Exception {
-		// JSONObject parsed = iseParser.parse(getRawMessage().getBytes());
-		// assertNotNull(parsed);
-
-		URL log_url = getClass().getClassLoader().getResource("IseSample.log");
-
-		BufferedReader br = new BufferedReader(new FileReader(log_url.getFile()));
-		String line = "";
-		while ((line = br.readLine()) != null) {
-			System.out.println(line);
-			JSONObject parsed = iseParser.parse(line.getBytes());
-			System.out.println(parsed);
-			assertEquals(true, validateJsonData(schema_string, parsed.toString()));
-
-		}
-		br.close();
-
-	}
-
-	/**
-	 * Returns the rawMessage.
-	 * 
-	 * @return the rawMessage.
-	 */
-
-	public static String getRawMessage() {
-		return rawMessage;
-	}
-
-	/**
-	 * Sets the rawMessage.
-	 * 
-	 * @param rawMessage
-	 *            the rawMessage.
-	 */
-
-	public static void setRawMessage(String rawMessage) {
-
-		BasicIseParserTest.rawMessage = rawMessage;
+        for (String inputString : getInputStrings()) {
+            JSONObject parsed = parser.parse(inputString.getBytes());
+            assertNotNull(parsed);
+        
+            System.out.println(parsed);
+            JSONParser parser = new JSONParser();
+
+            Map<?, ?> json=null;
+            try {
+                json = (Map<?, ?>) parser.parse(parsed.toJSONString());
+                assertEquals(true, validateJsonData(super.getSchemaJsonString(), json.toString()));
+            } catch (ParseException e) {
+                e.printStackTrace();
+            }
+        }
 	}
 
 	/**
@@ -157,54 +132,38 @@ public class BasicIseParserTest extends TestCase {
 	 */
 
 	public BasicIseParser getIseParser() {
-		return iseParser;
+		return parser;
 	}
 
 	/**
 	 * Sets the iseParser.
 	 * 
 	 * @param iseParser
-	 *            the iseParser.
 	 */
 
-	public static void setIseParser(BasicIseParser iseParser) {
-
-		BasicIseParserTest.iseParser = iseParser;
-	}
-
-	private boolean validateJsonData(final String jsonSchema, final String jsonData)
-			throws Exception {
-
-		final JsonNode d = JsonLoader.fromString(jsonData);
-		final JsonNode s = JsonLoader.fromString(jsonSchema);
-
-		final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
-		JsonValidator v = factory.getValidator();
 
-		ProcessingReport report = v.validate(s, d);
-		System.out.println(report);
-		
-		return report.toString().contains("success");
+	public static void setIseParser(BasicIseParser parser) {
 
+		BasicIseParserTest.parser = parser;
 	}
+   /**
+    * Returns the inputStrings.
+    * @return the inputStrings.
+    */
+   
+   public static String[] getInputStrings() {
+       return inputStrings;
+   }
+
+   /**
+    * Sets the inputStrings.
+    * @param inputStrings the inputStrings.
+    */
+   
+   public static void setInputStrings(String[] inputStrings) {
+       BasicIseParserTest.inputStrings = inputStrings;
+   }   
 
-	private String readSchemaFromFile(URL schema_url) throws Exception {
-		BufferedReader br = new BufferedReader(new FileReader(
-				schema_url.getFile()));
-		String line;
-		StringBuilder sb = new StringBuilder();
-		while ((line = br.readLine()) != null) {
-			System.out.println(line);
-			sb.append(line);
-		}
-		br.close();
-
-		String schema_string = sb.toString().replaceAll("\n", "");
-		schema_string = schema_string.replaceAll(" ", "");
 
-		System.out.println("Read in schema: " + schema_string);
 
-		return schema_string;
-
-	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicLancopeParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicLancopeParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicLancopeParserTest.java
index cfdf6e1..126b6be 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicLancopeParserTest.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicLancopeParserTest.java
@@ -16,16 +16,16 @@
  */
 package com.opensoc.parsing.test;
 
-import java.util.Iterator;
+import java.io.IOException;
+import java.net.URL;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
 import com.opensoc.parsing.parsers.BasicLancopeParser;
+import com.opensoc.test.AbstractSchemaTest;
 
  /**
  * <ul>
@@ -35,10 +35,18 @@ import com.opensoc.parsing.parsers.BasicLancopeParser;
  * </ul>
  * @version $Revision: 1.1 $
  */
-public class BasicLancopeParserTest extends TestCase {
+public class BasicLancopeParserTest extends AbstractSchemaTest {
+    
+    /**
+     * The inputStrings.
+     */
+     private static String[] inputStrings;    
 
-    private  static String rawMessage = "";
-    private static BasicLancopeParser lancopeParser=null;   
+
+    /**
+     * The parser.
+     */
+    private static BasicLancopeParser parser=null;   
 
     /**
      * Constructs a new <code>BasicLancopeParserTest</code> instance.
@@ -53,8 +61,7 @@ public class BasicLancopeParserTest extends TestCase {
      
      * @throws java.lang.Exception
      */
-    protected static void setUpBeforeClass() throws Exception {
-        setRawMessage("{\"message\":\"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.\",\"@version\":\"1\",\"@timestamp\":\"2014-07-17T15:56:05.992Z\",\"type\":\"syslog\",\"host\":\"10.122.196.201\"}");        
+    protected static void setUpBeforeClass() throws Exception {        
     }
 
     /**
@@ -70,10 +77,13 @@ public class BasicLancopeParserTest extends TestCase {
      */
 
     protected void setUp() throws Exception {
-        super.setUp();
-        setRawMessage("{\"message\":\"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.\",\"@version\":\"1\",\"@timestamp\":\"2014-07-17T15:56:05.992Z\",\"type\":\"syslog\",\"host\":\"10.122.196.201\"}");        
-        assertNotNull(getRawMessage());
-        BasicLancopeParserTest.setLancopeParser(new BasicLancopeParser());        
+        super.setUp("com.opensoc.parsing.test.BasicLancopeParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        BasicLancopeParserTest.setParser(new BasicLancopeParser());   
+        
+        URL schema_url = getClass().getClassLoader().getResource(
+            "TestSchemas/LancopeSchema.json");
+        super.setSchemaJsonString(super.readSchemaFromFile(schema_url));      
     }
 
     /* 
@@ -87,70 +97,64 @@ public class BasicLancopeParserTest extends TestCase {
 
     /**
      * Test method for {@link com.opensoc.parsing.parsers.BasicLancopeParser#parse(byte[])}.
+     * @throws Exception 
+     * @throws IOException 
      */
-    public void testParse() {
-        byte messages[] = getRawMessage().getBytes();
-        assertNotNull(messages);        
-        JSONObject parsed = lancopeParser.parse(getRawMessage().getBytes());
-        assertNotNull(parsed);
+    public void testParse() throws IOException, Exception {
         
-        System.out.println(parsed);
-        JSONParser parser = new JSONParser();
+        for (String inputString : getInputStrings()) {
+            JSONObject parsed = parser.parse(inputString.getBytes());
+            assertNotNull(parsed);
         
-        Map json=null;
-        try {
-            json = (Map) parser.parse(parsed.toJSONString());
-        } catch (ParseException e) {
-            e.printStackTrace();
+            System.out.println(parsed);
+            JSONParser parser = new JSONParser();
+
+            Map<?, ?> json=null;
+            try {
+                json = (Map<?, ?>) parser.parse(parsed.toJSONString());
+                assertEquals(true, validateJsonData(super.getSchemaJsonString(), json.toString()));
+            } catch (ParseException e) {
+                e.printStackTrace();
+            }
         }
-        Iterator iter = json.entrySet().iterator();
-            
-
-        while (iter.hasNext()) {
-            Map.Entry entry = (Map.Entry) iter.next();
-            String key = (String) entry.getKey();
-            assertNotNull((String) json.get("original_string").toString());
-            
-            assertNotNull((String)json.get("ip_src_addr").toString());
-            assertNotNull((String)json.get("ip_dst_addr").toString());            
-        }        
-    }
-    
-    /**
-     * Returns the rawMessage.
-     * @return the rawMessage.
-     */
-    
-    public static String getRawMessage() {
-        return BasicLancopeParserTest.rawMessage;
     }
 
     /**
-     * Sets the rawMessage.
-     * @param rawMessage the rawMessage.
-     */
-    
-    public static void setRawMessage(String rawMessage) {
-    
-        BasicLancopeParserTest.rawMessage = rawMessage;
-    }
-    /**
-     * Returns the lancopeParser.
-     * @return the lancopeParser.
-     */
-    
-    public static BasicLancopeParser getLancopeParser() {
-        return lancopeParser;
-    }
-
-    /**
-     * Sets the lancopeParser.
-     * @param lancopeParser the lancopeParser.
-     */
-    
-    public static void setLancopeParser(BasicLancopeParser lancopeParser) {
-    
-        BasicLancopeParserTest.lancopeParser = lancopeParser;
-    }
+    * Returns the parser.
+    * @return the parser.
+    */
+   
+   public static BasicLancopeParser getParser() {
+       return parser;
+   }
+
+   /**
+    * Sets the parser.
+    * @param parser the parser.
+    */
+   
+   public static void setParser(BasicLancopeParser parser) {
+   
+       BasicLancopeParserTest.parser = parser;
+   }
+
+   /**
+    * Returns the inputStrings.
+    * @return the inputStrings.
+    */
+   
+   public static String[] getInputStrings() {
+       return inputStrings;
+   }
+
+   /**
+    * Sets the inputStrings.
+    * @param inputStrings the inputStrings.
+    */
+   
+   public static void setInputStrings(String[] inputStrings) {
+   
+       BasicLancopeParserTest.inputStrings = inputStrings;
+   }   
 }
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicPaloAltoFirewallParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicPaloAltoFirewallParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicPaloAltoFirewallParserTest.java
new file mode 100644
index 0000000..23203b0
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicPaloAltoFirewallParserTest.java
@@ -0,0 +1,136 @@
+package com.opensoc.parsing.test;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import com.opensoc.parsing.parsers.BasicPaloAltoFirewallParser;
+import com.opensoc.test.AbstractConfigTest;
+
+public class BasicPaloAltoFirewallParserTest extends AbstractConfigTest {
+    /**
+    * The inputStrings.
+    */
+   private static String[] inputStrings;
+
+    /**
+     * Constructs a new <code>BasicPaloAltoFirewallParserTest</code> instance.
+     * @throws Exception
+     */ 
+    public BasicPaloAltoFirewallParserTest() throws Exception {
+        super();        
+    }
+
+     /**
+     * Sets the inputStrings.
+     * @param inputStrings the inputStrings.
+     */
+        
+    public static void setInputStrings(String[] inputStrings) {
+    
+        BasicPaloAltoFirewallParserTest.inputStrings = inputStrings;
+    }
+
+     /**
+     * The paParser.
+     */
+    private BasicPaloAltoFirewallParser paParser=null;
+
+		/**
+		 * @throws java.lang.Exception
+		 */
+		public static void setUpBeforeClass() throws Exception {
+		}
+
+		/**
+		 * @throws java.lang.Exception
+		 */
+		public static void tearDownAfterClass() throws Exception {
+			setPAStrings(null);
+		}
+
+		/**
+		 * @throws java.lang.Exception
+		 */
+		public void setUp() throws Exception {
+	          super.setUp("com.opensoc.parsing.test.BasicPaloAltoFirewallParserTest");
+	          setPAStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+	          paParser = new BasicPaloAltoFirewallParser();           
+		}
+
+		/**
+		 * 	
+		 * 	
+		 * @throws java.lang.Exception
+		 */
+		public void tearDown() throws Exception {
+			paParser = null;
+		}
+
+		/**
+		 * Test method for {@link com.opensoc.parsing.parsers.BasicPaloAltoFirewallParser#parse(byte[])}.
+		 */
+		@SuppressWarnings({ "rawtypes" })
+		public void testParse() {
+			for (String inputString : getInputStrings()) {
+				JSONObject parsed = paParser.parse(inputString.getBytes());
+				assertNotNull(parsed);
+			
+				System.out.println(parsed);
+				JSONParser parser = new JSONParser();
+
+				Map json=null;
+				try {
+					json = (Map) parser.parse(parsed.toJSONString());
+				} catch (ParseException e) {
+					e.printStackTrace();
+				}
+				Iterator iter = json.entrySet().iterator();
+				
+
+				while (iter.hasNext()) {
+					Map.Entry entry = (Map.Entry) iter.next();
+					String key = (String) entry.getKey();
+					String value = (String) json.get(key).toString();
+					assertNotNull(value);
+				}
+			}
+		}
+
+		/**
+		 * Returns  Input String
+		 */
+		public static String[] getInputStrings() {
+			return inputStrings;
+		}
+
+			
+		/**
+		 * Sets  Input String
+		 */	
+		public static void setPAStrings(String[] strings) {
+			BasicPaloAltoFirewallParserTest.inputStrings = strings;
+		}
+        
+        /**
+         * Returns the paParser.
+         * @return the paParser.
+         */
+        public BasicPaloAltoFirewallParser getPaParser() {
+            return paParser;
+        }
+
+        /**
+         * Sets the paParser.
+         * @param paParser the paParser.
+         */
+        
+        public void setPaParser(BasicPaloAltoFirewallParser paParser) {
+        
+            this.paParser = paParser;
+        }
+
+	}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicSourcefireParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicSourcefireParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicSourcefireParserTest.java
index 4faeb39..15c90e2 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicSourcefireParserTest.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BasicSourcefireParserTest.java
@@ -8,13 +8,12 @@ package com.opensoc.parsing.test;
 import java.util.Iterator;
 import java.util.Map;
 
-import junit.framework.TestCase;
-
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
 import com.opensoc.parsing.parsers.BasicSourcefireParser;
+import com.opensoc.test.AbstractConfigTest;
 
 /**
  * <ul>
@@ -24,14 +23,28 @@ import com.opensoc.parsing.parsers.BasicSourcefireParser;
  * </ul>
  * @version $Revision: 1.0 $
  */
-public class BasicSourcefireParserTest extends TestCase
-	{
-
-	private  static String sourceFireString = "";
-	private BasicSourcefireParser sourceFireParser=null;
-
-
-
+public class BasicSourcefireParserTest extends AbstractConfigTest
+{
+     /**
+     * The sourceFireStrings.
+     */    
+    private static String[] sourceFireStrings;
+    
+     /**
+     * The sourceFireParser.
+     */
+    private BasicSourcefireParser sourceFireParser=null;
+
+
+    /**
+     * Constructs a new <code>BasicSourcefireParserTest</code> instance.
+     * @throws Exception
+     */
+     
+    public BasicSourcefireParserTest() throws Exception {
+        super();  
+    }
+    
 	/**
 	 * @throws java.lang.Exception
 	 */
@@ -42,15 +55,16 @@ public class BasicSourcefireParserTest extends TestCase
 	 * @throws java.lang.Exception
 	 */
 	public static void tearDownAfterClass() throws Exception {
-		setSourceFireString("");
+		setSourceFireStrings(null);
 	}
 
 	/**
 	 * @throws java.lang.Exception
 	 */
 	public void setUp() throws Exception {
-		setSourceFireString("SFIMS: [Primary Detection Engine (a7213248-6423-11e3-8537-fac6a92b7d9d)][MTD Access Control] Connection Type: Start, User: Unknown, Client: Unknown, Application Protocol: Unknown, Web App: Unknown, Firewall Rule Name: MTD Access Control, Firewall Rule Action: Allow, Firewall Rule Reasons: Unknown, URL Category: Unknown, URL_Reputation: Risk unknown, URL: Unknown, Interface Ingress: s1p1, Interface Egress: N/A, Security Zone Ingress: Unknown, Security Zone Egress: N/A, Security Intelligence Matching IP: None, Security Intelligence Category: None, {TCP} 72.163.0.129:60517 -> 10.1.128.236:443");		assertNotNull(getSourceFireString());
-		sourceFireParser = new BasicSourcefireParser();		
+        super.setUp("com.opensoc.parsing.test.BasicSoureceFireParserTest");
+        setSourceFireStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        sourceFireParser = new BasicSourcefireParser();
 	}
 
 	/**
@@ -67,41 +81,62 @@ public class BasicSourcefireParserTest extends TestCase
 	 */
 	@SuppressWarnings({ "rawtypes", "unused" })
 	public void testParse() {
-		JSONObject parsed = sourceFireParser.parse(getSourceFireString().getBytes());
-		assertNotNull(parsed);
+		for (String sourceFireString : getSourceFireStrings()) {
+		    byte[] srcBytes = sourceFireString.getBytes();
+			JSONObject parsed = sourceFireParser.parse(sourceFireString.getBytes());
+			assertNotNull(parsed);
 		
-		System.out.println(parsed);
-		JSONParser parser = new JSONParser();
-
-		Map json=null;
-		try {
-			json = (Map) parser.parse(parsed.toJSONString());
-		} catch (ParseException e) {
-			e.printStackTrace();
-		}
-		Iterator iter = json.entrySet().iterator();
+			System.out.println(parsed);
+			JSONParser parser = new JSONParser();
+
+			Map json=null;
+			try {
+				json = (Map) parser.parse(parsed.toJSONString());
+			} catch (ParseException e) {
+				e.printStackTrace();
+			}
+			Iterator iter = json.entrySet().iterator();
 			
 
-		while (iter.hasNext()) {
-			Map.Entry entry = (Map.Entry) iter.next();
-			String key = (String) entry.getKey();
-            String value = (String) json.get("original_string").toString();
-			assertNotNull(value);
+			while (iter.hasNext()) {
+				Map.Entry entry = (Map.Entry) iter.next();
+				String key = (String) entry.getKey();
+				String value = (String) json.get("original_string").toString();
+				assertNotNull(value);
+			}
 		}
 	}
 
 	/**
 	 * Returns SourceFire Input String
 	 */
-	public static String getSourceFireString() {
-		return sourceFireString;
+	public static String[] getSourceFireStrings() {
+		return sourceFireStrings;
 	}
 
 		
 	/**
 	 * Sets SourceFire Input String
 	 */	
-	public static void setSourceFireString(String sourceFireString) {
-		BasicSourcefireParserTest.sourceFireString = sourceFireString;
+	public static void setSourceFireStrings(String[] strings) {
+		BasicSourcefireParserTest.sourceFireStrings = strings;
 	}
+    /**
+    * Returns the sourceFireParser.
+    * @return the sourceFireParser.
+    */
+   
+   public BasicSourcefireParser getSourceFireParser() {
+       return sourceFireParser;
+   }
+
+   /**
+    * Sets the sourceFireParser.
+    * @param sourceFireParser the sourceFireParser.
+    */
+   
+   public void setSourceFireParser(BasicSourcefireParser sourceFireParser) {
+   
+       this.sourceFireParser = sourceFireParser;
+   }	
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BroParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BroParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BroParserTest.java
index 6742011..6c800d1 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BroParserTest.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/BroParserTest.java
@@ -11,9 +11,8 @@ import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
-import junit.framework.TestCase;
-
 import com.opensoc.parsing.parsers.BasicBroParser;
+import com.opensoc.test.AbstractConfigTest;
 
 /**
  * <ul>
@@ -23,15 +22,34 @@ import com.opensoc.parsing.parsers.BasicBroParser;
  * </ul>
  * @version $Revision: 1.0 $
  */
-public class BroParserTest extends TestCase {
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: </li>
+ * <li>Created: Feb 20, 2015 </li>
+ * </ul>
+ * @author $Author: $
+ * @version $Revision: 1.1 $
+ */
+public class BroParserTest extends AbstractConfigTest {
+	
 	
-	private static String broJsonString="";
-	private static BasicBroParser broParser=null;
+	/**
+	 * The inputStrings.
+	 */
+	private static String[] inputStrings;
+
+     /**
+     * The parser.
+     */
+    private BasicBroParser parser=null;
 	
     /**
      * Constructs a new <code>BroParserTest</code> instance.
+     * @throws Exception 
      */
-    public BroParserTest() {
+    public BroParserTest() throws Exception {
         super();
     }	
 
@@ -40,23 +58,21 @@ public class BroParserTest extends TestCase {
 	 * @throws java.lang.Exception
 	 */
 	public static void setUpBeforeClass() throws Exception {
-		
 	}
 
 	/**
 	 * @throws java.lang.Exception
 	 */
 	public static void tearDownAfterClass() throws Exception {
-		setBroJsonString("");
 	}
 
 	/**
 	 * @throws java.lang.Exception
 	 */
 	public void setUp() throws Exception {
-	    setBroJsonString("{\"http\":{\"ts\":1402307733473,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}");	    
-		assertNotNull(getBroJsonString());
-		BroParserTest.setBroParser(new BasicBroParser());		
+        super.setUp("com.opensoc.parsing.test.BroParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        parser = new BasicBroParser();  
 	}
 	
 	/**
@@ -67,56 +83,64 @@ public class BroParserTest extends TestCase {
 	@SuppressWarnings({ "unused", "rawtypes" })
 	public void testParse() throws ParseException {
 
+		for (String inputString : getInputStrings()) {
+			JSONObject cleanJson = parser.parse(inputString.getBytes());
+			assertNotNull(cleanJson);
+			System.out.println(cleanJson);
 
-		BasicBroParser broparser = new BasicBroParser();
-		assertNotNull(getBroJsonString());
-		JSONObject cleanJson = broparser.parse(getBroJsonString().getBytes());
-        assertNotNull(cleanJson);		
-		System.out.println(cleanJson);
+			Pattern p = Pattern.compile("[^\\._a-z0-9 ]",
+					Pattern.CASE_INSENSITIVE);
 
+			JSONParser parser = new JSONParser();
 
-		Pattern p = Pattern.compile("[^\\._a-z0-9 ]", Pattern.CASE_INSENSITIVE);
+			Map json = (Map) cleanJson;
+			Map output = new HashMap();
+			Iterator iter = json.entrySet().iterator();
 
-		JSONParser parser = new JSONParser();
+			while (iter.hasNext()) {
+				Map.Entry entry = (Map.Entry) iter.next();
+				String key = (String) entry.getKey();
 
-		Map json = (Map) cleanJson;
-		Map output = new HashMap();
-		Iterator iter = json.entrySet().iterator();
-
-		while (iter.hasNext()) {
-			Map.Entry entry = (Map.Entry) iter.next();
-			String key = (String) entry.getKey();
-
-			Matcher m = p.matcher(key);
-			boolean b = m.find();
-			// Test False
-			assertFalse(b);
+				Matcher m = p.matcher(key);
+				boolean b = m.find();
+				// Test False
+				assertFalse(b);
+			}
 		}
 
 	}
-    /**
-     * Returns the instance of BroParser
-     */
-	public static BasicBroParser getBroParser() {
-		return broParser;
+
+	/**
+	 * Returns Input String
+	 */
+	public static String[] getInputStrings() {
+		return inputStrings;
 	}
-    /**
-     * Sets the instance of BroParser
-     */
-	public static void setBroParser(BasicBroParser broParser) {
-		BroParserTest.broParser = broParser;
+
+	/**
+	 * Sets SourceFire Input String
+	 */
+	public static void setInputStrings(String[] strings) {
+		BroParserTest.inputStrings = strings;
 	}
+	
     /**
-     * Return BroPaser JSON String
+     * Returns the parser.
+     * @return the parser.
      */
-	public static String getBroJsonString() {
-		return BroParserTest.broJsonString;
-	}
+    
+    public BasicBroParser getParser() {
+        return parser;
+    }
+
 
     /**
-     * Sets BroPaser JSON String
+     * Sets the parser.
+     * @param parser the parser.
      */
-	public static void setBroJsonString(String broJsonString) {
-		BroParserTest.broJsonString = broJsonString;
-	}	
+    
+    public void setParser(BasicBroParser parser) {
+    
+        this.parser = parser;
+    }	
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/GrokAsaParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/GrokAsaParserTest.java b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/GrokAsaParserTest.java
new file mode 100644
index 0000000..3719634
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/java/com/opensoc/parsing/test/GrokAsaParserTest.java
@@ -0,0 +1,149 @@
+package com.opensoc.parsing.test;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import com.opensoc.parsing.parsers.GrokAsaParser;
+import com.opensoc.test.AbstractConfigTest;
+
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: </li>
+ * <li>Created: Feb 17, 2015 by: </li>
+ * </ul>
+ * @author $Author:  $
+ * @version $Revision: 1.1 $
+ */
+public class GrokAsaParserTest extends AbstractConfigTest{
+     /**
+     * The grokAsaStrings.
+     */
+    private static String[] grokAsaStrings=null;
+ 
+     /**
+     * The grokAsaParser.
+     */
+     
+    private GrokAsaParser grokAsaParser=null;
+    
+     /**
+     * Constructs a new <code>GrokAsaParserTest</code> instance.
+     * @throws Exception
+     */
+     
+    public GrokAsaParserTest() throws Exception {
+          super();  
+        
+    }
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void tearDownAfterClass() throws Exception {
+		setGrokAsaStrings(null);
+	}
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#setUp()
+     */
+	public void setUp() throws Exception {
+          super.setUp("com.opensoc.parsing.test.GrokAsaParserTest");
+          setGrokAsaStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+          grokAsaParser = new GrokAsaParser();		
+	}
+
+		/**
+		 * 	
+		 * 	
+		 * @throws java.lang.Exception
+		 */
+		public void tearDown() throws Exception {
+			grokAsaParser = null;
+		}
+
+		/**
+		 * Test method for {@link com.opensoc.parsing.parsers.BasicSourcefireParser#parse(java.lang.String)}.
+		 */
+		@SuppressWarnings({ "rawtypes" })
+		public void testParse() {
+		    
+			for (String grokAsaString : getGrokAsaStrings()) {
+				JSONObject parsed = grokAsaParser.parse(grokAsaString.getBytes());
+				assertNotNull(parsed);
+			
+				System.out.println(parsed);
+				JSONParser parser = new JSONParser();
+
+				Map json=null;
+				try {
+					json = (Map) parser.parse(parsed.toJSONString());
+				} catch (ParseException e) {
+					e.printStackTrace();
+				}
+				//Ensure JSON returned is not null/empty
+				assertNotNull(json);
+				
+				Iterator iter = json.entrySet().iterator();
+				
+
+				while (iter.hasNext()) {
+					Map.Entry entry = (Map.Entry) iter.next();
+					assertNotNull(entry);
+					
+					String key = (String) entry.getKey();
+					assertNotNull(key);
+					
+					String value = (String) json.get("CISCO_TAGGED_SYSLOG").toString();
+					assertNotNull(value);
+				}
+			}
+		}
+
+		/**
+		 * Returns GrokAsa Input String
+		 */
+		public static String[] getGrokAsaStrings() {
+			return grokAsaStrings;
+		}
+
+			
+		/**
+		 * Sets GrokAsa Input String
+		 */	
+		public static void setGrokAsaStrings(String[] strings) {
+			GrokAsaParserTest.grokAsaStrings = strings;
+		}
+	    
+	    /**
+	     * Returns the grokAsaParser.
+	     * @return the grokAsaParser.
+	     */
+	    
+	    public GrokAsaParser getGrokAsaParser() {
+	        return grokAsaParser;
+	    }
+
+
+	    /**
+	     * Sets the grokAsaParser.
+	     * @param grokAsaParser the grokAsaParser.
+	     */
+	    
+	    public void setGrokAsaParser(GrokAsaParser grokAsaParser) {
+	    
+	        this.grokAsaParser = grokAsaParser;
+	    }
+		
+	}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/BroParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/BroParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/BroParserTest.log
new file mode 100644
index 0000000..e71f28e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/BroParserTest.log
@@ -0,0 +1,3 @@
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"files":{"analyzers": ["X509","MD5","SHA1"],"conn_uids":["C4tygJ3qxJBEJEBCeh"],"depth": 0,"duration": 0.0,"fuid":"FZEBC33VySG0nHSoO9","is_orig": false,"local_orig": false,"md5": "eba37166385e3ef42464ed9752e99f1b","missing_bytes": 0,"overflow_bytes": 0,"protocol": "files","rx_hosts": ["10.220.15.205"],"seen_bytes": 1136,"sha1": "73e42686657aece354fbf685712361658f2f4357","source": "SSL","timedout": false,"ts": "1425845251334","tx_hosts": ["68.171.237.7"]}}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/FireEyeParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/FireEyeParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/FireEyeParserTest.log
new file mode 100644
index 0000000..6d7f04b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/FireEyeParserTest.log
@@ -0,0 +1,8 @@
+<164>Mar 19 05:24:39 10.220.15.15 fenotify-851983.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:28:26 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54527 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851983 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\\=851983 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>Mar 19 05:24:39 10.220.15.15 fenotify-851987.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:33:41 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51218 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851987 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\\=851987 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS
+<164>Mar 19 05:24:39 10.220.15.15 fenotify-3483808.2.alert: 1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/mic
 roads/update/InjectScript.js HTTP
+<164>Mar 19 05:24:39 10.220.15.15 fenotify-793972.2.alert: Control: no-cache::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Exploit.Kit.Magnitude
+<161>Apr  1 05:24:39 10.220.15.15 fenotify-864461.alert: CEF:0|FireEye|CMS|7.5.1.318703|DM|domain-match|1|rt=Mar 19 2015 12:23:47 UTC src=10.191.193.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=abc123.example.com proto=udp spt=60903 cs5Label=cncHost cs5=mfdclk001.org dvchost=ABC123 dvc=10.190.1.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=864461 cs4Label=link cs4=https:\/\/ABC123.example.com\/event_stream\/events_for_bot?ev_id\\=864461 act=notified dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS
+fireeye[-]: <161>Mar 19 05:24:39 10.220.15.15 fenotify-864461.alert: CEF:0|FireEye|CMS|7.5.1.318703|DM|domain-match|1|rt=Mar 19 2015 12:23:47 UTC src=10.191.193.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=abc123.example.com proto=udp spt=60903 cs5Label=cncHost cs5=mfdclk001.org dvchost=ABC123 dvc=10.190.1.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=864461 cs4Label=link cs4=https:\/\/ABC123.example.com\/event_stream\/events_for_bot?ev_id\\=864461 act=notified dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS  
+fireeye[-]: <161>Apr  1 02:49:49 10.220.15.15 fenotify-900702.alert: CEF:0|FireEye|CMS|7.5.1.318703|DM|domain-match|1|rt=Apr 01 2015 09:49:14 UTC src=10.1.97.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=abcd0060xzy03.example.com proto=udp spt=63100 cs5Label=cncHost cs5=mfdclk001.org dvchost=DEV1FEYE1 dvc=10.220.15.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=900702 cs4Label=link cs4=https://ABCD0040CMS01.example.com/event_stream/events_for_bot?ev_id\=900702 act=notified dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS   
+<161>Apr 11 05:24:39 10.220.15.15 fenotify-864461.alert: CEF:0|FireEye|CMS|7.5.1.318703|DM|domain-match|1|rt=Mar 19 2015 12:23:47 UTC src=10.191.193.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=abc123.example.com proto=udp spt=60903 cs5Label=cncHost cs5=mfdclk001.org dvchost=ABC123 dvc=10.190.1.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=864461 cs4Label=link cs4=https:\/\/ABC123.example.com\/event_stream\/events_for_bot?ev_id\\=864461 act=notified dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS  

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/GrokParserTest.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/GrokParserTest.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/GrokParserTest.log
new file mode 100644
index 0000000..3141d75
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/GrokParserTest.log
@@ -0,0 +1,12 @@
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168292 for DMZ-Inside:10.22.8.51/51231 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2103 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 186.111.72.11/80 to 204.111.72.226/45019 flags SYN ACK  on interface Outside_VPN
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604987 for outside:209.111.72.151/443 to inside:10.22.8.188/64306 duration 0:00:31 bytes 10128 TCP FINs
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604999 for outside:209.111.72.151/443 to inside:10.22.8.188/64307 duration 0:00:30 bytes 6370 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167347 for Outside_VPN:198.111.72.24/2134 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9785 TCP FINs
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245506 for outside:10.22.8.110/49886 (10.22.8.110/49886) to inside:192.111.72.8/8612 (192.111.72.8/8612) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805993 for outside:10.22.8.89/56917(LOCAL\\user.name) to inside:216.111.72.126/443 duration 0:00:00 bytes 0 TCP FINs (user.name)",
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-710005: UDP request discarded from 10.22.8.223/49192 to outside:224.111.72.252/5355
+<167>Jan  5 08:52:32 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609001: Built local-host inside:10.22.8.205
+<166>Jan  5 15:52:35 10.22.8.33 : %ASA-6-305012: Teardown dynamic UDP translation from inside:192.111.72.2/62251 to outside:79.111.72.174/21311 duration 0:02:30
+<158>Mar  6 07:30:00 NSAN2FWMDF1 : %ASA-6-302021: Teardown ICMP connection for faddr 10.220.5.50/50074 gaddr 10.220.19.147/0 laddr 10.220.19.147/0
\ No newline at end of file


[12/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/.pmd
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/.pmd b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/.pmd
deleted file mode 100644
index b4dd643..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/.pmd
+++ /dev/null
@@ -1,1262 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<pmd>
-    <useProjectRuleSet>false</useProjectRuleSet>
-    <ruleSetFile>.ruleset</ruleSetFile>
-    <rules>
-        <rule>
-            <name>IfStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>IfElseStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>WhileLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>NullAssignment</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>OnlyOneReturn</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentInOperand</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AtLeastOneConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DontImportSun</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousOctalEscape</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperInConstructor</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryParentheses</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DefaultPackage</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanInversion</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DataflowAnomalyAnalysis</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFinalLocalVariable</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingShortType</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingVolatile</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingNativeCode</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAccessibilityAlteration</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotCallGarbageCollectionExplicitly</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>OneDeclarationPerLine</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidPrefixingMethodParameters</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidLiteralsInIfCondition</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UseObjectForClearerAPI</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UseConcurrentHashMap</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedPrivateField</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedLocalVariable</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedPrivateMethod</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedFormalParameter</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedModifier</name>
-            <ruleset>Unused Code</ruleset>
-        </rule>
-        <rule>
-            <name>MethodReturnsInternalArray</name>
-            <ruleset>Security Code Guidelines</ruleset>
-        </rule>
-        <rule>
-            <name>ArrayIsStoredDirectly</name>
-            <ruleset>Security Code Guidelines</ruleset>
-        </rule>
-        <rule>
-            <name>ProperCloneImplementation</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>CloneThrowsCloneNotSupportedException</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>CloneMethodMustImplementCloneable</name>
-            <ruleset>Clone Implementation</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitStaticSuite</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitSpelling</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitAssertionsShouldIncludeMessage</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitTestsShouldIncludeAssert</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>TestClassWithoutTestCases</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryBooleanAssertion</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertEqualsInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertSameInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertNullInsteadOfAssertTrue</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanAssertion</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitTestContainsTooManyAsserts</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>UseAssertTrueInsteadOfAssertEquals</name>
-            <ruleset>JUnit</ruleset>
-        </rule>
-        <rule>
-            <name>CommentRequired</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>CommentSize</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>CommentContent</name>
-            <ruleset>Comments</ruleset>
-        </rule>
-        <rule>
-            <name>ShortVariable</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>LongVariable</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ShortMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>VariableNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MethodNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ClassNamingConventions</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractNaming</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDollarSigns</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MethodWithSameNameAsEnclosingClass</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousHashcodeMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousConstantFieldName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>SuspiciousEqualsMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFieldNameMatchingTypeName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidFieldNameMatchingMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>NoPackage</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>PackageCase</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>MisleadingVariableName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanGetMethodName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>ShortClassName</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>GenericsNaming</name>
-            <ruleset>Naming</ruleset>
-        </rule>
-        <rule>
-            <name>DuplicateImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>DontImportJavaLang</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>ImportFromSamePackage</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyStaticImports</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryFullyQualifiedName</name>
-            <ruleset>Import Statements</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceVectorWithList</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceHashtableWithMap</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ReplaceEnumerationWithIterator</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidEnumAsIdentifier</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAssertAsIdentifier</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>IntegerInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ByteInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>ShortInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>LongInstantiation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseBeforeAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseAfterAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4TestShouldUseTestAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnit4SuitesShouldUseSuiteAnnotation</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>JUnitUseExpected</name>
-            <ruleset>Migration</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryParentheses</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryBlock</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>DontNestJsfInJstlIteration</name>
-            <ruleset>Basic JSF</ruleset>
-        </rule>
-        <rule>
-            <name>MistypedCDATASection</name>
-            <ruleset>Basic XML</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyCatchBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyIfStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyWhileStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyTryBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyFinallyBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySwitchStatements</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySynchronizedBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementNotInLoop</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStaticInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperFirst</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>CallSuperLast</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotHardCodeSDCard</name>
-            <ruleset>Android</ruleset>
-        </rule>
-        <rule>
-            <name>JumbledIncrementer</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopShouldBeWhileLoop</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>OverrideBothEqualsAndHashcode</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DoubleCheckedLocking</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ReturnFromFinallyBlock</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>UnconditionalIfStatement</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BooleanInstantiation</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CollapsibleIfStatements</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ClassCastExceptionWithToArray</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDecimalLiteralsInBigDecimalConstructor</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>MisplacedNullCheck</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThreadGroup</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BrokenNullCheck</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>BigIntegerInstantiation</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingOctalValues</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidUsingHardCodedIP</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CheckResultSet</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidMultipleUnaryOperators</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>ExtendsObject</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>CheckSkipResult</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidBranchingStatementAsLastInLoop</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DontCallThreadRun</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>DontUseFloatTypeForLoopIndices</name>
-            <ruleset>Basic</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyCatchBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyIfStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyWhileStmt</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyTryBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyFinallyBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySwitchStatements</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptySynchronizedBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementNotInLoop</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStatementBlock</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyStaticInitializer</name>
-            <ruleset>Empty Code</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryConversionTemporary</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryReturn</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryFinalModifier</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOverridingMethod</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOperationOnImmutable</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedNullCheckInEquals</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessParentheses</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>CouplingBetweenObjects</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveImports</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LooseCoupling</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LoosePackageCoupling</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>LawOfDemeter</name>
-            <ruleset>Coupling</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryConversionTemporary</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryReturn</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryFinalModifier</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOverridingMethod</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessOperationOnImmutable</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UnusedNullCheckInEquals</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>UselessParentheses</name>
-            <ruleset>Unnecessary</ruleset>
-        </rule>
-        <rule>
-            <name>NoLongScripts</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoScriptlets</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoInlineStyleInformation</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoClassAttribute</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoJspForward</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>IframeMissingSrcAttribute</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoHtmlComments</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>DuplicateJspImports</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>JspEncoding</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>NoInlineScript</name>
-            <ruleset>Basic JSP</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingThrowable</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>SignatureDeclareThrowsException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>ExceptionAsFlowControl</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingNPE</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingRawExceptionTypes</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingNullPointerException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidRethrowingException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotExtendJavaLangError</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotThrowExceptionInFinally</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidThrowingNewInstanceOfSameException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCatchingGenericException</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidLosingExceptionInformation</name>
-            <ruleset>Strict Exceptions</ruleset>
-        </rule>
-        <rule>
-            <name>UseCorrectExceptionLogging</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>ProperLogger</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>GuardDebugLogging</name>
-            <ruleset>Jakarta Commons Logging</ruleset>
-        </rule>
-        <rule>
-            <name>LocalVariableCouldBeFinal</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>MethodArgumentCouldBeFinal</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidInstantiatingObjectsInLoops</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseArrayListInsteadOfVector</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyStartsWith</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseStringBufferForStringAppends</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseArraysAsList</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidArrayLoops</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryWrapperObjectCreation</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>AddEmptyString</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>RedundantFieldInitializer</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>PrematureDeclaration</name>
-            <ruleset>Optimization</ruleset>
-        </rule>
-        <rule>
-            <name>UseProperClassLoader</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>MDBAndSessionBeanNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>RemoteSessionInterfaceNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>LocalInterfaceSessionNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>LocalHomeNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>RemoteInterfaceNamingConvention</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotCallSystemExit</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>StaticEJBFieldShouldBeFinal</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>DoNotUseThreads</name>
-            <ruleset>J2EE</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDuplicateLiterals</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringInstantiation</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringToString</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InefficientStringBuffering</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryCaseChange</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseStringBufferLength</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>AppendCharacterWithChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>ConsecutiveLiteralAppends</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseIndexOfChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InefficientEmptyStringCheck</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>InsufficientStringBufferDeclaration</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UselessStringValueOf</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>StringBufferInstantiationWithChar</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>UseEqualsToCompareStrings</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidStringBufferField</name>
-            <ruleset>String and StringBuffer</ruleset>
-        </rule>
-        <rule>
-            <name>MoreThanOneLogger</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>LoggerIsNotStaticFinal</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>SystemPrintln</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidPrintStackTrace</name>
-            <ruleset>Java Logging</ruleset>
-        </rule>
-        <rule>
-            <name>UseConcatOnce</name>
-            <ruleset>XPath in XSL</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidAxisNavigation</name>
-            <ruleset>XPath in XSL</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentInOperand</name>
-            <ruleset>Controversial</ruleset>
-        </rule>
-        <rule>
-            <name>UnreachableCode</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>InnaccurateNumericLiteral</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>ConsistentReturn</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>ScopeForInVariable</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>EqualComparison</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>GlobalVariable</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidTrailingComma</name>
-            <ruleset>Basic Ecmascript</ruleset>
-        </rule>
-        <rule>
-            <name>IfStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>WhileLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>IfElseStmtsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>ForLoopsMustUseBraces</name>
-            <ruleset>Braces</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyFinalizer</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeOnlyCallsSuperFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeOverloaded</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeDoesNotCallSuperFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>FinalizeShouldBeProtected</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidCallingFinalize</name>
-            <ruleset>Finalizer</ruleset>
-        </rule>
-        <rule>
-            <name>UseSingleton</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanReturns</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyBooleanExpressions</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SwitchStmtsShouldHaveDefault</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidDeeplyNestedIfStmts</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidReassigningParameters</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SwitchDensity</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ConstructorCallsOverridableMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AccessorClassGeneration</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>FinalFieldCouldBeStatic</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>CloseResource</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonStaticInitializer</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>DefaultLabelNotLastInSwitchStmt</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonCaseLabelInSwitchStatement</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>OptimizableToArrayCall</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>BadComparison</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>EqualsNull</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ConfusingTernary</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>InstantiationToGetClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>IdempotentOperations</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimpleDateFormatNeedsLocale</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ImmutableField</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseLocaleWithCaseConversions</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidProtectedFieldInFinalClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AssignmentToNonFinalStatic</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>MissingStaticMethodInNonInstantiatableClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidSynchronizedAtMethodLevel</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>MissingBreakInSwitch</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseNotifyAllInsteadOfNotify</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidInstanceofChecksInCatchClause</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractClassWithoutAbstractMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SimplifyConditional</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>CompareObjectsWithEquals</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>PositionLiteralsFirstInComparisons</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UnnecessaryLocalBeforeReturn</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NonThreadSafeSingleton</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UncommentedEmptyMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UncommentedEmptyConstructor</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AvoidConstantsInterface</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UnsynchronizedStaticDateFormatter</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>PreserveStackTrace</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseCollectionIsEmpty</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ClassWithOnlyPrivateConstructorsShouldBeFinal</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>EmptyMethodInAbstractClassShouldBeAbstract</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>SingularField</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>ReturnEmptyArrayRatherThanNull</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>AbstractClassWithoutAnyMethod</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>TooFewBranchesForASwitchStatement</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>LogicInversion</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>UseVarargs</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>FieldDeclarationsShouldBeAtStartOfClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>GodClass</name>
-            <ruleset>Design</ruleset>
-        </rule>
-        <rule>
-            <name>NPathComplexity</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveMethodLength</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveParameterList</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessiveClassLength</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>CyclomaticComplexity</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>ExcessivePublicCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyFields</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssMethodCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssTypeCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>NcssConstructorCount</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>TooManyMethods</name>
-            <ruleset>Code Size</ruleset>
-        </rule>
-        <rule>
-            <name>BeanMembersShouldSerialize</name>
-            <ruleset>JavaBeans</ruleset>
-        </rule>
-        <rule>
-            <name>MissingSerialVersionUID</name>
-            <ruleset>JavaBeans</ruleset>
-        </rule>
-    </rules>
-    <includeDerivedFiles>false</includeDerivedFiles>
-    <violationsAsErrors>true</violationsAsErrors>
-    <fullBuildEnabled>true</fullBuildEnabled>
-</pmd>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/README.txt
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/README.txt b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/README.txt
deleted file mode 100644
index 8aba23e..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/README.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-'hbase' module of 'opensoc' project contains the code to communicate with HBase. This module has several APIs ( refer IPcapGetter.java, IPcapScanner.java files ) 
-to fetch pcaps from HBase. Following APIs have been created under this module implementation.
-
-APIs ( in IPcapGetter.java) to get pcaps using keys :
- 1. public PcapsResponse getPcaps(List<String> keys, String lastRowKey, long startTime, long endTime, boolean includeReverseTraffic, boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
- 2. public PcapsResponse getPcaps(String key, long startTime, long endTime, boolean includeReverseTraffic) throws IOException;
- 3. public PcapsResponse getPcaps(List<String> keys) throws IOException;
- 4. public PcapsResponse getPcaps(String key) throws IOException;
-
-APIs ( in IPcapScanner.java) to get pcaps using key range :
- 1. public byte[] getPcaps(String startKey, String endKey, long maxResponseSize, long startTime, long endTime) throws IOException;
- 2. public byte[] getPcaps(String startKey, String endKey) throws IOException;
- 
- 
-Refer the wiki documentation for further details : https://hwcsco.atlassian.net/wiki/pages/viewpage.action?pageId=5242892
- 	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/dependency-reduced-pom.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/dependency-reduced-pom.xml
deleted file mode 100644
index 11efb2f..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/dependency-reduced-pom.xml
+++ /dev/null
@@ -1,230 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <parent>
-    <artifactId>managed-threat</artifactId>
-    <groupId>cisco</groupId>
-    <version>0.0.4-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>cisco-hbase</groupId>
-  <artifactId>cisco-hbase</artifactId>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-client</artifactId>
-      <version>0.96.0.2.0.6.0-76-hadoop2</version>
-      <scope>provided</scope>
-      <exclusions>
-        <exclusion>
-          <artifactId>hbase-common</artifactId>
-          <groupId>org.apache.hbase</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hbase-protocol</artifactId>
-          <groupId>org.apache.hbase</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-codec</artifactId>
-          <groupId>commons-codec</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-io</artifactId>
-          <groupId>commons-io</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-lang</artifactId>
-          <groupId>commons-lang</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-logging</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>guava</artifactId>
-          <groupId>com.google.guava</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>protobuf-java</artifactId>
-          <groupId>com.google.protobuf</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>netty</artifactId>
-          <groupId>io.netty</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>zookeeper</artifactId>
-          <groupId>org.apache.zookeeper</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>htrace-core</artifactId>
-          <groupId>org.cloudera.htrace</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jackson-mapper-asl</artifactId>
-          <groupId>org.codehaus.jackson</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hadoop-auth</artifactId>
-          <groupId>org.apache.hadoop</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hadoop-mapreduce-client-core</artifactId>
-          <groupId>org.apache.hadoop</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hadoop-annotations</artifactId>
-          <groupId>org.apache.hadoop</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>findbugs-annotations</artifactId>
-          <groupId>com.github.stephenc.findbugs</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>junit</artifactId>
-          <groupId>junit</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>2.2.0.2.0.6.0-76</version>
-      <scope>provided</scope>
-      <exclusions>
-        <exclusion>
-          <artifactId>commons-cli</artifactId>
-          <groupId>commons-cli</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-math</artifactId>
-          <groupId>org.apache.commons</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>xmlenc</artifactId>
-          <groupId>xmlenc</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-httpclient</artifactId>
-          <groupId>commons-httpclient</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-net</artifactId>
-          <groupId>commons-net</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>servlet-api</artifactId>
-          <groupId>javax.servlet</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jetty</artifactId>
-          <groupId>org.mortbay.jetty</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jetty-util</artifactId>
-          <groupId>org.mortbay.jetty</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-core</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-json</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jersey-server</artifactId>
-          <groupId>com.sun.jersey</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jasper-compiler</artifactId>
-          <groupId>tomcat</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jasper-runtime</artifactId>
-          <groupId>tomcat</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jsp-api</artifactId>
-          <groupId>javax.servlet.jsp</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-el</artifactId>
-          <groupId>commons-el</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jets3t</artifactId>
-          <groupId>net.java.dev.jets3t</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-configuration</artifactId>
-          <groupId>commons-configuration</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>slf4j-api</artifactId>
-          <groupId>org.slf4j</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>slf4j-log4j12</artifactId>
-          <groupId>org.slf4j</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jackson-core-asl</artifactId>
-          <groupId>org.codehaus.jackson</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>avro</artifactId>
-          <groupId>org.apache.avro</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jsch</artifactId>
-          <groupId>com.jcraft</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-compress</artifactId>
-          <groupId>org.apache.commons</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hadoop-annotations</artifactId>
-          <groupId>org.apache.hadoop</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>guava</artifactId>
-          <groupId>com.google.guava</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-codec</artifactId>
-          <groupId>commons-codec</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-io</artifactId>
-          <groupId>commons-io</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-logging</artifactId>
-          <groupId>commons-logging</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-lang</artifactId>
-          <groupId>commons-lang</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jackson-mapper-asl</artifactId>
-          <groupId>org.codehaus.jackson</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>protobuf-java</artifactId>
-          <groupId>com.google.protobuf</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hadoop-auth</artifactId>
-          <groupId>org.apache.hadoop</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>zookeeper</artifactId>
-          <groupId>org.apache.zookeeper</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-  </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/pom.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/pom.xml
deleted file mode 100644
index f1f471f..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/pom.xml
+++ /dev/null
@@ -1,137 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<parent>
-		<groupId>com.cisco.opensoc</groupId>
-		<artifactId>opensoc</artifactId>
-		<version>0.1.3-SNAPSHOT</version>
-		<relativePath>../../../pom.xml</relativePath>
-	</parent>
-	<artifactId>opensoc-hbase</artifactId>
-
-	<dependencies>
-		<dependency>
-			<groupId>commons-beanutils</groupId>
-			<artifactId>commons-beanutils</artifactId>
-			<version>${commons-beanutils.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-jexl</artifactId>
-			<version>${commons-jexl.version}</version>
-		</dependency>
-
-		<dependency>
-			<artifactId>commons-configuration</artifactId>
-			<groupId>commons-configuration</groupId>
-			<version>${commons-configuration.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<version>${junit.version}</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.powermock</groupId>
-			<artifactId>powermock-api-mockito</artifactId>
-			<version>1.5</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.powermock</groupId>
-			<artifactId>powermock-core</artifactId>
-			<version>1.5</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.powermock</groupId>
-			<artifactId>powermock-module-junit4</artifactId>
-			<version>1.5</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>joda-time</groupId>
-			<artifactId>joda-time</artifactId>
-			<version>2.3</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hbase</groupId>
-			<artifactId>hbase-client</artifactId>
-			<version>${hbase.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hbase</groupId>
-			<artifactId>hbase-testing-util</artifactId>
-			<version>${hbase.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-common</artifactId>
-			<version>${hadoop.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hadoop</groupId>
-			<artifactId>hadoop-hdfs</artifactId>
-			<version>${hadoop.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>com.cisco.opensoc</groupId>
-			<artifactId>opensoc-common</artifactId>
-			<version>${project.parent.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>com.cisco.opensoc</groupId>
-			<artifactId>opensoc-pcap</artifactId>
-			<version>${project.parent.version}</version>
-			<exclusions>
-				<exclusion>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-mapreduce-client-common</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-common</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-core</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-		<dependency>
-			<groupId>org.springframework.integration</groupId>
-			<artifactId>spring-integration-http</artifactId>
-			<version>${spring.integration.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>org.springframework</groupId>
-			<artifactId>spring-webmvc</artifactId>
-			<version>${spring.version}</version>
-		</dependency>
-		<dependency>
-			<groupId>log4j</groupId>
-			<artifactId>log4j</artifactId>
-			<version>${logger.version}</version>
-			<exclusions>
-				<exclusion>
-					<groupId>com.sun.jmx</groupId>
-					<artifactId>jmxri</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.sun.jdmk</groupId>
-					<artifactId>jmxtools</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>javax.jms</groupId>
-					<artifactId>jms</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-
-	</dependencies>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/CellTimestampComparator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/CellTimestampComparator.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/CellTimestampComparator.java
deleted file mode 100644
index 18bf0e5..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/CellTimestampComparator.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.util.Comparator;
-
-import org.apache.hadoop.hbase.Cell;
-
-/**
- * Comparator created for sorting pcaps cells based on the timestamp (dsc).
- * 
- * @author Sayi
- */
-public class CellTimestampComparator implements Comparator<Cell> {
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
-   */
-  @Override
-  public int compare(Cell o1, Cell o2) {
-    return Long.valueOf(o2.getTimestamp()).compareTo(o1.getTimestamp());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/ConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/ConfigurationUtil.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/ConfigurationUtil.java
deleted file mode 100644
index 7a1d486..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/ConfigurationUtil.java
+++ /dev/null
@@ -1,267 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import org.apache.commons.configuration.Configuration;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.springframework.util.Assert;
-
-import com.cisco.opensoc.common.config.ConfigurationManager;
-
-/**
- * utility class for this module which loads commons configuration to fetch
- * properties from underlying resources to communicate with hbase.
- * 
- * @author Sayi
- */
-public class ConfigurationUtil {
-
-	/** Configuration definition file name for fetching pcaps from hbase */
-	private static final String configDefFileName = "config-definition-hbase.xml";
-	
-	/** property configuration. */
-	private static Configuration propConfiguration = null;
-
-
-	/**
-	 * The Enum SizeUnit.
-	 */
-	public enum SizeUnit {
-
-		/** The kb. */
-		KB,
-		/** The mb. */
-		MB
-	};
-
-	/** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
-	private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
-
-	/**
-	 * Loads configuration resources 
-	 * @return Configuration
-	 */
-	public static Configuration getConfiguration() {
-		if(propConfiguration == null){
-			propConfiguration =  ConfigurationManager.getConfiguration(configDefFileName);
-		}
-		return propConfiguration;
-	}
-
-	/**
-	 * Returns the configured default result size in bytes, if the user input is
-	 * null; otherwise, returns the user input after validating with the
-	 * configured max value. Throws IllegalArgumentException if : 1. input is
-	 * less than or equals to 0 OR 2. input is greater than configured
-	 * {hbase.scan.max.result.size} value
-	 * 
-	 * @param input
-	 *            the input
-	 * @return long
-	 */
-	public static long validateMaxResultSize(String input) {
-		if (input == null) {
-			return getDefaultResultSize();
-		}
-		// validate the user input
-		long value = convertToBytes(Long.parseLong(input), getResultSizeUnit());
-		Assert.isTrue(
-				isAllowableResultSize(value),
-				"'maxResponseSize' param value must be positive and less than {hbase.scan.max.result.size} value");
-		return convertToBytes(value, getResultSizeUnit());
-	}
-
-	/**
-	 * Checks if is allowable result size.
-	 * 
-	 * @param input
-	 *            the input
-	 * @return true, if is allowable result size
-	 */
-	public static boolean isAllowableResultSize(long input) {
-		if (input <= 0 || input > getMaxResultSize()) {
-			return false;
-		}
-		return true;
-	}
-
-	/**
-	 * Returns the configured default result size in bytes.
-	 * 
-	 * @return long
-	 */
-	public static long getDefaultResultSize() {
-		float value = ConfigurationUtil.getConfiguration().getFloat(
-				"hbase.scan.default.result.size");
-		return convertToBytes(value, getResultSizeUnit());
-	}
-
-	/**
-	 * Returns the configured max result size in bytes.
-	 * 
-	 * @return long
-	 */
-	public static long getMaxResultSize() {
-		float value = ConfigurationUtil.getConfiguration().getFloat(
-				"hbase.scan.max.result.size");
-		return convertToBytes(value, getResultSizeUnit());
-	}
-
-	/**
-	 * Returns the configured max row size in bytes.
-	 * 
-	 * @return long
-	 */
-	public static long getMaxRowSize() {
-		float maxRowSize = ConfigurationUtil.getConfiguration().getFloat(
-				"hbase.table.max.row.size");
-		return convertToBytes(maxRowSize, getRowSizeUnit());
-	}
-
-	/**
-	 * Gets the result size unit.
-	 * 
-	 * @return the result size unit
-	 */
-	public static SizeUnit getResultSizeUnit() {
-		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
-				.getString("hbase.scan.result.size.unit"));
-	}
-
-	/**
-	 * Gets the row size unit.
-	 * 
-	 * @return the row size unit
-	 */
-	public static SizeUnit getRowSizeUnit() {
-		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
-				.getString("hbase.table.row.size.unit"));
-	}
-
-	/**
-	 * Gets the connection retry limit.
-	 * 
-	 * @return the connection retry limit
-	 */
-	public static int getConnectionRetryLimit() {
-		return ConfigurationUtil.getConfiguration().getInt(
-				"hbase.hconnection.retries.number",
-				DEFAULT_HCONNECTION_RETRY_LIMIT);
-	}
-
-	/**
-	 * Checks if is default include reverse traffic.
-	 * 
-	 * @return true, if is default include reverse traffic
-	 */
-	public static boolean isDefaultIncludeReverseTraffic() {
-		return ConfigurationUtil.getConfiguration().getBoolean(
-				"pcaps.include.reverse.traffic");
-	}
-
-	/**
-	 * Gets the table name.
-	 * 
-	 * @return the table name
-	 */
-	public static byte[] getTableName() {
-		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
-				"hbase.table.name"));
-	}
-
-	/**
-	 * Gets the column family.
-	 * 
-	 * @return the column family
-	 */
-	public static byte[] getColumnFamily() {
-		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
-				"hbase.table.column.family"));
-	}
-
-	/**
-	 * Gets the column qualifier.
-	 * 
-	 * @return the column qualifier
-	 */
-	public static byte[] getColumnQualifier() {
-		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
-				"hbase.table.column.qualifier"));
-	}
-
-	/**
-	 * Gets the max versions.
-	 * 
-	 * @return the max versions
-	 */
-	public static int getMaxVersions() {
-		return ConfigurationUtil.getConfiguration().getInt(
-				"hbase.table.column.maxVersions");
-	}
-
-	/**
-	 * Gets the configured tokens in rowkey.
-	 * 
-	 * @return the configured tokens in rowkey
-	 */
-	public static int getConfiguredTokensInRowkey() {
-		return ConfigurationUtil.getConfiguration().getInt(
-				"hbase.table.row.key.tokens");
-	}
-
-	/**
-	 * Gets the minimum tokens in inputkey.
-	 * 
-	 * @return the minimum tokens in inputkey
-	 */
-	public static int getMinimumTokensInInputkey() {
-		return ConfigurationUtil.getConfiguration().getInt(
-				"rest.api.input.key.min.tokens");
-	}
-
-	/**
-	 * Gets the appending token digits.
-	 * 
-	 * @return the appending token digits
-	 */
-	public static int getAppendingTokenDigits() {
-		return ConfigurationUtil.getConfiguration().getInt(
-				"hbase.table.row.key.token.appending.digits");
-	}
-
-	/**
-	 * Convert to bytes.
-	 * 
-	 * @param value
-	 *            the value
-	 * @param unit
-	 *            the unit
-	 * @return the long
-	 */
-	public static long convertToBytes(float value, SizeUnit unit) {
-		if (SizeUnit.KB == unit) {
-			return (long) (value * 1024);
-		}
-		if (SizeUnit.MB == unit) {
-			return (long) (value * 1024 * 1024);
-		}
-		return (long) value;
-	}
-
-	/**
-	 * The main method.
-	 * 
-	 * @param args
-	 *            the arguments
-	 */
-	public static void main(String[] args) {
-		long r1 = getMaxRowSize();
-		System.out.println("getMaxRowSizeInBytes = " + r1);
-		long r2 = getMaxResultSize();
-		System.out.println("getMaxAllowableResultSizeInBytes = " + r2);
-
-		SizeUnit u1 = getRowSizeUnit();
-		System.out.println("getMaxRowSizeUnit = " + u1.toString());
-		SizeUnit u2 = getResultSizeUnit();
-		System.out.println("getMaxAllowableResultsSizeUnit = " + u2.toString());
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigConstants.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigConstants.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigConstants.java
deleted file mode 100644
index 826bdda..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigConstants.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-/**
- * HBase configuration properties.
- * 
- * @author Sayi
- */
-public class HBaseConfigConstants {
-
-  /** The Constant HBASE_ZOOKEEPER_QUORUM. */
-  public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";
-
-  /** The Constant HBASE_ZOOKEEPER_CLIENT_PORT. */
-  public static final String HBASE_ZOOKEEPER_CLIENT_PORT = "hbase.zookeeper.clientPort";
-
-  /** The Constant HBASE_ZOOKEEPER_SESSION_TIMEOUT. */
-  public static final String HBASE_ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout";
-
-  /** The Constant HBASE_ZOOKEEPER_RECOVERY_RETRY. */
-  public static final String HBASE_ZOOKEEPER_RECOVERY_RETRY = "zookeeper.recovery.retry";
-
-  /** The Constant HBASE_CLIENT_RETRIES_NUMBER. */
-  public static final String HBASE_CLIENT_RETRIES_NUMBER = "hbase.client.retries.number";
-
-  /** The delimeter. */
-  String delimeter = "-";
-
-  /** The regex. */
-  String regex = "\\-";
-
-  /** The Constant PCAP_KEY_DELIMETER. */
-  public static final String PCAP_KEY_DELIMETER = "-";
-
-  /** The Constant START_KEY. */
-  public static final String START_KEY = "startKey";
-
-  /** The Constant END_KEY. */
-  public static final String END_KEY = "endKey";
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtil.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtil.java
deleted file mode 100644
index c92a3e4..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/HBaseConfigurationUtil.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.log4j.Logger;
-import org.mortbay.log.Log;
-
-/**
- * Utility class which creates HConnection instance when the first request is
- * received and registers a shut down hook which closes the connection when the
- * JVM exits. Creates new connection to the cluster only if the existing
- * connection is closed for unknown reasons. Also creates Configuration with
- * HBase resources using configuration properties.
- * 
- * @author Sayi
- * 
- */
-public class HBaseConfigurationUtil {
-
-  /** The Constant LOGGER. */
-  private static final Logger LOGGER = Logger
-      .getLogger(HBaseConfigurationUtil.class);
-
-  /** Configuration which holds all HBase properties. */
-  private static Configuration config;
-
-  /**
-   * A cluster connection which knows how to find master node and locate regions
-   * on the cluster.
-   */
-  private static HConnection clusterConnection = null;
-
-  /**
-   * Creates HConnection instance when the first request is received and returns
-   * the same instance for all subsequent requests if the connection is still
-   * open.
-   * 
-   * @return HConnection instance
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public static HConnection getConnection() throws IOException {
-    if (!connectionAvailable()) {
-      synchronized (HBaseConfigurationUtil.class) {
-        createClusterConncetion();
-      }
-    }
-    return clusterConnection;
-  }
-
-  /**
-   * Creates the cluster conncetion.
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private static void createClusterConncetion() throws IOException {
-    try {
-      if (connectionAvailable()) {
-        return;
-      }
-      clusterConnection = HConnectionManager.createConnection(read());
-      addShutdownHook();
-      System.out.println("Created HConnection and added shutDownHook");
-    } catch (IOException e) {
-      LOGGER
-          .error(
-              "Exception occurred while creating HConnection using HConnectionManager",
-              e);
-      throw e;
-    }
-  }
-
-  /**
-   * Connection available.
-   * 
-   * @return true, if successful
-   */
-  private static boolean connectionAvailable() {
-    if (clusterConnection == null) {
-      System.out.println("clusterConnection=" + clusterConnection);
-      return false;
-    }
-    System.out.println("clusterConnection.isClosed()="
-        + clusterConnection.isClosed());
-    return clusterConnection != null && !clusterConnection.isClosed();
-  }
-
-  /**
-   * Adds the shutdown hook.
-   */
-  private static void addShutdownHook() {
-    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
-      public void run() {
-        System.out
-            .println("Executing ShutdownHook HBaseConfigurationUtil : Closing HConnection");
-        try {
-          clusterConnection.close();
-        } catch (IOException e) {
-          Log.debug("Caught ignorable exception ", e);
-        }
-      }
-    }, "HBaseConfigurationUtilShutDown"));
-  }
-
-  /**
-   * Closes the underlying connection to cluster; ignores if any exception is
-   * thrown.
-   */
-  public static void closeConnection() {
-    if (clusterConnection != null) {
-      try {
-        clusterConnection.close();
-      } catch (IOException e) {
-        Log.debug("Caught ignorable exception ", e);
-      }
-    }
-  }
-
-  /**
-   * This method creates Configuration with HBase resources using configuration
-   * properties. The same Configuration object will be used to communicate with
-   * all HBase tables;
-   * 
-   * @return Configuration object
-   */
-  public static Configuration read() {
-    if (config == null) {
-      synchronized (HBaseConfigurationUtil.class) {
-        if (config == null) {
-          config = HBaseConfiguration.create();
-
-          config.set(
-              HBaseConfigConstants.HBASE_ZOOKEEPER_QUORUM,
-              ConfigurationUtil.getConfiguration().getString(
-                  "hbase.zookeeper.quorum"));
-          config.set(
-              HBaseConfigConstants.HBASE_ZOOKEEPER_CLIENT_PORT,
-              ConfigurationUtil.getConfiguration().getString(
-                  "hbase.zookeeper.clientPort"));
-          config.set(
-              HBaseConfigConstants.HBASE_CLIENT_RETRIES_NUMBER,
-              ConfigurationUtil.getConfiguration().getString(
-                  "hbase.client.retries.number"));
-          config.set(
-              HBaseConfigConstants.HBASE_ZOOKEEPER_SESSION_TIMEOUT,
-              ConfigurationUtil.getConfiguration().getString(
-                  "zookeeper.session.timeout"));
-          config.set(
-              HBaseConfigConstants.HBASE_ZOOKEEPER_RECOVERY_RETRY,
-              ConfigurationUtil.getConfiguration().getString(
-                  "zookeeper.recovery.retry"));
-        }
-      }
-    }
-    return config;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapGetter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapGetter.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapGetter.java
deleted file mode 100644
index 7dd9c1e..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapGetter.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * 
- */
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-import java.util.List;
-
-/**
- * interface to all 'keys' based pcaps fetching methods.
- * 
- * @author Sayi
- */
-public interface IPcapGetter {
-
-  /**
-   * Gets the pcaps for the input list of keys and lastRowKey.
-   * 
-   * @param keys
-   *          the list of keys for which pcaps are to be retrieved
-   * @param lastRowKey
-   *          last row key from the previous partial response
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps. The value is set to '0' if the caller sends negative value
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends
-   *          negative value. 'endTime' must be greater than the 'startTime'.
-   * @param includeReverseTraffic
-   *          indicates whether or not to include pcaps from the reverse traffic
-   * @param includeDuplicateLastRow
-   *          indicates whether or not to include the last row from the previous
-   *          partial response
-   * @param maxResultSize
-   *          the max result size
-   * @return PcapsResponse with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
-      long startTime, long endTime, boolean includeReverseTraffic,
-      boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
-
-  /**
-   * Gets the pcaps for the input key.
-   * 
-   * @param key
-   *          the key for which pcaps is to be retrieved.
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps. The value is set to '0' if the caller sends negative value
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps.The value is set to Long.MAX_VALUE if the caller sends
-   *          negative value. 'endTime' must be greater than the 'startTime'.
-   * @param includeReverseTraffic
-   *          indicates whether or not to include pcaps from the reverse traffic
-   * @return PcapsResponse with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public PcapsResponse getPcaps(String key, long startTime, long endTime,
-      boolean includeReverseTraffic) throws IOException;
-
-  /**
-   * Gets the pcaps for the input list of keys.
-   * 
-   * @param keys
-   *          the list of keys for which pcaps are to be retrieved.
-   * @return PcapsResponse with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public PcapsResponse getPcaps(List<String> keys) throws IOException;
-
-  /**
-   * Gets the pcaps for the input key.
-   * 
-   * @param key
-   *          the key for which pcaps is to be retrieved.
-   * @return PcapsResponse with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public PcapsResponse getPcaps(String key) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapReceiver.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapReceiver.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapReceiver.java
deleted file mode 100644
index a06ba6e..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapReceiver.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.RequestParam;
-
-/**
- * Single point of entry for all REST calls. Exposes methods to fetch pcaps for
- * the given list of keys or range of keys and optional start time and end time.
- * If the caller doesn't provide start time and end time, all pcaps from
- * beginning of the time to until now are returned.
- * 
- * @author Sayi
- * 
- */
-public interface IPcapReceiver {
-
-  /**
-   * Gets the pcaps for the given list of keys and optional startTime and
-   * endTime.
-   * 
-   * @param keys
-   *          the list of keys for which pcaps are to be retrieved
-   * @param lastRowKey
-   *          last row key from the previous partial response
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps.
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps. The default value is set to Long.MAX_VALUE. 'endTime' must
-   *          be greater than the 'startTime'.
-   * @param includeReverseTraffic
-   *          indicates whether or not to include pcaps from the reverse traffic
-   * @param includeDuplicateLastRow
-   *          indicates whether or not to include the last row from the previous
-   *          partial response
-   * @param maxResponseSize
-   *          indicates the maximum response size in MegaBytes. User needs to
-   *          pass positive value and must be less than 60 (MB)
-   * @return byte array with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public ResponseEntity<byte[]> getPcapsByKeys(@RequestParam List<String> keys,
-      @RequestParam String lastRowKey, @RequestParam long startTime,
-      @RequestParam long endTime, @RequestParam boolean includeReverseTraffic,
-      @RequestParam boolean includeDuplicateLastRow,
-      @RequestParam String maxResponseSize) throws IOException;
-
-  /**
-   * get pcaps for a given key range.
-   * 
-   * @param startKey
-   *          the start key of a key range for which pcaps are to be retrieved
-   * @param endKey
-   *          the end key of a key range for which pcaps are to be retrieved
-   * @param maxResponseSize
-   *          indicates the maximum response size in MegaBytes. User needs to
-   *          pass positive value and must be less than 60 (MB)
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps.
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps. 'endTime' must be greater than the 'startTime'.
-   * @return byte array with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public ResponseEntity<byte[]> getPcapsByKeyRange(
-      @RequestParam String startKey, @RequestParam String endKey,
-      @RequestParam String maxResponseSize, @RequestParam long startTime,
-      @RequestParam long endTime) throws IOException;
-
-  /**
-   * get pcaps for the given identifiers.
-   * 
-   * @param srcIp
-   *          source ip address
-   * @param destIp
-   *          destination ip address
-   * @param protocol
-   *          network protocol
-   * @param srcPort
-   *          source port
-   * @param destPort
-   *          destination port
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps.
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps. 'endTime' must be greater than the 'startTime'.
-   * @param includeReverseTraffic
-   *          indicates whether or not to include pcaps from the reverse traffic
-   * @return byte array with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public ResponseEntity<byte[]> getPcapsByIdentifiers(
-      @RequestParam String srcIp, @RequestParam String destIp,
-      @RequestParam String protocol, @RequestParam String srcPort,
-      @RequestParam String destPort, @RequestParam long startTime,
-      @RequestParam long endTime, @RequestParam boolean includeReverseTraffic)
-      throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapScanner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapScanner.java b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapScanner.java
deleted file mode 100644
index c8c19ef..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/hbase/src/main/java/com/cisco/opensoc/hbase/client/IPcapScanner.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.cisco.opensoc.hbase.client;
-
-import java.io.IOException;
-
-/**
- * The Interface for all pcaps fetching methods based on key range.
- */
-public interface IPcapScanner {
-
-  /**
-   * Gets the pcaps for between startKey (inclusive) and endKey (exclusive).
-   * 
-   * @param startKey
-   *          the start key of a key range for which pcaps is to be retrieved.
-   * @param endKey
-   *          the end key of a key range for which pcaps is to be retrieved.
-   * @param maxResponseSize
-   *          indicates the maximum response size in MegaBytes(MB). User needs
-   *          to pass positive value and must be less than 60 (MB)
-   * @param startTime
-   *          the start time in system milliseconds to be used to filter the
-   *          pcaps. The value is set to '0' if the caller sends negative value
-   * @param endTime
-   *          the end time in system milliseconds to be used to filter the
-   *          pcaps. The value is set Long.MAX_VALUE if the caller sends
-   *          negative value
-   * @return byte array with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public byte[] getPcaps(String startKey, String endKey, long maxResponseSize,
-      long startTime, long endTime) throws IOException;
-
-  /**
-   * Gets the pcaps for between startKey (inclusive) and endKey (exclusive).
-   * 
-   * @param startKey
-   *          the start key (inclusive) of a key range for which pcaps is to be
-   *          retrieved.
-   * @param endKey
-   *          the end key (exclusive) of a key range for which pcaps is to be
-   *          retrieved.
-   * @return byte array with all matching pcaps merged together
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public byte[] getPcaps(String startKey, String endKey) throws IOException;
-
-}


[05/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/FireeyeExampleOutput
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/FireeyeExampleOutput b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/FireeyeExampleOutput
new file mode 100644
index 0000000..0210010
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/FireeyeExampleOutput
@@ -0,0 +1,90 @@
+<164>fenotify-3483808.2.alert: 1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js 
 HTTP
+<164>fenotify-793972.2.alert: ontrol: no-cache::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Exploit.Kit.Magnitude 
+<164>fenotify-797180.2.alert: 0.8::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36::~~Accept-Encoding: gzip, deflate, sdch::~~Accept-Language: en-US,en;q\=0.8::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Redirector 
+<164>fenotify-3483808.3.alert: /1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microad cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=Malware.Binary 
+<164>fenotify-791429.2.alert: t: rapidvideohere.pw::~~Connection: Keep-Alive::~~::~~ dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Exploit.Kit.Magnitude 
+<164>fenotify-851777.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:27:43 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61395 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851777 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851777 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851901.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:56:45 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59131 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851901 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851901 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851980.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:23:51 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53295 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851980 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851980 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851795.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:19:05 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54975 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851795 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851795 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851805.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 03:23:14 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50807 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851805 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851805 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851844.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:19:41 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50767 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851844 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851844 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851782.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:18:22 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50940 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851782 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851782 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851940.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:57:19 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50646 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851940 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851940 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851881.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:13:15 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61237 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851881 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851881 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851839.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:33:19 UTC dvc=10.201.78.10 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49186 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851839 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851839 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851983.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:28:26 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54527 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851983 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851983 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851987.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:33:41 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51218 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851987 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851987 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852010.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:15:08 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55203 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852010 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852010 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852053.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:16:45 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62235 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852053 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852053 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852455.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:28:38 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=65175 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852455 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852455 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851887.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:24:54 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=56334 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851887 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851887 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851822.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:41:49 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49732 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851822 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851822 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851832.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:19:15 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62962 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851832 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851832 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851780.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:56:46 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54301 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851780 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851780 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851792.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:15:06 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=64831 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851792 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851792 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851806.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 03:24:05 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53417 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851806 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851806 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851840.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:00:58 UTC dvc=10.201.78.40 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50709 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851840 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851840 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851929.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:37:14 UTC dvc=10.201.78.87 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62909 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851929 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851929 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851918.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:17:41 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63483 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851918 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851918 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851842.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:03:05 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59908 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=851842 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851842 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851948.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:13:18 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51327 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851948 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851948 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852008.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:13:25 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63619 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852008 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852008 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852072.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:30:09 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53467 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852072 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852072 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852077.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:31:58 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58546 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=852077 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852077 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852110.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:56:32 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61983 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852110 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852110 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852378.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:03:31 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49942 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852378 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852378 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851787.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:57:21 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55199 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851787 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851787 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851800.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:54:32 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50605 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851800 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851800 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851941.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:58:30 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51721 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851941 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851941 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851850.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:29:59 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50606 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851850 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851850 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851885.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:22:40 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53481 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851885 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851885 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851801.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:55:09 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59875 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851801 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851801 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851884.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:20:10 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50039 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851884 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851884 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851815.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:06:05 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53889 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851815 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851815 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851825.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:49:07 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51906 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851825 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851825 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851966.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:50:43 UTC dvc=10.201.78.10 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50758 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851966 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851966 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852112.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:58:20 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60631 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852112 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852112 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852126.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 15:03:43 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=65017 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852126 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852126 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852407.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:15:10 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49620 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852407 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852407 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852417.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:17:11 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51333 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852417 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852417 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852431.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:20:08 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53525 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852431 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852431 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852438.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:21:21 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62464 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852438 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852438 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483822.1.alert: CEF:0|FireEye|CMS|7.2.1.244420|MO|malware-object|4|rt=Feb 09 2015 07:24:06 UTC dvc=10.201.78.216 cn3Label=cncPort cn3=80 dst=191.235.179.140 fileHash=6126d97e5bd4e6d93e3e3579cc5b3ce0 filePath=/analysis/191.235.179.140_80-10.220.55.216_56118--833719413_9204551_T.pcoff cs5Label=cncHost cs5=api.shamenchik.info cs3Label=osinfo cs3=Microsoft WindowsXP 32-bit 5.1 sp3 14.0528 proto=tcp dvchost=DEVFEYE1 dvc=10.100.25.16 cn1Label=vlan cn1=0 externalId=3483822 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ma_id\=3483822 cs6Label=channel cs6=POST /api/sdm HTTP/1.1::~~Content-Type: application/x-json::~~Accept: */*::~~User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.04506.648; .NET CLR 3.5.21022; .NET4.0C; .NET4.0E)::~~Host: api.shamenchik.info::~~Content-Length: 800::~~Connection: Keep-Alive::~~Cache-Control: no-cache::~~::~~g+3CouWsTcAym6cirpXcrPeCqh2q2xYh//aNKX15/lgvTM
+<164>fenotify-851890.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:36:36 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63018 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851890 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851890 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851861.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 07:11:45 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62660 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851861 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851861 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851781.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:10:09 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63319 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851781 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851781 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851837.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:30:01 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49533 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851837 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851837 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851846.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:26:50 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53933 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851846 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851846 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851920.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:26:37 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60410 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=851920 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851920 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851818.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:25:02 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60319 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=851818 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851818 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851866.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 07:13:28 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54836 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851866 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851866 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851773.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:01:29 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60239 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=851773 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851773 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851935.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:48:18 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54362 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851935 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851935 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851970.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:04:50 UTC dvc=10.201.78.40 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50327 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851970 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851970 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851975.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:21:18 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51420 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851975 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851975 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852454.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:28:34 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55348 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852454 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852454 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483798.2.alert: act;Trojan.Kuloz;Trojan.Kuluoz 
+<164>fenotify-834781.2.alert: Connection: Keep-Alive::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Malicious.URL 
+<164>fenotify-3483794.3.alert: 0d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~ cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=FE_Evasion_Sandboxie;FE_Evasion_VMDetect 
+<164>fenotify-3483796.2.alert: jan.Kuloz;Trojan.Kuluoz 
+<164>fenotify-851894.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:45:48 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49433 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851894 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851894 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851899.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:54:50 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50711 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851899 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851899 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851851.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:31:05 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61134 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851851 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851851 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851845.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:20:46 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55294 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851845 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851845 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851789.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:03:48 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62782 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851789 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851789 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851820.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:33:45 UTC dvc=10.201.78.87 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63559 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851820 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851820 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851828.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:09:07 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=52967 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851828 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851828 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851816.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:16:05 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61806 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851816 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851816 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851831.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:14:31 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58655 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851831 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851831 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851950.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:16:07 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58855 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851950 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851950 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851988.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:35:26 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61427 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851988 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851988 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852013.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:18:29 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61630 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=852013 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852013 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852070.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:27:45 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54769 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852070 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852070 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852082.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:35:15 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60922 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=852082 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852082 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852114.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:59:08 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50396 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852114 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852114 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852295.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 16:30:40 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60266 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=852295 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852295 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483807.2.alert: z0Q6RNzwu2BoLSVUhiBihE4z0mlPDacuE1Waqs86Z9VVYg6iM2MlFH8GZgagnlOuzfB2JHdKPc/GwnzFk5DPfUPJAe8DH9Y6hwohv0t6XFVWx5UDSGARW8w3GAop9R+9iaSCuomuLU26/gaqL4gfjZqjLHzoDx+vhOCiOP6RnGMio5v2kcKxitPL7pPVu5FJ6MwUG7QOLecwONRzQsFh/jXFT4gyR2iS/EbufAeRofQVZHsj9dhgHZKNLcsFPnw/8lWlvgku7s28l57lAGxtp99jkzYD58jPgBm1nGbJPubFTL47ZmBkPPNsc1XjRcNvPz5/nzb0eWctXZ2cKocAQnT2zHOgBxRM6my9QW/Lg0JWaQyqBO2EOyTfej6KgVlHoIf0E3bv6C5PgVrJunAIqqlO6EvKvILlDYk2yoklvP3Fry5p4Nrw2isE95Used9Zqsoxx0bWInNcfyQhoqqlmYKiZZb+aBwGvJEL634pmoTMEBMdn4s3gz2a7aLV+vOVULQbgR15PygsYQdOnymv7uWZtdKOp7ut21GwNu9ZxJGMrssW0gzvaZiZDs7FSordVPUiUqcfS6ciU1cl29fNTWnmRkq4vk+vBgvUQLxTTAleV9k5svtB237GvvolWE72ugJQXUun51WxAqOAZpV0c6tEbK5qd6Z55z8Rs/LpN8VM4/nbZmfB5XY+eCCLfULjisVoStUUeH67&report\=p509XA27GEFLLes0RJ8pJJdIkbJ+3YkVUv2qjhuxlRPlVrrEZckeXFIaD+4/a1xulR8kKMx9GrPD2uc/wC+NxgKg/ok/kttHH45shX4YjPLsS4QtXUHugcE5Rr1238CYegHwOKWzAp3g5Mpt7loabRTBtmzXXeLBV4cFKv3zWpxQ7+CBGpsDfsvkD2Qgst3FX05VQHBpnJfXgRqdRrLyUjezF1tlIgvvNCv6hQ+zffxKk0WcD
 oUe8
+<164>fenotify-3483794.2.alert: 53 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863ca7110d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863ca7110d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863c
 a711
+<164>fenotify-3483799.2.alert: L, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: pkeyqcot5gzamu.5t9dyvo2.com::~~Connection: Keep-Alive::~~::~~ cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=FE_PUP_Softpulse;FE_Evasion_VMDetect;FE_Evasion_DBGDetect_Files;FE_Evasion_Sandboxie 
+<164>fenotify-3483807.3.alert: n6o4JWRQX2V1jsLkx8LFQz3nXe7Bbiuuc1sMcdS/lEv7f9zpw09qs0LvVpRJe4tZjE4Gsghh7Xh5OAxE2A7HBLnWjloIazv6jvun+R1BpF1vuujyEdDgKWIv4BeMmQQJ6p66O/U0jHvWelTBMT+RTVFERsryrpWE+g7AHeRyzDIERgWxHxzA9y6cQ9JYp2/JOPdUzWnLWM24Be6fWmlJ37J90GuEvHh+WXWsaewcBg8xUAhlQBfEHP01PGcuX2yJin2rQ8/GhkiF210HCJUCIbxxz6rZuf6CaksKSXPIeXf1Iifha58Rtm cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=Malware.Binary 
\ No newline at end of file



[06/26] incubator-metron git commit: replace opensoc-streaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology.conf
new file mode 100644
index 0000000..d50a079
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology.conf
@@ -0,0 +1,110 @@
+include = ../../etc/env/environment_common.conf
+include = ../../etc/env/es_connection.conf
+include = ../../etc/env/hdfs_connection.conf
+include = ../../etc/env/mysql_connection.conf
+include = metrics.conf
+include = features_enabled.conf
+
+#Global Properties
+
+debug.mode=true
+local.mode=true
+num.workers=1
+
+#Standard 5-tuple fields
+
+source.ip=ip_src_addr
+source.port=ip_src_port
+dest.ip=ip_dst_addr
+dest.port=ip_dst_port
+protocol=protocol
+
+#Test Spout
+spout.test.parallelism.repeat=false
+
+#Kafka Spout
+spout.kafka.topic=fireeye_raw
+
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicFireEyeParser
+
+#Host Enrichment
+
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.host.enrichment_tag=host
+
+
+#GeoEnrichment
+
+bolt.enrichment.geo.enrichment_tag=geo
+bolt.enrichment.geo.adapter.table=GEO
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
+
+#WhoisEnrichment
+
+bolt.enrichment.whois.hbase.table.name=whois
+bolt.enrichment.whois.enrichment_tag=whois
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
+
+#CIF Enrichment
+bolt.enrichment.cif.tablename=cif_table
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.cif.enrichment_tag=cif
+
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
+
+#Indexing Bolt
+bolt.indexing.indexname=fireeye_index
+bolt.indexing.timestamp=yyyy.MM.ww
+bolt.indexing.documentname=fireeye_doc
+bolt.indexing.bulk=1
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Indexing Bolt
+bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
+bolt.alerts.indexing.documentname=fireeye_alert
+bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Error Indexing Bolt
+bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
+bolt.error.indexing.documentname=fireeye_error
+bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Bolt
+bolt.alerts.adapter=com.opensoc.alerts.adapters.CIFAlertsAdapter
+com.opensoc.alerts.adapters.CIFAlertsAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.CIFAlertsAdapter.port=2181
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+#HDFS Bolt
+bolt.hdfs.batch.size=5000
+bolt.hdfs.field.delimiter=|
+bolt.hdfs.file.rotation.size.in.mb=5
+bolt.hdfs.file.system.url=hdfs://nn1:8020
+bolt.hdfs.wip.file.path=/fireeye/wip
+bolt.hdfs.finished.file.path=/fireeye/rotated
+bolt.hdfs.compression.codec.class=org.apache.hadoop.io.compress.SnappyCodec
+
+#Kafka Bolt
+bolt.kafka.topic=fireeye_enriched
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology_identifier.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology_identifier.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology_identifier.conf
new file mode 100644
index 0000000..3f1e560
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/topology_identifier.conf
@@ -0,0 +1,4 @@
+#Each topology must have a unique identifier.  This setting is required
+
+topology.id=fireeye
+instance.id=FE001
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/features_enabled.conf
index 486eea5..730935d 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/features_enabled.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/features_enabled.conf
@@ -19,7 +19,7 @@ spout.kafka.parallelism.hint=1
 #Feature: Parser Bolt
 ##Feature Description: Parses telemetry from its native format into a native JSON
 
-parser.bolt.name=ParserBolt
+bolt.parser.name=ParserBolt
 bolt.parser.enabled=true
 bolt.parser.num.tasks=1
 bolt.parser.parallelism.hint=1
@@ -56,6 +56,14 @@ bolt.enrichment.cif.enabled=false
 bolt.enrichment.cif.num.tasks=1
 bolt.enrichment.cif.parallelism.hint=1
 
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
 #Feature: Rules-Based Alerts
 ##Feature Description: Tags messages with rules-based alerts
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/topology.conf
index 7fbc9ff..f986bea 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/topology.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/ise/topology.conf
@@ -25,12 +25,13 @@ spout.test.parallelism.repeat=false
 #Kafka Spout
 spout.kafka.topic=ise_raw
 
-
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicIseParser
 
 #Host Enrichment
 
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 
 
@@ -38,40 +39,54 @@ bolt.enrichment.host.enrichment_tag=host
 
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
-bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
-bolt.enrichment.cif.host=tld
-bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif
 
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
 
 #Indexing Bolt
 bolt.indexing.indexname=ise_index
+bolt.indexing.timestamp=yyyy.MM.ww
 bolt.indexing.documentname=ise_doc
 bolt.indexing.bulk=200
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Alerts Indexing Bolt
 bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
 bolt.alerts.indexing.documentname=ise_alert
 bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Error Indexing Bolt
 bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
 bolt.error.indexing.documentname=ise_error
 bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #HDFS Bolt
 bolt.hdfs.batch.size=5000

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/features_enabled.conf
index 765dde3..a4dc14d 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/features_enabled.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/features_enabled.conf
@@ -19,7 +19,7 @@ spout.kafka.parallelism.hint=1
 #Feature: Parser Bolt
 ##Feature Description: Parses telemetry from its native format into a native JSON
 
-parser.bolt.name=ParserBolt
+bolt.parser.name=ParserBolt
 bolt.parser.enabled=true
 bolt.parser.num.tasks=1
 bolt.parser.parallelism.hint=1
@@ -56,6 +56,14 @@ bolt.enrichment.cif.enabled=false
 bolt.enrichment.cif.num.tasks=1
 bolt.enrichment.cif.parallelism.hint=1
 
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
 #Feature: Rules-Based Alerts
 ##Feature Description: Tags messages with rules-based alerts
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/topology.conf
index 8c50580..7da2a49 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/topology.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/lancope/topology.conf
@@ -25,12 +25,13 @@ spout.test.parallelism.repeat=false
 #Kafka Spout
 spout.kafka.topic=lancope_raw
 
-
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicLancopeParser
 
 #Host Enrichment
 
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 
 
@@ -38,40 +39,54 @@ bolt.enrichment.host.enrichment_tag=host
 
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
-bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
-bolt.enrichment.cif.host=tld
-bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif
 
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
 
 #Indexing Bolt
 bolt.indexing.indexname=lancope_index
+bolt.indexing.timestamp=yyyy.MM.ww
 bolt.indexing.documentname=lancope_doc
 bolt.indexing.bulk=200
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Alerts Indexing Bolt
 bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
 bolt.alerts.indexing.documentname=lancope_alert
 bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Error Indexing Bolt
 bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
 bolt.error.indexing.documentname=lancope_error
 bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #HDFS Bolt
 bolt.hdfs.batch.size=5000

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/features_enabled.conf
new file mode 100644
index 0000000..29ea06d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/features_enabled.conf
@@ -0,0 +1,113 @@
+#Enable and disable features for each topology
+
+#Feature: Test spout 
+##Feature Description: Reads telemetry from file and ingests it into topology.  Used for testing or bulk loading the topology
+
+spout.test.name=TestSpout
+spout.test.enabled=true
+spout.test.num.tasks=1
+spout.test.parallelism.hint=1
+
+#Feature: Kafka spout
+##Feature Description: Acts as a Kafka consumer.  Takes messages from a Kafka topic and ingests them into a topology
+
+spout.kafka.name=KafkaSpout
+spout.kafka.enabled=false
+spout.kafka.num.tasks=1
+spout.kafka.parallelism.hint=1
+
+#Feature: Parser Bolt
+##Feature Description: Parses telemetry from its native format into a native JSON
+
+bolt.parser.name=ParserBolt
+bolt.parser.enabled=true
+bolt.parser.num.tasks=1
+bolt.parser.parallelism.hint=1
+
+#Feature: Host Enrichment
+##Feature Description: Appends information about known hosts to a telemetry message
+
+bolt.enrichment.host.name=HostEnrichment
+bolt.enrichment.host.enabled=true
+bolt.enrichment.host.num.tasks=1
+bolt.enrichment.host.parallelism.hint=1
+
+#Feature: Geo Enrichment
+##Feature Description: Appends geo information about known non-local IPs to a telemetry message
+
+bolt.enrichment.geo.name=GeoEnrichment 
+bolt.enrichment.geo.enabled=true
+bolt.enrichment.geo.num.tasks=1
+bolt.enrichment.geo.parallelism.hint=1
+
+#Feature: Whois Enrichment
+##Feature Description: Appends whois information about known domains to a telemetry message
+
+bolt.enrichment.whois.name=WhoisEnrichment
+bolt.enrichment.whois.enabled=true
+bolt.enrichment.whois.num.tasks=1
+bolt.enrichment.whois.parallelism.hint=1
+
+#Feature: CIF Enrichment
+##Feature Description: Appends information from CIF threat intelligence feeds to a telemetry message
+
+bolt.enrichment.cif.name=CIFBolt
+bolt.enrichment.cif.enabled=true
+bolt.enrichment.cif.num.tasks=1
+bolt.enrichment.cif.parallelism.hint=1
+
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
+#Feature: Rules-Based Alerts
+##Feature Description: Tags messages with rules-based alerts
+
+bolt.alerts.name=Alerts
+bolt.alerts.enabled=true
+bolt.alerts.num.tasks=1
+bolt.alerts.parallelism.hint=1
+
+#Feature: Indexer
+##Feature Description: Indexes telemetry messages in ElasticSearch or Solr
+
+bolt.indexing.name=IndexBolt
+bolt.indexing.enabled=true
+bolt.indexing.num.tasks=1
+bolt.indexing.parallelism.hint=1
+
+#Feature: Alerts Indexer
+##Feature Description: Indexes alert messages in ElasticSearch or Solr
+
+bolt.alerts.indexing.name=AlertIndexBolt
+bolt.alerts.indexing.enabled=true
+bolt.alerts.indexing.num.tasks=1
+bolt.alerts.indexing.parallelism.hint=1
+
+#Feature: Error Indexer
+##Feature Description: Indexes error messages in ElasticSearch or Solr
+
+bolt.error.indexing.name=ErrorIndexBolt
+bolt.error.indexing.enabled=true
+bolt.error.indexing.num.tasks=1
+bolt.error.indexing.parallelism.hint=1
+
+#Feature: Kafka Bolt
+##Feature Description: Writes telemetry messages back into a Kafka topic
+
+bolt.kafka.name=KafkaBolt
+bolt.kafka.enabled=false
+bolt.kafka.num.tasks=1
+bolt.kafka.parallelism.hint=1
+
+#Feature: HDFS Bolt
+##Feature Description: Writes telemetry messages into HDFS
+
+bolt.hdfs.name=HDFSBolt
+bolt.hdfs.enabled=false
+bolt.hdfs.num.tasks=1
+bolt.hdfs.parallelism.hint=1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/metrics.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/metrics.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/metrics.conf
new file mode 100644
index 0000000..1daef3d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/metrics.conf
@@ -0,0 +1,26 @@
+#reporters
+com.opensoc.metrics.reporter.graphite=true
+com.opensoc.metrics.reporter.console=false
+com.opensoc.metrics.reporter.jmx=false
+
+#Graphite Addresses
+
+com.opensoc.metrics.graphite.address=localhost
+com.opensoc.metrics.graphite.port=2023
+
+#TelemetryParserBolt
+com.opensoc.metrics.TelemetryParserBolt.acks=true
+com.opensoc.metrics.TelemetryParserBolt.emits=true
+com.opensoc.metrics.TelemetryParserBolt.fails=true
+
+
+#GenericEnrichmentBolt
+com.opensoc.metrics.GenericEnrichmentBolt.acks=true
+com.opensoc.metrics.GenericEnrichmentBolt.emits=true
+com.opensoc.metrics.GenericEnrichmentBolt.fails=true
+
+
+#TelemetryIndexingBolt
+com.opensoc.metrics.TelemetryIndexingBolt.acks=true
+com.opensoc.metrics.TelemetryIndexingBolt.emits=true
+com.opensoc.metrics.TelemetryIndexingBolt.fails=true

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology.conf
new file mode 100644
index 0000000..a92c7f3
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology.conf
@@ -0,0 +1,113 @@
+include = ../../etc/env/environment_common.conf
+include = ../../etc/env/es_connection.conf
+include = ../../etc/env/hdfs_connection.conf
+include = ../../etc/env/mysql_connection.conf
+include = metrics.conf
+include = features_enabled.conf
+
+#Global Properties
+
+debug.mode=true
+local.mode=true
+num.workers=1
+
+#Standard 5-tuple fields
+
+source.ip=ip_src_addr
+source.port=ip_src_port
+dest.ip=ip_dst_addr
+dest.port=ip_dst_port
+protocol=protocol
+
+#Test Spout
+spout.test.parallelism.repeat=false
+
+#Kafka Spout
+spout.kafka.topic=paloalto_raw
+
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicPaloAltoFirewallParser
+
+#Host Enrichment
+
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.host.enrichment_tag=host
+
+
+#GeoEnrichment
+
+bolt.enrichment.geo.enrichment_tag=geo
+bolt.enrichment.geo.adapter.table=GEO
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
+
+#WhoisEnrichment
+
+bolt.enrichment.whois.hbase.table.name=whois
+bolt.enrichment.whois.enrichment_tag=whois
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
+
+#CIF Enrichment
+bolt.enrichment.cif.tablename=cif_table
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.cif.enrichment_tag=cif
+bolt.enrichment.cif.host=host
+
+
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
+
+#Indexing Bolt
+bolt.indexing.indexname=paloalto_index
+bolt.indexing.timestamp=yyyy.MM.ww
+bolt.indexing.documentname=paloalto_doc
+bolt.indexing.bulk=1
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Indexing Bolt
+bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
+bolt.alerts.indexing.documentname=paloalto_alert
+bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Error Indexing Bolt
+bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
+bolt.error.indexing.documentname=paloalto_error
+bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Bolt
+bolt.alerts.adapter=com.opensoc.alerts.adapters.CIFAlertsAdapter
+com.opensoc.alerts.adapters.CIFAlertsAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.CIFAlertsAdapter.port=2181
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+#HDFS Bolt
+bolt.hdfs.batch.size=5000
+bolt.hdfs.field.delimiter=|
+bolt.hdfs.file.rotation.size.in.mb=5
+bolt.hdfs.file.system.url=hdfs://nn1:8020
+bolt.hdfs.wip.file.path=/paloalto/wip
+bolt.hdfs.finished.file.path=/paloalto/rotated
+bolt.hdfs.compression.codec.class=org.apache.hadoop.io.compress.SnappyCodec
+
+#Kafka Bolt
+bolt.kafka.topic=paloalto_enriched
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology_identifier.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology_identifier.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology_identifier.conf
new file mode 100644
index 0000000..7601122
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/paloalto/topology_identifier.conf
@@ -0,0 +1,4 @@
+#Each topology must have a unique identifier.  This setting is required
+
+topology.id=paloalto
+instance.id=PA001
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/features_enabled.conf
index a79d7ee..9b41fa2 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/features_enabled.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/features_enabled.conf
@@ -19,7 +19,7 @@ spout.kafka.parallelism.hint=1
 #Feature: Parser Bolt
 ##Feature Description: Parses telemetry from its native format into a native JSON
 
-parser.bolt.name=ParserBolt
+bolt.parser.name=ParserBolt
 bolt.parser.enabled=true
 bolt.parser.num.tasks=1
 bolt.parser.parallelism.hint=1
@@ -56,6 +56,14 @@ bolt.enrichment.cif.enabled=false
 bolt.enrichment.cif.num.tasks=1
 bolt.enrichment.cif.parallelism.hint=1
 
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
 #Feature: Rules-Based Alerts
 ##Feature Description: Tags messages with rules-based alerts
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/topology.conf
index bd5bc59..30c3ef3 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/topology.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/pcap/topology.conf
@@ -44,8 +44,8 @@ spout.kafka.topic=pcap_raw
 
 #Host Enrichment
 
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 
 
@@ -53,40 +53,53 @@ bolt.enrichment.host.enrichment_tag=host
 
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
-bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
-bolt.enrichment.cif.host=tld
-bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
 
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
 
 #Indexing Bolt
-bolt.indexing.indexname=pcap_index_test
+bolt.indexing.indexname=pcap_index
+bolt.indexing.timestamp=yyyy.MM.dd.hh
 bolt.indexing.documentname=pcap_doc
 bolt.indexing.bulk=1
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Alerts Indexing Bolt
 bolt.alerts.indexing.indexname=pcap_alert_test
 bolt.alerts.indexing.documentname=pcap_alert
 bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESBaseBulkAdapter
 
 #Error Indexing Bolt
-bolt.error.indexing.indexname=pcap_error_test
+bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
 bolt.error.indexing.documentname=pcap_error
 bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #HDFS Bolt
 bolt.hdfs.batch.size=5000
@@ -110,3 +123,28 @@ bolt.hbase.enable.batching=false
 bolt.hbase.write.buffer.size.in.bytes=2000000
 bolt.hbase.durability=SKIP_WAL
 bolt.hbase.partitioner.region.info.refresh.interval.mins=60
+
+
+#Extra [Optional] Storm Configuration Options
+
+optional.settings.bolt.index.search.transport.tcp.compress=true
+optional.settings.bolt.index.search.discovery.zen.ping.multicast.enabled=true
+optional.settings.bolt.index.search.discovery.zen.ping.unicast.hosts=ctrl01:9300\,ctrl02:9300\,ctrl03:9300
+optional.settings.bolt.index.search.http.port=19200
+optional.settings.bolt.index.search.transport.tcp.port=19300
+optional.settings.bolt.index.search.node.name=node.name_{index}
+optional.settings.bolt.index.search.path.data=/tmp/es_data_client_{index}
+optional.settings.bolt.index.search.path.work=/tmp/es_work_client_{index}
+optional.settings.bolt.index.search.path.logs=/var/log/elasticsearch/client_{index}
+optional.settings.bolt.index.search.http.enabled=true
+optional.settings.bolt.index.search.discovery.zen.minimum_master_nodes=1
+optional.settings.bolt.index.search.discovery.zen.ping.multicast.ttl=60
+optional.settings.bolt.index.search.discovery.zen.ping_timeout=500
+optional.settings.bolt.index.search.discovery.zen.fd.ping_timeout=500
+optional.settings.bolt.index.search.discovery.zen.fd.ping_interval=60
+optional.settings.bolt.index.search.discovery.zen.fd.ping_retries=60
+optional.settings.bolt.index.search.client.transport.ping_timeout=60s
+optional.settings.bolt.index.search.multicast.enabled=false
+optional.settings.bolt.index.search.index.refresh_interval=2m
+optional.settings.bolt.index.search.index.merge.async=true
+optional.settings.bolt.index.search.action.write_consistency=one

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/features_enabled.conf
index f6c9ab8..5b45dde 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/features_enabled.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/features_enabled.conf
@@ -19,7 +19,7 @@ spout.kafka.parallelism.hint=1
 #Feature: Parser Bolt
 ##Feature Description: Parses telemetry from its native format into a native JSON
 
-parser.bolt.name=ParserBolt
+bolt.parser.name=ParserBolt
 bolt.parser.enabled=true
 bolt.parser.num.tasks=1
 bolt.parser.parallelism.hint=1
@@ -56,6 +56,14 @@ bolt.enrichment.cif.enabled=false
 bolt.enrichment.cif.num.tasks=1
 bolt.enrichment.cif.parallelism.hint=1
 
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
 #Feature: Rules-Based Alerts
 ##Feature Description: Tags messages with rules-based alerts
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/topology.conf
index 02d77a1..29d682a 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/topology.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/sourcefire/topology.conf
@@ -25,12 +25,13 @@ spout.test.parallelism.repeat=false
 #Kafka Spout
 spout.kafka.topic=sourcefire_raw
 
-
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicSourcefireParser
 
 #Host Enrichment
 
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 
 
@@ -38,40 +39,63 @@ bolt.enrichment.host.enrichment_tag=host
 
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
-bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
-bolt.enrichment.cif.host=tld
-bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif
 
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
 
 #Indexing Bolt
 bolt.indexing.indexname=sourcefire_index
+bolt.indexing.timestamp=yyyy.MM.ww
 bolt.indexing.documentname=sourcefire_doc
 bolt.indexing.bulk=1
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Alerts Indexing Bolt
 bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
 bolt.alerts.indexing.documentname=sourcefire_alert
 bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Error Indexing Bolt
 bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
 bolt.error.indexing.documentname=sourcefire_error
 bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Bolt
+bolt.alerts.adapter=com.opensoc.alerts.adapters.AllAlertAdapter
+com.opensoc.alerts.adapters.AllAlertAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.AllAlertAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.AllAlertAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.AllAlertAdapter.port=2181
+com.opensoc.alerts.adapters.AllAlertAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.AllAlertAdapter._MAX_TIME_RETAIN_MINUTES=1000
 
 #HDFS Bolt
 bolt.hdfs.batch.size=5000

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/AsaOutput
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/AsaOutput b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/AsaOutput
new file mode 100644
index 0000000..6009d48
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/AsaOutput
@@ -0,0 +1,100 @@
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609001: Built local-host inside:10.22.8.205
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.74/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167725 for Outside_VPN:147.111.72.16/26436 to DMZ-Inside:10.22.8.53/443 duration 0:00:00 bytes 9687 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805593 for outside:10.22.8.223/59614(LOCAL\user.name) to inside:10.22.8.78/8102 duration 0:00:07 bytes 3433 TCP FINs (user.name)
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245503 for outside:10.22.8.233/54209 (10.22.8.233/54209) to inside:198.111.72.238/443 (198.111.72.238/443) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806031 for outside:10.22.8.17/58633 (10.22.8.17/58633)(LOCAL\user.name) to inside:10.22.8.12/389 (10.22.8.12/389) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168292 for DMZ-Inside:10.22.8.51/51231 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2103 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 186.111.72.11/80 to 204.111.72.226/45019 flags SYN ACK  on interface Outside_VPN
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604987 for outside:209.111.72.151/443 to inside:10.22.8.188/64306 duration 0:00:31 bytes 10128 TCP FINs
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604999 for outside:209.111.72.151/443 to inside:10.22.8.188/64307 duration 0:00:30 bytes 6370 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167347 for Outside_VPN:198.111.72.24/2134 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9785 TCP FINs
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245506 for outside:10.22.8.110/49886 (10.22.8.110/49886) to inside:192.111.72.8/8612 (192.111.72.8/8612) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805993 for outside:10.22.8.89/56917(LOCAL\user.name) to inside:216.111.72.126/443 duration 0:00:00 bytes 0 TCP FINs (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-710005: UDP request discarded from 10.22.8.223/49192 to outside:224.111.72.252/5355
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488166143 for Outside_VPN:198.111.72.64/80 to Inside-Trunk:10.22.8.39/54883 duration 0:00:04 bytes 1148 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.84/445 to 10.22.8.219/60726 flags ACK  on interface inside
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168344 for DMZ-Inside:10.22.8.53/61682 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 5648 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168345 for DMZ-Inside:10.22.8.16/31454 to Inside-Trunk:10.22.8.21/443 duration 0:00:00 bytes 756 TCP FINs
+<182>Jan  5 20:22:35 10.22.8.4 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.12/0 gaddr 10.22.8.45/1 laddr 10.22.8.45/1
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 50.111.72.230/80 to 204.111.72.254/53077 flags RST  on interface Outside_VPN
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603649 for outside:206.111.72.2/161 to inside:10.22.8.48/63297 duration 0:02:01 bytes 209
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603650 for outside:207.111.72.122/161 to inside:10.22.8.48/63298 duration 0:02:01 bytes 209
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603652 for outside:206.111.72.2/161 to inside:10.22.8.48/63300 duration 0:02:01 bytes 115
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603657 for outside:206.111.72.2/161 to inside:10.22.8.48/63306 duration 0:02:01 bytes 115
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168436 for DMZ-Inside:10.22.8.51/51235 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2497 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167656 for Outside_VPN:69.111.72.70/21560 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 11410 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806050 for outside:10.22.8.62/53965 (10.22.8.62/53965)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806052 for outside:10.22.8.62/56500 (10.22.8.62/56500)(LOCAL\user.name) to inside:198.111.72.83/443 (198.111.72.83/443) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806054 for outside:10.22.8.62/56502 (10.22.8.62/56502)(LOCAL\user.name) to inside:50.111.72.252/443 (50.111.72.252/443) (user.name)
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-305011: Built dynamic TCP translation from inside:10.22.8.188/64340 to outside:206.111.72.41/2013
+<166>Jan  5 15:52:35 10.22.8.33 %ASA-6-305012: Teardown dynamic UDP translation from inside:192.111.72.2/62251 to outside:79.111.72.174/21311 duration 0:02:30
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806058 for outside:10.22.8.221/56631 (10.22.8.221/56631)(LOCAL\user.name) to inside:10.22.8.26/389 (10.22.8.26/389) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168189 for Outside_VPN:209.111.72.10/56619 to DMZ-Inside:10.22.8.53/443 duration 0:00:00 bytes 2477 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.112/52235 to 198.111.72.227/80 flags ACK  on interface Inside-Trunk
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167192 for Outside_VPN:115.111.72.7/49196 to DMZ-Inside:10.22.8.57/443 duration 0:00:02 bytes 20588 TCP Reset-O
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212806055 for outside:10.22.8.62/55383(LOCAL\user.name) to inside:10.22.8.85/53 duration 0:00:00 bytes 349 (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168380 for Outside_VPN:74.111.72.12/443 to Inside-Trunk:10.22.8.39/54894 duration 0:00:00 bytes 5701 TCP FINs
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245522 for outside:10.22.8.147/56343 (10.22.8.147/56343) to inside:209.111.72.151/443 (209.111.72.151/443) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168443 for Outside_VPN:23.111.72.27/80 to Inside-Trunk:10.22.8.81/64713 duration 0:00:00 bytes 2426 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488111566 for Outside_VPN:131.111.72.49/443 to Inside-Trunk:10.22.8.127/56558 duration 0:01:57 bytes 3614 TCP Reset-O
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806061 for outside:10.22.8.17/58635 (10.22.8.17/58635)(LOCAL\user.name) to inside:10.22.8.12/389 (10.22.8.12/389) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806010 for outside:10.22.8.33/60223(LOCAL\user.name) to inside:10.22.8.86/389 duration 0:00:00 bytes 416 TCP Reset-I (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806062 for outside:10.22.8.221/56632 (10.22.8.221/56632)(LOCAL\user.name) to inside:10.22.8.73/389 (10.22.8.73/389) (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168231 for Outside_VPN:204.111.72.243/3011 to Inside-Trunk:10.22.8.208/60037 duration 0:00:00 bytes 19415 TCP FINs
+<166>Jan  5 16:52:35 10.22.8.41 %ASA-6-302013: Built inbound TCP connection 45476108 for Outside:10.22.8.97/53484 (10.22.8.97/53484)(LOCAL\user.name) to Inside:141.111.72.70/7576 (141.111.72.70/7576) (user.name)
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245527 for outside:10.22.8.97/65195 (10.22.8.97/65195) to inside:17.111.72.212/5223 (17.111.72.212/5223) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806018 for outside:10.22.8.17/58632(LOCAL\user.name) to inside:10.22.8.12/389 duration 0:00:00 bytes 0 TCP FINs (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168562 for DMZ-Inside:10.22.8.51/51236 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2273 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806065 for outside:10.22.8.62/59829 (10.22.8.62/59829)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806067 for outside:10.22.8.143/62675 (10.22.8.143/62675)(LOCAL\user.name) to inside:141.111.72.12/389 (141.111.72.12/389) (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-710005: UDP request discarded from 10.22.8.223/61122 to outside:224.111.72.252/5355
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.143/0(LOCAL\user.name) gaddr 141.111.72.12/0 laddr 141.111.72.12/0 (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168547 for Outside_VPN:107.111.72.102/80 to Inside-Trunk:10.22.8.54/61676 duration 0:00:00 bytes 1030 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806078 for outside:10.22.8.221/56633 (10.22.8.221/56633)(LOCAL\user.name) to inside:10.22.8.20/389 (10.22.8.20/389) (user.name)
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-305011: Built dynamic TCP translation from inside:10.22.8.83/59915 to outside:206.111.72.41/22776
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168044 for Outside_VPN:50.111.72.39/80 to Inside-Trunk:10.22.8.75/60877 duration 0:00:01 bytes 13304 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488118326 for Outside_VPN:23.111.72.27/80 to Inside-Trunk:10.22.8.229/57901 duration 0:01:45 bytes 1942 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488160565 for Outside_VPN:72.111.72.29/80 to Inside-Trunk:10.22.8.42/57520 duration 0:00:15 bytes 1025 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488096423 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59096 duration 0:02:27 bytes 99347 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488095522 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59087 duration 0:02:29 bytes 154785 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488106557 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59134 duration 0:02:09 bytes 25319 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488096426 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59099 duration 0:02:27 bytes 26171 TCP Reset-O
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806005 for outside:10.22.8.17/58630(LOCAL\user.name) to inside:10.22.8.12/389 duration 0:00:00 bytes 3942 TCP FINs (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806085 for outside:10.22.8.143/54018 (10.22.8.143/54018)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.96/2708 gaddr 10.22.8.30/0 laddr 10.22.8.30/0 (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245537 for outside:10.22.8.110/49886 (10.22.8.110/49886) to inside:192.111.72.11/8612 (192.111.72.11/8612) (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.85/58359 to 10.22.8.11/88 flags RST ACK  on interface Outside
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.82/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212799832 for outside:10.22.8.230/55549(LOCAL\user.name) to inside:10.22.8.11/389 duration 0:02:01 bytes 354 (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212799867 for outside:10.22.8.240/138(LOCAL\user.name) to inside:10.22.8.255/138 duration 0:02:01 bytes 214 (user.name)
+<167>Jan  5 08:52:36 10.22.8.216 %ASA-7-609001: Built local-host inside:67.111.72.204
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245544 for outside:10.22.8.227/54540 (10.22.8.227/54540) to inside:63.111.72.124/80 (63.111.72.124/80) (user.name)
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168135 for Outside_VPN:198.111.72.66/36797 to DMZ-Inside:10.22.8.53/80 duration 0:00:01 bytes 89039 TCP FINs
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805836 for outside:10.22.8.62/56471(LOCAL\user.name) to inside:208.111.72.1/443 duration 0:00:04 bytes 1700 TCP FINs (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245546 for outside:10.22.8.227/54542 (10.22.8.227/54542) to inside:63.111.72.124/80 (63.111.72.124/80) (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.74/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302020: Built outbound ICMP connection for faddr 10.22.8.96/2708 gaddr 10.22.8.30/0 laddr 10.22.8.30/0
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168388 for DMZ-Inside:10.22.8.10/49771 to Inside-Trunk:10.22.8.128/443 duration 0:00:00 bytes 19132 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168692 for DMZ-Inside:10.22.8.53/61694 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 5660 TCP FINs
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245552 for outside:10.22.8.92/51042 (10.22.8.92/51042) to inside:10.22.8.193/9100 (10.22.8.193/9100) (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-302016: Teardown UDP connection 45474680 for Outside:10.22.8.49/137(LOCAL\user.name) to Inside:10.22.8.12/137 duration 0:02:03 bytes 486 (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-302016: Teardown UDP connection 45474694 for Outside:10.22.8.49/138(LOCAL\user.name) to Inside:10.22.8.12/138 duration 0:02:01 bytes 184 (user.name)
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167720 for Outside_VPN:198.111.72.75/1033 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9634 TCP FINs
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488165627 for Outside_VPN:170.111.72.22/27463 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9756 TCP FINs
+<166>Jan  5 08:52:32 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212805854 for outside:10.22.8.62/54704(LOCAL\user.name) to inside:10.22.8.85/53 duration 0:00:00 bytes 114 (user.name)
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-302020: Built inbound ICMP connection for faddr 207.111.72.122/0 gaddr 206.111.72.24/512 laddr 10.22.8.57/512
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-302013: Built outbound TCP connection 17605397 for outside:69.111.72.0/80 (69.111.72.0/80) to inside:10.22.8.102/55659 (206.111.72.41/40627)
+<174>Jan  5 14:52:32 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245230 for outside:10.22.8.96/123 (10.22.8.96/123) to inside:10.22.8.12/123 (10.22.8.12/123) (user.name)
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488031413 for Outside_VPN:184.111.72.216/50341 to DMZ-Inside:10.22.8.57/443 duration 0:05:01 bytes 13543 TCP Reset-O
+<166>Jan  5 16:52:32 10.22.8.41 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.95/1(LOCAL\user.name) gaddr 10.22.8.12/0 laddr 10.22.8.12/0 (user.name)
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488030393 for DMZ-Inside:10.22.8.10/57109 to Inside-Trunk:10.22.8.128/443 duration 0:05:04 bytes 13541 TCP Reset-O
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-305012: Teardown dynamic TCP translation from inside:10.22.8.149/62156 to outside:206.111.72.41/19576 duration 0:00:44
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-305012: Teardown dynamic TCP translation from inside:10.22.8.149/62159 to outside:206.111.72.41/39634 duration 0:00:44
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488031793 for Outside_VPN:198.111.72.146/28026 to DMZ-Inside:10.22.8.53/443 duration 0:05:00 bytes 119 TCP FINs
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488030810 for DMZ-Inside:10.22.8.10/56930 to Inside-Trunk:10.22.8.128/443 duration 0:05:03 bytes 13543 TCP Reset-O
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 186.111.72.11/80 to 204.111.72.199/61438 flags SYN ACK  on interface Outside_VPN
+<166>Jan  5 08:52:32 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212805863 for outside:10.22.8.144/61999 (10.22.8.144/61999)(LOCAL\user.name) to inside:10.22.8.163/80 (10.22.8.163/80) (user.name)
+<167>Jan  5 08:52:32 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
\ No newline at end of file


[03/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/PaloaltoOutput
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/PaloaltoOutput b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/PaloaltoOutput
new file mode 100644
index 0000000..16793a2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/PaloaltoOutput
@@ -0,0 +1,100 @@
+<11>Jan  5 05:38:59 PAN1.exampleCustomer.com 1,2015/01/05 05:38:58,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:58,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:58,12031,1,54180,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=67AF705D60B1119C0F18BEA336F9",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368099,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109656,, 
+<11>Jan  5 05:38:59 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,9399,1,54185,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=8;tile=1;ord=F7315B6954238BE7FAE19D6EE0ECD",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368106,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109661,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,50636,1,54181,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=ECA531364D3B6522F9B89EE09381",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368111,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109663,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,19582,1,54177,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=9DB9E71EB91389C954E499B68203",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368112,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109664,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,38426,1,54202,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=336x288&id=4;tile=1;ord=B1B8DA9446290140922C4F6E092D8",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368119,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109668,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:36,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:36,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:36,28124,1,56475,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=6;tile=1;ord=E526836F078EB22491799C6373ED3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431967,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109692,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:37,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:37,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:37,36574,1,56485,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=6;tile=1;ord=E526836F078EB22491799C6373ED3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431978,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109694,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:37,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:37,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:37,3892,1,56486,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=E052042F211E553D6E1E44921E49",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431979,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109695,, 
+<11>Jan  5 07:15:23 PAN1.exampleCustomer.com 1,2015/01/05 07:15:23,0006C110285,THREAT,vulnerability,1,2015/01/05 07:15:23,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:15:23,15102,1,56706,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=EB863BEB8809A5598F62C4CEDED7",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347434790,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109701,, 
+<11>Jan  5 07:15:23 PAN1.exampleCustomer.com 1,2015/01/05 07:15:23,0006C110285,THREAT,vulnerability,1,2015/01/05 07:15:23,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:15:23,54920,1,56704,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=4FB22ED5B7A0C344DB28AB34C1B3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347434799,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109706,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,59603,1,56051,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=6845CCF1045EE15B60F30B807684",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421830,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109684,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,24223,1,56042,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=256A9BBB8867977D118E2E511742",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421831,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109685,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,61627,1,56043,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=6845CCF1045EE15B60F30B807684",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421828,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109682,, 
+<11>Jan  5 07:11:36 PAN1.exampleCustomer.com 1,2015/01/05 07:11:36,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:36,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:36,37087,1,56307,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=E052042F211E553D6E1E44921E49",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431965,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109691,, 
+<11>Jan  5 05:48:38 PAN1.exampleCustomer.com 1,2015/01/05 05:48:38,0006C110285,THREAT,vulnerability,1,2015/01/05 05:48:38,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:48:38,48136,1,54557,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=EDD821C39BC0A49777874E02F7FA",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347373997,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109676,, 
+<11>Jan  5 05:39:01 PAN1.exampleCustomer.com 1,2015/01/05 05:39:00,0006C110285,THREAT,vulnerability,1,2015/01/05 05:39:00,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:39:00,60649,1,54209,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=728x90&id=1;tile=1;ord=6510BF66C3B427ED44AC521752E695",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368140,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109674,, 
+<12>Jan  5 06:41:35 PAN1.exampleCustomer.com 1,2015/01/05 06:41:34,0006C113118,THREAT,virus,1,2015/01/05 06:41:34,94.0.0.3,10.0.0.208,94.0.0.3,211.0.10.226,EX-Allow,,example\user.name,web-browsing,vsys1,untrust,trust,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 06:41:34,16864,2,80,60194,80,56595,0x404000,tcp,deny,"FreemakeVideoConverterSetup.exe",Virus/Win32.WGeneric.dyxeh(2367869),any,medium,server-to-client,40462931,0x0,GB,10.0.0.0-10.255.255.255,0,,0,, 
+<10>Jan  5 05:58:47 PAN1 1,2015/01/05 05:58:46,009401011564,THREAT,vulnerability,1,2015/01/05 05:58:46,10.0.0.38,10.3.0.31,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 05:58:46,44183,1,60510,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,724178,0x0,Unknown,Unknown,0,,1200515273392656547,, 
+<11>Jan  5 07:41:48 PAN1.exampleCustomer.com 1,2015/01/05 07:41:47,0006C110285,THREAT,vulnerability,1,2015/01/05 07:41:47,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:41:47,20240,1,65530,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=9944D12C8FB4EB798036CAD371C6",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347454781,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109719,, 
+<11>Jan  5 07:41:48 PAN1.exampleCustomer.com 1,2015/01/05 07:41:47,0006C110285,THREAT,vulnerability,1,2015/01/05 07:41:47,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:41:47,2518,1,65531,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=8;tile=1;ord=E0827A4B1C6179DF64205E13AECDF",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347454775,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109715,, 
+<12>Jan  5 09:08:53 PAN1.exampleCustomer.com 1,2015/01/05 09:08:52,0011C103117,THREAT,virus,1,2015/01/05 09:08:52,61.0.0.202,10.0.0.81,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,web-browsing,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 09:08:52,72686,1,80,60538,0,0,0x4000,tcp,deny,"haozip_v5.0_up6.exe",Virus/Win32.WGeneric.dpqqf(2516743),any,medium,server-to-client,3422073984,0x0,CN,10.0.0.0-10.255.255.255,0,,0,, 
+<12>Jan  5 09:10:14 PAN1.exampleCustomer.com 1,2015/01/05 09:10:13,001606003946,THREAT,virus,1,2015/01/05 09:10:13,8.30.222.22,10.0.0.109,8.30.222.22,172.13.0.21,EX-Allow,,example\user.name,web-browsing,vsys1,untrust,trust,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 09:10:13,17060,1,80,64672,80,21754,0x404000,tcp,deny,"youdaogouwu-3.13-dictupdate.exe",Virus/Win32.WGeneric.dyugt(2272380),any,medium,server-to-client,38698043,0x0,US,10.0.0.0-10.255.255.255,0,,0,, 
+<11>Jan  5 09:10:37 PAN1 1,2015/01/05 09:10:36,0003C105690,THREAT,vulnerability,1,2015/01/05 09:10:36,10.0.0.222,95.0.0.154,192.168.100.11,95.0.0.154,Guest_to_Internet,,,web-browsing,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 09:10:36,97395,1,59784,80,46548,80,0x80400000,tcp,reset-both,"8-134.0-87.0.zip",HTTP Unauthorized Brute-force Attack(40031),any,high,client-to-server,247195018,0x0,10.0.0.0-10.255.255.255,IT,0,,1200340530903386781,, 
+<11>Jan  5 09:02:24 PAN1 1,2015/01/05 09:02:24,0003C105690,THREAT,vulnerability,1,2015/01/05 09:02:24,10.0.0.222,95.0.0.154,192.168.100.11,95.0.0.154,Guest_to_Internet,,,web-browsing,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 09:02:24,137904,1,59762,80,7021,80,0x80400000,tcp,reset-both,"8-136.0-83.0.zip",HTTP Unauthorized Brute-force Attack(40031),any,high,client-to-server,247188168,0x0,10.0.0.0-10.255.255.255,IT,0,,1200340530903386777,, 
+<11>Jan  5 09:23:52 PAN1 1,2015/01/05 09:23:51,009401011564,THREAT,vulnerability,1,2015/01/05 09:23:51,10.0.0.135,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 09:23:51,15299,1,49643,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,732393,0x0,Unknown,Unknown,0,,1200515273392656561,, 
+<10>Jan  5 10:03:58 PAN1 1,2015/01/05 10:03:58,009401011564,THREAT,vulnerability,1,2015/01/05 10:03:58,10.0.0.38,10.3.0.37,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:03:58,57935,1,11648,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733522,0x0,Unknown,Unknown,0,,1200515273392656570,, 
+<11>Jan  5 07:19:09 PAN1 1,2015/01/05 07:19:08,009401011564,THREAT,vulnerability,1,2015/01/05 07:19:08,10.0.0.135,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 07:19:08,22557,1,49638,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,727520,0x0,Unknown,Unknown,0,,1200515273392656555,, 
+<10>Jan  5 10:04:00 PAN1 1,2015/01/05 10:04:00,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:00,10.0.0.38,10.2.0.40,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:00,37972,1,43861,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733536,0x0,Unknown,Unknown,0,,1200515273392656584,, 
+<10>Jan  5 10:04:01 PAN1 1,2015/01/05 10:04:01,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:01,10.0.0.38,172.13.0.68,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:01,49163,1,43869,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733543,0x0,Unknown,US,0,,1200515273392656591,, 
+<10>Jan  5 02:16:00 PAN1.exampleCustomer.com 1,2015/01/05 02:16:00,009401009421,THREAT,spyware,1,2015/01/05 02:16:00,10.0.0.67,54.0.0.140,68.1.100.154,54.0.0.140,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 02:16:00,2898,1,50429,80,13954,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3841944,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<10>Jan  5 02:16:17 PAN1.exampleCustomer.com 1,2015/01/05 02:16:17,009401009421,THREAT,spyware,1,2015/01/05 02:16:17,10.0.0.67,54.0.0.140,68.1.100.154,54.0.0.140,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 02:16:17,21959,1,50459,80,45933,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3842040,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<10>Jan  5 10:55:21 PAN1.exampleCustomer.com 1,2015/01/05 10:55:21,0011C103117,THREAT,vulnerability,1,2015/01/05 10:55:21,172.13.0.44,10.0.0.48,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 10:55:21,116502,1,55910,443,0,0,0x80004000,tcp,reset-both,"bar.exampleCustomer.com/",OpenSSL SSL/TLS MITM vulnerability(36485),any,critical,client-to-server,3422361316,0x0,NO,10.0.0.0-10.255.255.255,0,,1200269920802300348,, 
+<12>Jan  5 11:31:36 PAN1.exampleCustomer.com 1,2015/01/05 11:31:36,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:36,31.0.0.198,10.0.0.210,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:36,181928,1,55325,443,0,0,0x80004000,tcp,alert,"foo.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422463820,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300355,, 
+<12>Jan  5 11:31:17 PAN1.exampleCustomer.com 1,2015/01/05 11:31:17,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:17,31.0.0.198,10.0.0.56,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:17,33936654,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422463061,0x0,CH,10.0.0.0-10.255.255.255,0,,1344385108878191554,, 
+<12>Jan  5 11:07:20 PAN1.exampleCustomer.com 1,2015/01/05 11:07:20,0011C103117,THREAT,vulnerability,1,2015/01/05 11:07:20,31.0.0.198,10.0.0.70,0.0.0.0,0.0.0.0,EX-EasyAV,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:07:20,142520,1,55325,443,0,0,0x4000,tcp,alert,"fizzbuzz.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422395620,0x0,CH,10.0.0.0-10.255.255.255,0,,0,, 
+<10>Jan  5 10:04:06 PAN1 1,2015/01/05 10:04:05,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:05,10.0.0.38,10.2.0.20,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:05,58977,1,43882,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733556,0x0,Unknown,Unknown,0,,1200515273392656603,, 
+<11>Jan  5 11:20:02 PAN1 1,2015/01/05 11:20:02,009401011564,THREAT,vulnerability,1,2015/01/05 11:20:02,10.0.0.131,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 11:20:02,25219,1,49569,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,735575,0x0,Unknown,Unknown,0,,1200515273392656605,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,12971,1,56879,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=339DEA400FDFBF9127DA196347F1",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631498,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109742,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,52846,1,56881,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=A501E1CAA93F3B256222F902C051",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631499,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109743,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,132,1,56880,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=A01019D3E75E253C81B9DBE60AF0",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631500,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109744,, 
+<11>Jan  5 11:39:28 PAN1.exampleCustomer.com 1,2015/01/05 11:39:28,0006C110285,THREAT,vulnerability,1,2015/01/05 11:39:28,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 11:39:28,55273,1,55241,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=13;tile=1;ord=F20325FB397BD62AFCE60C004651",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347599433,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109725,, 
+<11>Jan  5 12:09:04 PAN1.exampleCustomer.com 1,2015/01/05 12:09:03,0006C110285,THREAT,vulnerability,1,2015/01/05 12:09:03,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:09:03,40131,1,61994,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=9C998477823511B311AA24EC53D6",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347617382,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109736,, 
+<12>Jan  5 13:45:24 PAN1.exampleCustomer.com 1,2015/01/05 13:45:23,0011C103117,THREAT,vulnerability,1,2015/01/05 13:45:23,31.0.0.198,10.0.0.60,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 13:45:23,179279,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",Unknown(36397),any,medium,client-to-server,3423036992,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300367,, 
+<12>Jan  5 13:45:24 PAN1.exampleCustomer.com 1,2015/01/05 13:45:23,0011C103117,THREAT,vulnerability,1,2015/01/05 13:45:23,10.0.0.10,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,twitter-base,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 13:45:23,32298,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3423036994,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,1200269920802300369,, 
+<10>Jan  5 04:24:30 PAN1.exampleCustomer.com 1,2015/01/05 04:24:29,009401009421,THREAT,spyware,1,2015/01/05 04:24:29,10.0.0.67,54.0.0.133,68.1.100.154,54.0.0.133,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:24:29,18282,1,49800,80,13532,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3875271,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<12>Jan  5 11:32:12 PAN1.exampleCustomer.com 1,2015/01/05 11:32:12,0011C103117,THREAT,vulnerability,1,2015/01/05 11:32:12,31.0.0.198,10.0.0.102,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:32:12,255259,1,55325,443,0,0,0x80004000,tcp,alert,"foo.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422465396,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300360,, 
+<12>Jan  5 11:31:46 PAN1.exampleCustomer.com 1,2015/01/05 11:31:46,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:46,31.0.0.198,10.0.0.50,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:46,33699961,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422464320,0x0,CH,10.0.0.0-10.255.255.255,0,,1344385108878191555,, 
+<12>Jan  5 11:36:03 PAN1.exampleCustomer.com 1,2015/01/05 11:36:02,0006C113555,THREAT,vulnerability,1,2015/01/05 11:36:02,10.0.0.62,10.1.0.11,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,msrpc,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 11:36:02,16469,1,51461,445,0,0,0x80004000,tcp,alert,"",Microsoft DCE RPC Big Endian Evasion Vulnerability(33510),any,medium,client-to-server,46375536,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,1200283142590569503,, 
+<11>Jan  5 13:26:50 PAN1.exampleCustomer.com 1,2015/01/05 13:26:49,0011C103117,THREAT,vulnerability,1,2015/01/05 13:26:49,10.0.0.167,10.1.0.41,0.0.0.0,0.0.0.0,EX-EasyAV,example\user.name.hernandez,,ssh,vsys1,v_internal,v_external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 13:26:49,121926,1,49754,9101,0,0,0x4000,tcp,reset-both,"",SSH User Authentication Brute-force Attempt(40015),any,high,client-to-server,3422922092,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,0,, 
+<11>Jan  5 10:18:37 NTOR1FWPAN1 1,2015/01/05 10:18:37,009401008933,THREAT,vulnerability,1,2015/01/05 10:18:37,10.0.0.50,54.0.0.7,38.140.11.98,54.0.0.7,TOR-outbound,,,web-browsing,vsys1,Inside,Outside,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 10:18:37,7226,1,51724,80,58706,80,0x80400000,tcp,reset-both,"_PhotoXML.php",Microsoft Office Sharepoint Server Elevation of Privilege Vulnerability(32001),any,high,client-to-server,1252593,0x0,10.0.0.0-10.255.255.255,US,0,,1200584606076633093,, 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,34103936,1,54270,40004,0,0,0x401c,tcp,allow,5385,3299,2086,26,2015/01/05 12:51:01,30,any,0,17754932047,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.22,10.1.0.28,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,vmware,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33888863,1,62961,902,0,0,0x4019,udp,allow,108,108,0,1,2015/01/05 12:51:01,30,any,0,17754932051,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.13.0.2,10.0.0.32,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33841444,1,17294,53,0,0,0x4019,udp,allow,94,94,0,1,2015/01/05 12:51:01,30,any,0,17754932054,0x0,US,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,71.0.0.174,10.0.0.32,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33992062,1,57783,53,0,0,0x4019,udp,allow,247,86,161,2,2015/01/05 12:51:01,30,any,0,17754932055,0x0,US,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,58.0.0.196,10.0.0.17,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34310602,1,25678,443,0,0,0x4053,tcp,allow,21935,11456,10479,44,2015/01/05 12:48:44,167,EX-Allowed,0,17754932059,0x0,IN,10.0.0.0-10.255.255.255,0,20,24 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33760927,1,52688,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932062,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33595018,1,52689,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932064,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.7,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,,,netbios-ns,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34098107,1,137,137,0,0,0x4019,udp,allow,532,220,312,6,2015/01/05 12:51:01,30,any,0,17754932070,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,3,3 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34326343,1,52690,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932071,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.13.0.15,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,eset-remote-admin,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33859365,1,23390,443,0,0,0x405e,tcp,allow,725,405,320,11,2015/01/05 12:51:01,30,any,0,17754932073,0x0,US,10.0.0.0-10.255.255.255,0,6,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33621086,1,54266,40004,0,0,0x401c,tcp,allow,5325,3299,2026,25,2015/01/05 12:51:01,30,any,0,17754932075,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,14 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,96.0.0.102,10.0.0.57,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33924142,1,51230,443,0,0,0x4053,tcp,allow,18350,9280,9070,41,2015/01/05 12:51:01,30,EX-Allowed,0,17754932080,0x0,US,10.0.0.0-10.255.255.255,0,19,22 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,72.0.0.131,10.0.0.174,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34186774,1,28203,443,0,0,0x4053,tcp,allow,4121,2209,1912,20,2015/01/05 12:51:01,30,EX-Allowed,0,17754932086,0x0,US,10.0.0.0-10.255.255.255,0,10,10 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,216.0.10.244,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33988765,1,45150,443,0,0,0x401c,tcp,allow,626,358,268,9,2015/01/05 12:50:41,50,any,0,17754932095,0x0,US,10.0.0.0-10.255.255.255,0,5,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.12.216.82,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,eset-update,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33577240,1,3882,80,0,0,0x401c,tcp,allow,94947,2570,92377,106,2015/01/05 12:50:47,44,EX-Allowed,0,17754932107,0x0,US,10.0.0.0-10.255.255.255,0,38,68 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.33,10.1.0.85,0.0.0.0,0.0.0.0,EX-Allow,,,zabbix,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34078885,1,46056,10050,0,0,0x405e,tcp,allow,728,367,361,11,2015/01/05 12:51:01,30,any,0,17754932117,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.33,0.0.0.0,0.0.0.0,EX-Allow,,,incomplete,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34222137,1,59966,443,0,0,0x401c,tcp,allow,404,198,206,7,2015/01/05 12:51:01,30,any,0,17754932131,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,4,3 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.12,172.13.0.23,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,33560784,1,52991,53,0,0,0x4019,udp,allow,815,96,719,2,2015/01/05 12:51:01,30,any,0,17754932142,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.52,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,109384,1,50721,40004,0,0,0x401c,tcp,allow,4211,2125,2086,25,2015/01/05 12:51:02,30,any,0,17754932194,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,10,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,134519,1,54273,40004,0,0,0x401c,tcp,allow,5375,3289,2086,26,2015/01/05 12:51:02,30,any,0,17754932204,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,15005,1,54268,40004,0,0,0x401c,tcp,allow,7084,3787,3297,26,2015/01/05 12:51:02,30,any,0,17754932228,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:33,0003C105690,TRAFFIC,drop,1,2015/01/05 12:51:33,10.0.0.219,10.3.0.21,0.0.0.0,0.0.0.0,catch all deny,,,not-applicable,vsys1,GuestAccess,trust,vlan.84,,LOG-Default,2015/01/05 12:51:33,0,1,62063,389,0,0,0x0,tcp,deny,70,70,0,1,2015/01/05 12:51:34,0,any,0,956329030,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.217,172.13.0.168,186.225.121.238,172.13.0.168,Guest WiFi to Internet,,,skype-probe,vsys1,Guest WiFi,Ext_Internet,ethernet1/3.109,ethernet1/2,LOG-Default,2015/01/05 12:51:33,46888,1,11566,40023,55962,40023,0x404050,udp,allow,1446,79,1367,2,2015/01/05 12:51:03,0,any,0,265102737,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.20,10.1.0.28,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,vmware,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 12:51:33,46821,1,61199,902,0,0,0x4019,udp,allow,108,108,0,1,2015/01/05 12:51:03,0,any,0,265102739,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:33,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:33,188024,1,57269,53,59952,53,0x400019,udp,allow,194,73,121,2,2015/01/05 12:50:49,0,any,0,956329037,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,172.13.0.110,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_internal,v_external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,51569,1,60390,53,0,0,0x4019,udp,allow,815,96,719,2,2015/01/05 12:51:02,30,any,0,17754932369,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,185459,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932372,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.44,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,84730,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932379,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.73,10.1.0.12,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,12561,1,57334,53,0,0,0x4019,udp,allow,206,95,111,2,2015/01/05 12:51:03,0,any,0,803406326,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,3286,1,57095,80,0,0,0x401c,tcp,allow,3506,899,2607,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406334,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,42426,1,57096,80,0,0,0x401c,tcp,allow,3386,1390,1996,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406335,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,15733,1,57130,80,0,0,0x401c,tcp,allow,1661,926,735,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406337,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.60,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,239420,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932383,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,63.0.0.78,68.1.100.154,63.0.0.78,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,13687,1,53152,80,64294,80,0x40001c,tcp,allow,1039,576,463,12,2015/01/05 04:51:03,1,search-engines,0,8195211,0x0,10.0.0.0-10.255.255.255,US,0,6,6 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,77.0.0.59,68.1.100.154,77.0.0.59,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,36193,1,53155,80,48756,80,0x40001c,tcp,allow,946,540,406,10,2015/01/05 04:51:04,0,computer-and-internet-security,0,8195212,0x0,10.0.0.0-10.255.255.255,CZ,0,5,5 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,63.0.0.78,68.1.100.154,63.0.0.78,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,8727,1,53154,80,6852,80,0x40001c,tcp,allow,1039,576,463,12,2015/01/05 04:51:04,0,search-engines,0,8195213,0x0,10.0.0.0-10.255.255.255,US,0,6,6 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,77.0.0.59,68.1.100.154,77.0.0.59,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,16955,1,53153,80,19440,80,0x40001c,tcp,allow,946,540,406,10,2015/01/05 04:51:03,1,computer-and-internet-security,0,8195216,0x0,10.0.0.0-10.255.255.255,CZ,0,5,5 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.101,23.200.10.217,68.0.0.154,23.200.10.217,EX-WebControlRestrict,,,itunes-base,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,14851,1,55137,443,29553,443,0x400019,tcp,allow,654,580,74,7,2015/01/05 04:50:34,0,shopping,0,8195217,0x0,10.0.0.0-10.255.255.255,US,0,6,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.46,172.13.0.2,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,incomplete,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 12:51:34,57850,1,65286,139,0,0,0x4019,tcp,allow,62,62,0,1,2015/01/05 12:51:29,0,any,0,265102746,0x0,10.0.0.0-10.255.255.255,192.168.0.0-192.168.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,216.0.10.194,192.168.100.11,0.0.0.0,0.0.0.0,Internet to Internet,,,insufficient-data,vsys1,untrust,untrust,vlan.200,vlan.200,LOG-Default,2015/01/05 12:51:34,259007,1,80,11347,0,0,0xc,udp,allow,90,90,0,1,2015/01/05 12:50:25,0,any,0,956329050,0x0,US,US,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:34,13024,1,56694,53,51398,53,0x400019,udp,allow,222,82,140,2,2015/01/05 12:50:49,0,any,0,956329055,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:34,62999,1,58277,53,5576,53,0x400019,udp,allow,328,96,232,2,2015/01/05 12:50:49,0,any,0,956329056,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,001606007155,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.156,96.0.0.138,172.13.0.35,96.0.0.138,EX-Allow,example\user.name,,web-browsing,vsys1,trust,untrust,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,61348,1,65231,80,48623,80,0x40401a,tcp,allow,50316,4297,46019,67,2015/01/05 12:51:03,1,travel,0,179851307,0x0,10.0.0.0-10.255.255.255,US,0,28,39 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,001606007155,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.148,96.0.0.35,172.13.0.35,96.0.0.35,EX-Allow,example\user.name,,symantec-av-update,vsys1,trust,untrust,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,61220,1,60900,80,12964,80,0x40401a,tcp,allow,39350,3087,36263,56,2015/01/05 12:50:07,57,computer-and-internet-security,0,179851311,0x0,10.0.0.0-10.255.255.255,US,0,23,33 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,009401003136,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.138,213.0.10.101,172.13.0.142,213.0.10.101,Outbound,,,ssl,vsys1,internal,external,ethernet1/4,ethernet1/1,LOG-Default,2015/01/05 12:51:34,62600,1,55014,443,22537,443,0x40001c,tcp,allow,2956,1853,1103,20,2015/01/05 12:51:04,0,travel,0,54644537,0x0,10.0.0.0-10.255.255.255,CH,0,9,11 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,009401003136,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.138,213.0.10.101,172.13.0.142,213.0.10.101,Outbound,,,ssl,vsys1,internal,external,ethernet1/4,ethernet1/1,LOG-Default,2015/01/05 12:51:34,45328,1,55025,443,48646,443,0x40001c,tcp,allow,2828,1845,983,18,2015/01/05 12:51:04,0,travel,0,54644544,0x0,10.0.0.0-10.255.255.255,CH,0,9,9 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0004C103634,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.165,93.0.0.200,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,ssl,vsys1,v_internal,v_external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,15787,1,53105,443,0,0,0x4053,tcp,allow,10222,1275,8947,22,2015/01/05 12:48:03,181,business-and-economy,0,307579464,0x0,10.0.0.0-10.255.255.255,EU,0,10,12 
+<14>Jan  5 12:51:35 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.11,10.3.0.26,0.0.0.0,0.0.0.0,ICMP DMZ to In,,,ping,vsys1,F5_DMZ_WAN,trust,vlan.81,vlan.399,LOG-Default,2015/01/05 12:51:34,33876,1,0,0,0,0,0x19,icmp,allow,128,64,64,2,2015/01/05 12:51:20,0,any,0,956329058,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:35 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.53,8.8.8.8,172.13.0.238,8.8.8.8,Guest WiFi to Internet,,,dns,vsys1,Guest WiFi,Ext_Internet,ethernet1/3.109,ethernet1/2,LOG-Default,2015/01/05 12:51:34,53079,1,59288,53,31746,53,0x404019,udp,allow,194,91,103,2,2015/01/05 12:51:04,0,any,0,265102750,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:35 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,141372,1,54279,40004,0,0,0x401c,tcp,allow,3783,1697,2086,25,2015/01/05 12:51:03,30,any,0,17754932394,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,10,15 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/TopologyConfigs_old/lancope.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/TopologyConfigs_old/lancope.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/TopologyConfigs_old/lancope.conf
index dde089f..c0487d2 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/TopologyConfigs_old/lancope.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/TopologyConfigs_old/lancope.conf
@@ -30,8 +30,8 @@ bolt.alerts.cluster=preprod
 #Host Enrichment
 bolt.enrichment.host.num.tasks=1
 bolt.enrichment.host.parallelism.hint=1
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 bolt.enrichment.host.source_ip=ip_src_addr
 bolt.enrichment.host.resp_ip=ip_dst_addr
@@ -43,16 +43,16 @@ bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.source_ip=ip_src_addr
 bolt.enrichment.geo.resp_ip=ip_dst_addr
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
 
 #WhoisEnrichment
 bolt.enrichment.whois.num.tasks=1
 bolt.enrichment.whois.parallelism.hint=1
 bolt.enrichment.whois.whois_enrichment_tag=whois_enrichment
 bolt.enrichment.whois.source=host\":\"(.*?)\"
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
@@ -62,8 +62,8 @@ bolt.enrichment.cif.source_ip=id.orig_h
 bolt.enrichment.cif.resp_ip=id.resp_h
 bolt.enrichment.cif.host=host
 bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 
 
 #Indexing Bolt



[09/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/resources/log4j.properties b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/resources/log4j.properties
deleted file mode 100644
index 224aed5..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,10 +0,0 @@
-# Root logger option
-log4j.rootLogger=INFO,file,stdout
- 
-# Direct log messages to a log file
-log4j.appender.file=org.apache.log4j.RollingFileAppender
-log4j.appender.file.File=/var/log/rest/cisco-rest.log
-log4j.appender.file.MaxFileSize=1MB
-log4j.appender.file.MaxBackupIndex=1
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/META-INF/MANIFEST.MF b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/META-INF/MANIFEST.MF
deleted file mode 100644
index 5e94951..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,3 +0,0 @@
-Manifest-Version: 1.0
-Class-Path: 
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/ipcap-config.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/ipcap-config.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/ipcap-config.xml
deleted file mode 100644
index c09a808..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/ipcap-config.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<beans xmlns="http://www.springframework.org/schema/beans"
-	xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd" >
-
-	<bean id="pcapGetter" class="com.cisco.opensoc.hbase.client.PcapReceiverImpl" />
-</beans>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/web.xml b/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/web.xml
deleted file mode 100644
index e4a521f..0000000
--- a/opensoc-streaming/OpenSOC-PCAP_Reconstruction/service/src/main/webapp/WEB-INF/web.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<web-app xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:web="http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd" id="WebApp_ID" version="2.5">
-  <display-name>multipart-http</display-name>
-  <servlet>
-    <servlet-name>ipcap</servlet-name>
-    <servlet-class>org.springframework.web.servlet.DispatcherServlet</servlet-class>
-    <init-param>
-      <param-name>contextConfigLocation</param-name>
-      <param-value>/WEB-INF/ipcap-config.xml</param-value>
-    </init-param>
-    <load-on-startup>1</load-on-startup>
-  </servlet>
-  <servlet-mapping>
-    <servlet-name>ipcap</servlet-name>
-    <url-pattern>/*</url-pattern>
-  </servlet-mapping>
-</web-app>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/README.txt
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/README.txt b/opensoc-streaming/OpenSOC-Pcap_Service/README.txt
new file mode 100644
index 0000000..8aba23e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/README.txt
@@ -0,0 +1,16 @@
+'hbase' module of 'opensoc' project contains the code to communicate with HBase. This module has several APIs ( refer IPcapGetter.java, IPcapScanner.java files ) 
+to fetch pcaps from HBase. Following APIs have been created under this module implementation.
+
+APIs ( in IPcapGetter.java) to get pcaps using keys :
+ 1. public PcapsResponse getPcaps(List<String> keys, String lastRowKey, long startTime, long endTime, boolean includeReverseTraffic, boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
+ 2. public PcapsResponse getPcaps(String key, long startTime, long endTime, boolean includeReverseTraffic) throws IOException;
+ 3. public PcapsResponse getPcaps(List<String> keys) throws IOException;
+ 4. public PcapsResponse getPcaps(String key) throws IOException;
+
+APIs ( in IPcapScanner.java) to get pcaps using key range :
+ 1. public byte[] getPcaps(String startKey, String endKey, long maxResponseSize, long startTime, long endTime) throws IOException;
+ 2. public byte[] getPcaps(String startKey, String endKey) throws IOException;
+ 
+ 
+Refer the wiki documentation for further details : https://hwcsco.atlassian.net/wiki/pages/viewpage.action?pageId=5242892
+ 	
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml b/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml
new file mode 100644
index 0000000..ecbce82
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml
@@ -0,0 +1,267 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>com.opensoc</groupId>
+		<artifactId>OpenSOC-Streaming</artifactId>
+		<version>0.6BETA</version>
+	</parent>
+	<artifactId>OpenSOC-Pcap_Service</artifactId>
+	<description>OpenSOC Pcap_Service</description>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<flume.version>1.4.0.2.0.6.0-76</flume.version>
+		<hadoop.version>2.2.0.2.0.6.0-76</hadoop.version>
+		<maven.compiler.target>1.7</maven.compiler.target>
+		<maven.compiler.source>1.7</maven.compiler.source>
+		<storm.version>0.9.2-incubating</storm.version>
+		<kafka.version>0.8.0</kafka.version>
+		<slf4j.version>1.7.5</slf4j.version>
+		<zookeeper.version>3.4.5.2.0.6.0-76</zookeeper.version>
+		<logger.version>1.2.15</logger.version>
+
+		<storm-kafka.version>0.9.2-incubating</storm-kafka.version>
+		<storm-hdfs.version>0.0.7-SNAPSHOT</storm-hdfs.version>
+		<storm-hbase.version>0.0.5-SNAPSHOT</storm-hbase.version>
+
+		<spring.integration.version>3.0.0.RELEASE</spring.integration.version>
+		<spring.version>3.2.6.RELEASE</spring.version>
+		<commons-fileupload.version>1.2.2</commons-fileupload.version>
+		<commons-io.version>2.4</commons-io.version>
+		<commons-configuration.version>1.10</commons-configuration.version>
+		<commons-lang.version>2.6</commons-lang.version>
+		<commons-collections.version>3.2.1</commons-collections.version>
+		<commons-beanutils.version>1.8.3</commons-beanutils.version>
+		<commons-jexl.version>2.1.1</commons-jexl.version>
+
+
+		<junit.version>4.11</junit.version>
+		<hamcrest.version>1.3</hamcrest.version>
+		<mockito.version>1.9.5</mockito.version>
+		<elastic-search.version>1.3.0</elastic-search.version>
+	</properties>
+	<dependencies>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>jaxrs-api</artifactId>
+			<version>3.0.4.Final</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Common</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>commons-beanutils</groupId>
+			<artifactId>commons-beanutils</artifactId>
+			<version>${commons-beanutils.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-jexl</artifactId>
+			<version>${commons-jexl.version}</version>
+		</dependency>
+
+		<dependency>
+			<artifactId>commons-configuration</artifactId>
+			<groupId>commons-configuration</groupId>
+			<version>${commons-configuration.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-api</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>${junit.version}</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-api-mockito</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-core</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-module-junit4</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>joda-time</groupId>
+			<artifactId>joda-time</artifactId>
+			<version>2.3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-client</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-testing-util</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-common</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-http</artifactId>
+			<version>${spring.integration.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-webmvc</artifactId>
+			<version>${spring.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>log4j</groupId>
+			<artifactId>log4j</artifactId>
+			<version>${logger.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>com.sun.jmx</groupId>
+					<artifactId>jmxri</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jdmk</groupId>
+					<artifactId>jmxtools</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>javax.jms</groupId>
+					<artifactId>jms</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+
+
+
+
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxrs</artifactId>
+			<version>3.0.1.Final</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-simple</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxb-provider</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>async-http-servlet-3.0</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-simple</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<configuration>
+					<archive>
+						<manifest>
+							<mainClass>com.opensoc.pcapservice.rest.PcapService</mainClass>
+						</manifest>
+					</archive>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+				</configuration>
+				<executions>
+					<execution>
+						<id>make-assembly</id> <!-- this is used for inheritance merges -->
+						<phase>package</phase> <!-- bind to the packaging phase -->
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml.versionsBackup
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml.versionsBackup b/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml.versionsBackup
new file mode 100644
index 0000000..a400fe2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/pom.xml.versionsBackup
@@ -0,0 +1,268 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<!-- NOTE(review): this file is a pom.xml.versionsBackup, presumably written by
+	     the versions-maven-plugin when versions were rewritten. Such backups are
+	     normally transient - confirm it is intended to be committed. -->
+	<parent>
+		<groupId>com.opensoc</groupId>
+		<artifactId>OpenSOC-Streaming</artifactId>
+		<version>0.4BETA</version>
+	</parent>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<flume.version>1.4.0.2.0.6.0-76</flume.version>
+		<hadoop.version>2.2.0.2.0.6.0-76</hadoop.version>
+		<maven.compiler.source>${jdk.version}</maven.compiler.source>
+		<maven.compiler.target>${jdk.version}</maven.compiler.target>
+
+		<storm.version>0.9.2-incubating</storm.version>
+		<kafka.version>0.8.0</kafka.version>
+		<slf4j.version>1.7.5</slf4j.version>
+		<zookeeper.version>3.4.5.2.0.6.0-76</zookeeper.version>
+		<logger.version>1.2.15</logger.version>
+
+		<storm-kafka.version>0.9.2-incubating</storm-kafka.version>
+		<storm-hdfs.version>0.0.7-SNAPSHOT</storm-hdfs.version>
+		<storm-hbase.version>0.0.5-SNAPSHOT</storm-hbase.version>
+
+		<spring.integration.version>3.0.0.RELEASE</spring.integration.version>
+		<spring.version>3.2.6.RELEASE</spring.version>
+		<commons-fileupload.version>1.2.2</commons-fileupload.version>
+		<commons-io.version>2.4</commons-io.version>
+		<commons-configuration.version>1.10</commons-configuration.version>
+		<commons-lang.version>2.6</commons-lang.version>
+		<commons-collections.version>3.2.1</commons-collections.version>
+		<commons-beanutils.version>1.8.3</commons-beanutils.version>
+		<commons-jexl.version>2.1.1</commons-jexl.version>
+
+
+		<junit.version>4.11</junit.version>
+		<hamcrest.version>1.3</hamcrest.version>
+		<mockito.version>1.9.5</mockito.version>
+		<elastic-search.version>1.3.0</elastic-search.version>
+	</properties>
+	<dependencies>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>jaxrs-api</artifactId>
+			<version>3.0.4.Final</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Common</artifactId>
+			<!-- NOTE(review): Maven 3 deprecates ${parent.version}; prefer
+			     ${project.parent.version} (or ${project.version} when they match). -->
+			<version>${parent.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>commons-beanutils</groupId>
+			<artifactId>commons-beanutils</artifactId>
+			<version>${commons-beanutils.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-jexl</artifactId>
+			<version>${commons-jexl.version}</version>
+		</dependency>
+
+		<dependency>
+			<artifactId>commons-configuration</artifactId>
+			<groupId>commons-configuration</groupId>
+			<version>${commons-configuration.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-api</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>${junit.version}</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-api-mockito</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-core</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-module-junit4</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>joda-time</groupId>
+			<artifactId>joda-time</artifactId>
+			<version>2.3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-client</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-testing-util</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-common</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+		</dependency>
+
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-http</artifactId>
+			<version>${spring.integration.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-webmvc</artifactId>
+			<version>${spring.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>log4j</groupId>
+			<artifactId>log4j</artifactId>
+			<version>${logger.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>com.sun.jmx</groupId>
+					<artifactId>jmxri</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jdmk</groupId>
+					<artifactId>jmxtools</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>javax.jms</groupId>
+					<artifactId>jms</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+
+
+
+
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxrs</artifactId>
+			<version>3.0.1.Final</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-simple</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxb-provider</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>async-http-servlet-3.0</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-simple</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<configuration>
+					<archive>
+						<manifest>
+							<mainClass>com.opensoc.pcapservice.rest.PcapService</mainClass>
+						</manifest>
+					</archive>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+				</configuration>
+				<executions>
+					<execution>
+						<id>make-assembly</id> <!-- this is used for inheritance merges -->
+						<phase>package</phase> <!-- bind to the packaging phase -->
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
new file mode 100644
index 0000000..e45d849
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
@@ -0,0 +1,23 @@
+package com.opensoc.pcapservice;
+
+import java.util.Comparator;
+
+import org.apache.hadoop.hbase.Cell;
+
+/**
+ * Comparator that orders pcap {@link Cell}s by timestamp, ascending.
+ * 
+ * @author Sayi
+ */
+public class CellTimestampComparator implements Comparator<Cell> {
+
+  /**
+   * Compares two cells by their timestamps.
+   *
+   * @param o1 the first cell
+   * @param o2 the second cell
+   * @return a negative value, zero, or a positive value when o1's timestamp is
+   *         less than, equal to, or greater than o2's
+   */
+  @Override
+  public int compare(Cell o1, Cell o2) {
+    // Long.compare avoids the boxing done by Long.valueOf(..).compareTo(..).
+    return Long.compare(o1.getTimestamp(), o2.getTimestamp());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
new file mode 100644
index 0000000..be1a1bf
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
@@ -0,0 +1,269 @@
+package com.opensoc.pcapservice;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.springframework.util.Assert;
+
+import com.opensoc.configuration.ConfigurationManager;
+
+/**
+ * Utility class for this module which loads commons configuration to fetch
+ * properties from underlying resources to communicate with hbase.
+ * 
+ * @author Sayi
+ */
+public class ConfigurationUtil {
+
+	/** Configuration definition file name for fetching pcaps from hbase. */
+	private static final String configDefFileName = "config-definition-hbase.xml";
+
+	/** Lazily initialised property configuration; see getConfiguration(). */
+	private static Configuration propConfiguration = null;
+
+	/**
+	 * Units in which size-related configuration properties may be expressed.
+	 */
+	public enum SizeUnit {
+
+		/** Kilobytes. */
+		KB,
+		/** Megabytes. */
+		MB
+	};
+
+	/** Retry limit used when "hbase.hconnection.retries.number" is not set. */
+	private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+	/**
+	 * Loads configuration resources on first use and caches the result.
+	 * NOTE(review): the lazy initialisation is not synchronized; concurrent
+	 * first calls could build the configuration twice - confirm single-threaded
+	 * startup, or guard this method.
+	 * 
+	 * @return Configuration
+	 */
+	public static Configuration getConfiguration() {
+		if (propConfiguration == null) {
+			propConfiguration = ConfigurationManager.getConfiguration(configDefFileName);
+		}
+		return propConfiguration;
+	}
+
+	/**
+	 * Returns the configured default result size in bytes, if the user input is
+	 * null; otherwise, interprets the user input in the configured result-size
+	 * unit, converts it to bytes and validates it against the configured max
+	 * value. Throws IllegalArgumentException if : 1. input is less than or
+	 * equals to 0 OR 2. input is greater than configured
+	 * {hbase.scan.max.result.size} value
+	 * 
+	 * @param input
+	 *            the input
+	 * @return the validated maximum result size, in bytes
+	 */
+	public static long validateMaxResultSize(String input) {
+		if (input == null) {
+			return getDefaultResultSize();
+		}
+		// Convert the user input to bytes exactly once.
+		long value = convertToBytes(Long.parseLong(input), getResultSizeUnit());
+		Assert.isTrue(
+				isAllowableResultSize(value),
+				"'maxResponseSize' param value must be positive and less than {hbase.scan.max.result.size} value");
+		// BUG FIX: the original returned convertToBytes(value, ...) here,
+		// scaling the already-byte-sized value by the unit factor a second time
+		// (e.g. KB input was multiplied by 1024 twice).
+		return value;
+	}
+
+	/**
+	 * Checks if is allowable result size.
+	 * 
+	 * @param input
+	 *            the candidate result size, in bytes
+	 * @return true when input is positive and does not exceed the configured max
+	 */
+	public static boolean isAllowableResultSize(long input) {
+		return input > 0 && input <= getMaxResultSize();
+	}
+
+	/**
+	 * Returns the configured default result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getDefaultResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.default.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.max.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max row size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxRowSize() {
+		float maxRowSize = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.table.max.row.size");
+		return convertToBytes(maxRowSize, getRowSizeUnit());
+	}
+
+	/**
+	 * Gets the unit ("KB"/"MB") in which scan result sizes are configured.
+	 * 
+	 * @return the result size unit
+	 */
+	public static SizeUnit getResultSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.scan.result.size.unit"));
+	}
+
+	/**
+	 * Gets the unit ("KB"/"MB") in which table row sizes are configured.
+	 * 
+	 * @return the row size unit
+	 */
+	public static SizeUnit getRowSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.table.row.size.unit"));
+	}
+
+	/**
+	 * Gets the HConnection retry limit, falling back to
+	 * DEFAULT_HCONNECTION_RETRY_LIMIT when the property is absent.
+	 * 
+	 * @return the connection retry limit
+	 */
+	public static int getConnectionRetryLimit() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.hconnection.retries.number",
+				DEFAULT_HCONNECTION_RETRY_LIMIT);
+	}
+
+	/**
+	 * Checks whether reverse traffic is included by default.
+	 * 
+	 * @return true, if reverse traffic is included by default
+	 */
+	public static boolean isDefaultIncludeReverseTraffic() {
+		return ConfigurationUtil.getConfiguration().getBoolean(
+				"pcaps.include.reverse.traffic");
+	}
+
+	/**
+	 * Gets the configured HBase table name as bytes.
+	 * 
+	 * @return the table name
+	 */
+	public static byte[] getTableName() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.name"));
+	}
+
+	/**
+	 * Gets the configured column family as bytes.
+	 * 
+	 * @return the column family
+	 */
+	public static byte[] getColumnFamily() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.family"));
+	}
+
+	/**
+	 * Gets the configured column qualifier as bytes.
+	 * 
+	 * @return the column qualifier
+	 */
+	public static byte[] getColumnQualifier() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.qualifier"));
+	}
+
+	/**
+	 * Gets the configured max number of cell versions to read.
+	 * 
+	 * @return the max versions
+	 */
+	public static int getMaxVersions() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.column.maxVersions");
+	}
+
+	/**
+	 * Gets the configured number of tokens in an HBase rowkey.
+	 * 
+	 * @return the configured tokens in rowkey
+	 */
+	public static int getConfiguredTokensInRowkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.tokens");
+	}
+
+	/**
+	 * Gets the minimum number of tokens required in a REST input key.
+	 * 
+	 * @return the minimum tokens in inputkey
+	 */
+	public static int getMinimumTokensInInputkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"rest.api.input.key.min.tokens");
+	}
+
+	/**
+	 * Gets the number of digits appended to rowkey tokens.
+	 * 
+	 * @return the appending token digits
+	 */
+	public static int getAppendingTokenDigits() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.token.appending.digits");
+	}
+
+	/**
+	 * Converts a value expressed in the given unit to bytes; values in an
+	 * unrecognised unit are returned unchanged (truncated to long).
+	 * 
+	 * @param value
+	 *            the value
+	 * @param unit
+	 *            the unit
+	 * @return the value in bytes
+	 */
+	public static long convertToBytes(float value, SizeUnit unit) {
+		if (SizeUnit.KB == unit) {
+			return (long) (value * 1024);
+		}
+		if (SizeUnit.MB == unit) {
+			return (long) (value * 1024 * 1024);
+		}
+		return (long) value;
+	}
+
+	/**
+	 * Ad-hoc smoke test that prints the configured sizes and units.
+	 * 
+	 * @param args
+	 *            the arguments
+	 */
+	public static void main(String[] args) {
+		long r1 = getMaxRowSize();
+		System.out.println("getMaxRowSizeInBytes = " + r1);
+		long r2 = getMaxResultSize();
+		System.out.println("getMaxAllowableResultSizeInBytes = " + r2);
+
+		SizeUnit u1 = getRowSizeUnit();
+		System.out.println("getMaxRowSizeUnit = " + u1.toString());
+		SizeUnit u2 = getResultSizeUnit();
+		System.out.println("getMaxAllowableResultsSizeUnit = " + u2.toString());
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
new file mode 100644
index 0000000..a7e7e3b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
@@ -0,0 +1,40 @@
+package com.opensoc.pcapservice;
+
+/**
+ * Property names used when building the HBase/ZooKeeper client configuration,
+ * plus delimiter and key-name constants for pcap row keys.
+ * 
+ * @author Sayi
+ */
+public class HBaseConfigConstants {
+
+  /** Property name for the comma-separated ZooKeeper quorum hosts. */
+  public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";
+
+  /** Property name for the ZooKeeper client port. */
+  public static final String HBASE_ZOOKEEPER_CLIENT_PORT = "hbase.zookeeper.clientPort";
+
+  /** Property name for the ZooKeeper session timeout. */
+  public static final String HBASE_ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout";
+
+  /** Property name for the ZooKeeper recovery retry count. */
+  public static final String HBASE_ZOOKEEPER_RECOVERY_RETRY = "zookeeper.recovery.retry";
+
+  /** Property name for the HBase client retry count. */
+  public static final String HBASE_CLIENT_RETRIES_NUMBER = "hbase.client.retries.number";
+
+  // NOTE(review): the two package-private instance fields below are not used
+  // within this class and duplicate PCAP_KEY_DELIMETER; confirm external usage
+  // before removing them.
+  /** The delimeter. */
+  String delimeter = "-";
+
+  /** The regex. */
+  String regex = "\\-";
+
+  /** Delimiter between tokens of a pcap row key. */
+  public static final String PCAP_KEY_DELIMETER = "-";
+
+  /** Name of the start-key parameter ("startKey"). */
+  public static final String START_KEY = "startKey";
+
+  /** Name of the end-key parameter ("endKey"). */
+  public static final String END_KEY = "endKey";
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
new file mode 100644
index 0000000..8a5c022
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
@@ -0,0 +1,165 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.log4j.Logger;
+import org.mortbay.log.Log;
+
+/**
+ * Utility class which creates HConnection instance when the first request is
+ * received and registers a shut down hook which closes the connection when the
+ * JVM exits. Creates new connection to the cluster only if the existing
+ * connection is closed for unknown reasons. Also creates Configuration with
+ * HBase resources using configuration properties.
+ * 
+ * @author Sayi
+ * 
+ */
+public class HBaseConfigurationUtil {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(HBaseConfigurationUtil.class);
+
+  /** Configuration which holds all HBase properties. */
+  private static Configuration config;
+
+  /**
+   * A cluster connection which knows how to find master node and locate regions
+   * on the cluster.
+   */
+  private static HConnection clusterConnection = null;
+
+  /**
+   * Creates HConnection instance when the first request is received and returns
+   * the same instance for all subsequent requests if the connection is still
+   * open.
+   * 
+   * @return HConnection instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static HConnection getConnection() throws IOException {
+    if (!connectionAvailable()) {
+      synchronized (HBaseConfigurationUtil.class) {
+        createClusterConncetion();
+      }
+    }
+    return clusterConnection;
+  }
+
+  /**
+   * Creates the cluster conncetion.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private static void createClusterConncetion() throws IOException {
+    try {
+      if (connectionAvailable()) {
+        return;
+      }
+      clusterConnection = HConnectionManager.createConnection(read());
+      addShutdownHook();
+      System.out.println("Created HConnection and added shutDownHook");
+    } catch (IOException e) {
+      LOGGER
+          .error(
+              "Exception occurred while creating HConnection using HConnectionManager",
+              e);
+      throw e;
+    }
+  }
+
+  /**
+   * Connection available.
+   * 
+   * @return true, if successful
+   */
+  private static boolean connectionAvailable() {
+    if (clusterConnection == null) {
+      System.out.println("clusterConnection=" + clusterConnection);
+      return false;
+    }
+    System.out.println("clusterConnection.isClosed()="
+        + clusterConnection.isClosed());
+    return clusterConnection != null && !clusterConnection.isClosed();
+  }
+
+  /**
+   * Adds the shutdown hook.
+   */
+  private static void addShutdownHook() {
+    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+      public void run() {
+        System.out
+            .println("Executing ShutdownHook HBaseConfigurationUtil : Closing HConnection");
+        try {
+          clusterConnection.close();
+        } catch (IOException e) {
+          Log.debug("Caught ignorable exception ", e);
+        }
+      }
+    }, "HBaseConfigurationUtilShutDown"));
+  }
+
+  /**
+   * Closes the underlying connection to cluster; ignores if any exception is
+   * thrown.
+   */
+  public static void closeConnection() {
+    if (clusterConnection != null) {
+      try {
+        clusterConnection.close();
+      } catch (IOException e) {
+        Log.debug("Caught ignorable exception ", e);
+      }
+    }
+  }
+
+  /**
+   * This method creates Configuration with HBase resources using configuration
+   * properties. The same Configuration object will be used to communicate with
+   * all HBase tables;
+   * 
+   * @return Configuration object
+   */
+  public static Configuration read() {
+    if (config == null) {
+      synchronized (HBaseConfigurationUtil.class) {
+        if (config == null) {
+          config = HBaseConfiguration.create();
+
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_QUORUM,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.zookeeper.quorum"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_CLIENT_PORT,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.zookeeper.clientPort"));
+          config.set(
+              HBaseConfigConstants.HBASE_CLIENT_RETRIES_NUMBER,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.client.retries.number"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_SESSION_TIMEOUT,
+              ConfigurationUtil.getConfiguration().getString(
+                  "zookeeper.session.timeout"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_RECOVERY_RETRY,
+              ConfigurationUtil.getConfiguration().getString(
+                  "zookeeper.recovery.retry"));
+        }
+      }
+    }
+    return config;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapGetter.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
new file mode 100644
index 0000000..dbff59c
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
@@ -0,0 +1,88 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Interface to all 'keys' based pcaps fetching methods.
+ * 
+ * @author Sayi
+ */
+public interface IPcapGetter {
+
+  /**
+   * Gets the pcaps for the input list of keys and lastRowKey.
+   * 
+   * @param keys
+   *          the list of keys for which pcaps are to be retrieved
+   * @param lastRowKey
+   *          last row key from the previous partial response
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends a negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends a
+   *          negative value. 'endTime' must be greater than the 'startTime'.
+   * @param includeReverseTraffic
+   *          indicates whether or not to include pcaps from the reverse traffic
+   * @param includeDuplicateLastRow
+   *          indicates whether or not to include the last row from the previous
+   *          partial response
+   * @param maxResultSize
+   *          the max result size
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
+      long startTime, long endTime, boolean includeReverseTraffic,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
+
+  /**
+   * Gets the pcaps for the input key.
+   * 
+   * @param key
+   *          the key for which pcaps are to be retrieved.
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends a negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends a
+   *          negative value. 'endTime' must be greater than the 'startTime'.
+   * @param includeReverseTraffic
+   *          indicates whether or not to include pcaps from the reverse traffic
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(String key, long startTime, long endTime,
+      boolean includeReverseTraffic) throws IOException;
+
+  /**
+   * Gets the pcaps for the input list of keys, with default time range and
+   * reverse-traffic settings.
+   * 
+   * @param keys
+   *          the list of keys for which pcaps are to be retrieved.
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(List<String> keys) throws IOException;
+
+  /**
+   * Gets the pcaps for the input key, with default time range and
+   * reverse-traffic settings.
+   * 
+   * @param key
+   *          the key for which pcaps are to be retrieved.
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(String key) throws IOException;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapScanner.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
new file mode 100644
index 0000000..64408e9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
@@ -0,0 +1,49 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+/**
+ * The Interface for all pcaps fetching methods based on key range.
+ */
+public interface IPcapScanner {
+
+  /**
+   * Gets the pcaps between startKey (inclusive) and endKey (exclusive).
+   * 
+   * @param startKey
+   *          the start key of a key range for which pcaps are to be retrieved.
+   * @param endKey
+   *          the end key of a key range for which pcaps are to be retrieved.
+   * @param maxResponseSize
+   *          indicates the maximum response size in MegaBytes(MB). User needs
+   *          to pass a positive value and it must be less than 60 (MB)
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends a negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends a
+   *          negative value
+   * @return byte array with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps(String startKey, String endKey, long maxResponseSize,
+      long startTime, long endTime) throws IOException;
+
+  /**
+   * Gets the pcaps between startKey (inclusive) and endKey (exclusive), using
+   * default size and time-range settings.
+   * 
+   * @param startKey
+   *          the start key (inclusive) of a key range for which pcaps are to be
+   *          retrieved.
+   * @param endKey
+   *          the end key (exclusive) of a key range for which pcaps are to be
+   *          retrieved.
+   * @return byte array with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps(String startKey, String endKey) throws IOException;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
new file mode 100644
index 0000000..b06137d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapGetterHBaseImpl.java
@@ -0,0 +1,809 @@
+package com.opensoc.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.annotation.Resource;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Singleton class which integrates with an HBase table and returns pcaps
+ * sorted by timestamp (descending) for the given list of keys. Creates an
+ * HConnection if one does not already exist and reuses the same connection
+ * instance for all requests.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+
+@Path("/")
+public class PcapGetterHBaseImpl implements IPcapGetter {
+
+  /** The pcap getter h base. */
+  private static IPcapGetter pcapGetterHBase = null;
+
+  /** The Constant LOG. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapGetterHBaseImpl.class);
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List,
+   * java.lang.String, long, long, boolean, boolean, long)
+   */
+ 
+  
+  /**
+   * Liveness-probe endpoint; always responds HTTP 200 with a fixed body.
+   *
+   * @return 200 response with the body "ALL GOOD"
+   * @throws URISyntaxException
+   *           declared but never actually thrown
+   */
+  @GET
+  @Path("pcap/test")
+  @Produces("text/html")
+  public Response index() throws URISyntaxException {
+    return Response.ok("ALL GOOD").build();
+  }
+	
+	
+  /**
+   * Gets pcaps for the given keys, resuming from 'lastRowKey' when a previous
+   * call returned a partial response.
+   *
+   * @param keys
+   *          hbase row keys (partial or full); may be null/empty when
+   *          'lastRowKey' is supplied
+   * @param lastRowKey
+   *          last row key of a previous partial response; may be null/empty
+   * @param startTime
+   *          start of the time filter; negative means unbounded
+   * @param endTime
+   *          end of the time filter; negative means unbounded
+   * @param includeReverseTraffic
+   *          whether to also scan reversed (dst-to-src) keys
+   * @param includeDuplicateLastRow
+   *          whether to re-include the last row of the previous response
+   * @param maxResultSize
+   *          response size cap
+   * @return merged pcaps response
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
+      long startTime, long endTime, boolean includeReverseTraffic,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    Assert
+        .isTrue(
+            checkIfValidInput(keys, lastRowKey),
+            "No valid input. One of the value must be present from {keys, lastRowKey}");
+    // 'keys' may legitimately be null/empty when only 'lastRowKey' is given;
+    // the original dereferenced it unconditionally here and NPE'd.
+    LOGGER.info(" keys=" + (keys == null ? "[]" : keys.toString())
+        + ";  lastRowKey=" + lastRowKey);
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+    // 1. Resume from the partial-response key, if one was supplied.
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      pcapsResponse = processKey(pcapsResponse, lastRowKey, startTime,
+          endTime, true, includeDuplicateLastRow, maxResultSize);
+      if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+        return pcapsResponse; // buffer full again; caller must page further
+      }
+    }
+    // 2. Process the input keys. Skip when none were supplied: the original
+    // fell through to sortKeysByAscOrder and failed its notEmpty assertion.
+    if (CollectionUtils.isEmpty(keys)) {
+      return pcapsResponse;
+    }
+    List<String> sortedKeys = sortKeysByAscOrder(keys, includeReverseTraffic);
+    List<String> unprocessedKeys = sortedKeys;
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      unprocessedKeys = getUnprocessedSublistOfKeys(sortedKeys, lastRowKey);
+    }
+    LOGGER.info("unprocessedKeys in getPcaps" + unprocessedKeys.toString());
+    for (String unprocessedKey : unprocessedKeys) {
+      pcapsResponse = processKey(pcapsResponse, unprocessedKey, startTime,
+          endTime, false, includeDuplicateLastRow, maxResultSize);
+      if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+        return pcapsResponse;
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /**
+   * Fetches pcaps for a single key using the default result-size setting.
+   *
+   * @param key
+   *          hbase row key (partial or full)
+   * @param startTime
+   *          start of the time filter; negative means unbounded
+   * @param endTime
+   *          end of the time filter; negative means unbounded
+   * @param includeReverseTraffic
+   *          whether to also scan the reversed (dst-to-src) key
+   * @return merged pcaps response
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(String key, long startTime, long endTime,
+      boolean includeReverseTraffic) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    List<String> singleKey = Arrays.asList(key);
+    return getPcaps(singleKey, null, startTime, endTime,
+        includeReverseTraffic, false, ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List)
+   */
+ 
+  public PcapsResponse getPcaps(List<String> keys) throws IOException {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    return getPcaps(keys, null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String)
+   */
+ 
+  public PcapsResponse getPcaps(String key) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    return getPcaps(Arrays.asList(key), null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /**
+   * Always returns the singleton instance, creating it lazily on first call.
+   *
+   * <p>The method is synchronized because the original double-checked locking
+   * was broken: the 'pcapGetterHBase' field is not volatile, so an unlocked
+   * first read could observe a partially-published instance.
+   *
+   * @return IPcapGetter singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static synchronized IPcapGetter getInstance() throws IOException {
+    if (pcapGetterHBase == null) {
+      pcapGetterHBase = new PcapGetterHBaseImpl();
+    }
+    return pcapGetterHBase;
+  }
+
+  /**
+   * Private constructor to enforce the singleton pattern; obtain the shared
+   * instance via {@link #getInstance()}.
+   */
+  private PcapGetterHBaseImpl() {
+  }
+
+  /**
+   * Adds reverse keys to the list if 'includeReverseTraffic' is true, removes
+   * duplicates, and sorts the result in ascending order.
+   *
+   * @param keys
+   *          input keys; the caller's list is NOT modified
+   * @param includeReverseTraffic
+   *          flag whether or not to include reverse traffic
+   * @return List&lt;String&gt; sorted, de-duplicated keys
+   */
+  @VisibleForTesting
+  List<String> sortKeysByAscOrder(List<String> keys,
+      boolean includeReverseTraffic) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    // Work on a copy: the original mutated the caller's list, which threw
+    // UnsupportedOperationException for the fixed-size lists produced by
+    // Arrays.asList in the single-key getPcaps() overloads.
+    List<String> allKeys = new ArrayList<String>(keys);
+    if (includeReverseTraffic) {
+      allKeys.addAll(PcapHelper.reverseKey(keys));
+    }
+    List<String> deDupKeys = removeDuplicateKeys(allKeys);
+    Collections.sort(deDupKeys);
+    return deDupKeys;
+  }
+
+  /**
+   * Removes the duplicate keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the list
+   */
+  @VisibleForTesting
+public
+  List<String> removeDuplicateKeys(List<String> keys) {
+    Set<String> set = new HashSet<String>(keys);
+    return new ArrayList<String>(set);
+  }
+
+  /**
+   * <p>
+   * Returns the sublist starting from the element after the one matching
+   * 'lastRowKey' (compared on its first 5 tokens) to the end of the list; if
+   * no element matches, the complete list is returned.
+   * </p>
+   * 
+   * <pre>
+   * Eg :
+   *  keys = [18800006-1800000b-06-0019-caac, 18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   *  lastRowKey = "18800006-1800000b-06-0019-caac-65140-40815"
+   *  and the response from this method [18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   * </pre>
+   * 
+   * <p>Note: the returned list is a subList view backed by 'keys'.
+   *
+   * @param keys
+   *          keys
+   * @param lastRowKey
+   *          last row key of the previous partial response
+   * @return List&lt;String&gt;
+   */
+  @VisibleForTesting
+  List<String> getUnprocessedSublistOfKeys(List<String> keys,
+      String lastRowKey) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    Assert.hasText(lastRowKey, "'lastRowKey' must not be null");
+    String partialKey = getTokens(lastRowKey, 5);
+    // indexOf finds the first match, exactly like the original scan loop
+    int matchIndex = keys.indexOf(partialKey);
+    int startIndex = (matchIndex < 0) ? 0 : matchIndex + 1;
+    return keys.subList(startIndex, keys.size());
+  }
+
+  /**
+   * Returns the first 'noOfTokens' tokens of the given key, re-joined with
+   * the key delimiter "-".
+   *
+   * @param key
+   *          given key
+   * @param noOfTokens
+   *          number of tokens to retrieve; must be strictly smaller than the
+   *          token count of 'key'
+   * @return the joined tokens
+   */
+  @VisibleForTesting
+  String getTokens(String key, int noOfTokens) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String[] keyTokens = key.split("\\" + delimeter);
+    Assert.isTrue(noOfTokens < keyTokens.length,
+        "Invalid value for 'noOfTokens'");
+    StringBuilder joined = new StringBuilder();
+    for (int i = 0; i < noOfTokens; i++) {
+      if (i > 0) {
+        joined.append(delimeter);
+      }
+      joined.append(keyTokens[i]);
+    }
+    return joined.toString();
+  }
+
+  /**
+   * Process key.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response
+   * @param key
+   *          the key
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @param isPartialResponse
+   *          whether 'key' is the lastRowKey carried over from a previous
+   *          partial response
+   * @param includeDuplicateLastRow
+   *          whether to re-include the last row already returned by the
+   *          previous partial response
+   * @param maxResultSize
+   *          the max result size
+   * @return the pcaps response
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  PcapsResponse processKey(PcapsResponse pcapsResponse, String key,
+      long startTime, long endTime, boolean isPartialResponse,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    // NOTE(review): 'table' is only (re)assigned inside executeGetRequest /
+    // executeScanRequest; Java passes references by value, so this local stays
+    // null and the finally-block below never closes the HTable the helpers
+    // open -- looks like a resource leak; confirm and fix in the helpers.
+    HTable table = null;
+    Scan scan = null;
+    List<Cell> scannedCells = null;
+    try {
+      // 1. Create start and stop row for the key;
+      Map<String, String> keysMap = createStartAndStopRowKeys(key,
+          isPartialResponse, includeDuplicateLastRow);
+
+      // 2. if the input key contains all fragments (7) and it is not part
+      // of previous partial response (isPartialResponse),
+      // 'keysMap' will be null; do a Get; currently not doing any
+      // response size related checks for Get;
+      // by default all cells from a specific row are sorted by timestamp
+      if (keysMap == null) {
+        Get get = createGetRequest(key, startTime, endTime);
+        List<Cell> cells = executeGetRequest(table, get);
+        for (Cell cell : cells) {
+          pcapsResponse.addPcaps(CellUtil.cloneValue(cell));
+        }
+        return pcapsResponse;
+      }
+      // 3. Create and execute Scan request
+      scan = createScanRequest(pcapsResponse, keysMap, startTime, endTime,
+          maxResultSize);
+      scannedCells = executeScanRequest(table, scan);
+      LOGGER.info("scannedCells size :" + scannedCells.size());
+      addToResponse(pcapsResponse, scannedCells, maxResultSize);
+
+    } catch (IOException e) {
+      LOGGER.error("Exception occurred while fetching Pcaps for the keys :"
+          + key, e);
+      // Retry only connection-level failures by tearing down the shared
+      // HConnection and scanning again.
+      // NOTE(review): if the failure came from the Get path above, 'scan' is
+      // still null here and the retry would NPE inside executeScanRequest --
+      // confirm whether Get failures were meant to be retried at all.
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = ConfigurationUtil.getConnectionRetryLimit();
+        // NOTE(review): leftover System.out debug prints; prefer LOGGER.debug.
+        System.out.println("maxRetryLimit =" + maxRetryLimit);
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          System.out.println("attempting  =" + attempt);
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the
+            // existing
+            // connection
+            // and retry,
+            // it will
+            // create a new
+            // HConnection
+            scannedCells = executeScanRequest(table, scan);
+            addToResponse(pcapsResponse, scannedCells, maxResultSize);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              LOGGER.error("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      }
+
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /**
+   * Merges the scanned cells into the response, ordered by timestamp, and
+   * marks the response PARTIAL (recording the last scanned row key) when the
+   * configured size limit is exceeded.
+   *
+   * @param pcapsResponse
+   *          the pcaps response being accumulated
+   * @param scannedCells
+   *          the scanned cells; may be null or empty (no-op in that case)
+   * @param maxResultSize
+   *          the max result size
+   */
+  private void addToResponse(PcapsResponse pcapsResponse,
+      List<Cell> scannedCells, long maxResultSize) {
+    // Guard: the original sorted unconditionally (NPE on a null list) and
+    // could call new String(null) when the list was empty but the response
+    // was already over the limit.
+    if (scannedCells == null || scannedCells.isEmpty()) {
+      return;
+    }
+    String lastKeyFromCurrentScan = new String(CellUtil.cloneRow(scannedCells
+        .get(scannedCells.size() - 1)));
+    // merge the cells into the response ordered by timestamp
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    for (Cell sortedCell : scannedCells) {
+      pcapsResponse.addPcaps(CellUtil.cloneValue(sortedCell));
+    }
+    if (!pcapsResponse.isResonseSizeWithinLimit(maxResultSize)) {
+      pcapsResponse.setStatus(PcapsResponse.Status.PARTIAL); // size limit hit
+      pcapsResponse.setLastRowKey(lastKeyFromCurrentScan);
+    }
+  }
+
+  /**
+   * Builds start and stop row keys according to the following logic : 1.
+   * Creates tokens out of 'key' using pcap_id delimiter ('-') 2. if the input
+   * 'key' contains (assume : configuredTokensInRowKey=7 and
+   * minimumTokensIninputKey=5): a). 5 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-00000-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999" b). 6 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort-id1") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-99999"
+   * 
+   * c). 7 tokens ("srcIp-dstIp-protocol-srcPort-dstPort-id1-id2") 1>. if the
+   * key is NOT part of the partial response from previous request, return
+   * 'null' 2>. if the key is part of partial response from previous request
+   * startKey = "srcIp-dstIp-protocol-srcPort-dstPort-id1-(id2+1)"; 1 is added
+   * to exclude this key as it was included in the previous request stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999"
+   * 
+   * @param key
+   *          the key
+   * @param isLastRowKey
+   *          if the key is part of partial response
+   * @param includeDuplicateLastRow
+   *          the include duplicate last row
+   * @return Map<String, String>
+   */
+  @VisibleForTesting
+  Map<String, String> createStartAndStopRowKeys(String key,
+      boolean isLastRowKey, boolean includeDuplicateLastRow) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    String[] keyTokens = key.split(regex);
+
+    String startKey = null;
+    String endKey = null;
+    Map<String, String> map = new HashMap<String, String>();
+
+    int configuredTokensInRowKey = ConfigurationUtil
+        .getConfiguredTokensInRowkey();
+    int minimumTokensIninputKey = ConfigurationUtil
+        .getMinimumTokensInInputkey();
+    Assert
+        .isTrue(
+            minimumTokensIninputKey <= configuredTokensInRowKey,
+            "tokens in the input key (separated by '-'), must be less than or equal to the tokens used in hbase table row key ");
+    // in case if the input key contains 'configuredTokensInRowKey' tokens and
+    // it is NOT a
+    // partial response key, do a Get instead of Scan
+    if (keyTokens.length == configuredTokensInRowKey) {
+      if (!isLastRowKey) {
+        return null;
+      }
+      // it is a partial response key; 'startKey' is same as input partial
+      // response key; 'endKey' can be built by replacing
+      // (configuredTokensInRowKey - minimumTokensIninputKey) tokens
+      // of input partial response key with '99999'
+      // NOTE(review): this branch is only reachable when
+      // configuredTokensInRowKey == minimumTokensIninputKey (we are inside
+      // keyTokens.length == configuredTokensInRowKey); otherwise dead code.
+      if (keyTokens.length == minimumTokensIninputKey) {
+        return null;
+      }
+      int appendingTokenSlots = configuredTokensInRowKey
+          - minimumTokensIninputKey;
+      if (appendingTokenSlots > 0) {
+        String partialKey = getTokens(key, minimumTokensIninputKey);
+        StringBuffer sbfStartNew = new StringBuffer(partialKey);
+        StringBuffer sbfEndNew = new StringBuffer(partialKey);
+        for (int i = 0; i < appendingTokenSlots; i++) {
+          if (i == (appendingTokenSlots - 1)) {
+            if (!includeDuplicateLastRow) {
+              // NOTE(review): Integer.valueOf(...) + 1 drops any leading
+              // zero-padding (e.g. "00005" becomes "6"), which may break the
+              // lexicographic ordering the scan start key relies on -- verify
+              // against the row-key format used by the writer.
+              sbfStartNew
+                  .append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(
+                      Integer.valueOf(keyTokens[minimumTokensIninputKey + i]) + 1);
+            } else {
+              sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(keyTokens[minimumTokensIninputKey + i]);
+            }
+          } else {
+            sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+                keyTokens[minimumTokensIninputKey + i]);
+          }
+          sbfEndNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+              getMaxLimitForAppendingTokens());
+        }
+        startKey = sbfStartNew.toString();
+        endKey = sbfEndNew.toString();
+      }
+    } else {
+      // fewer tokens than configured: pad with all-zeros for the start key
+      // and all-nines for the (exclusive) stop key
+      StringBuffer sbfStart = new StringBuffer(key);
+      StringBuffer sbfEnd = new StringBuffer(key);
+      for (int i = keyTokens.length; i < configuredTokensInRowKey; i++) {
+        sbfStart.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMinLimitForAppendingTokens());
+        sbfEnd.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMaxLimitForAppendingTokens());
+      }
+      startKey = sbfStart.toString();
+      endKey = sbfEnd.toString();
+    }
+    map.put(HBaseConfigConstants.START_KEY, startKey);
+    map.put(HBaseConfigConstants.END_KEY, endKey);
+
+    return map;
+  }
+
+  /**
+   * Validates the request input: at least one of 'keys' or 'lastRowKey'
+   * must be present.
+   *
+   * @param keys
+   *          input row keys
+   * @param lastRowKey
+   *          partial response key
+   * @return false when keys is null/empty AND lastRowKey is null/empty;
+   *         true otherwise
+   */
+  @VisibleForTesting
+  boolean checkIfValidInput(List<String> keys, String lastRowKey) {
+    return !(CollectionUtils.isEmpty(keys) && StringUtils.isEmpty(lastRowKey));
+  }
+
+  /**
+   * Executes the given Get request and returns the matching cells.
+   *
+   * @param table
+   *          unused; retained for signature compatibility (the original
+   *          assigned into this parameter, which never reached the caller)
+   * @param get
+   *          Get
+   * @return List&lt;Cell&gt;
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeGetRequest(HTable table, Get get)
+      throws IOException {
+    LOGGER.info("Get :" + get.toString());
+    // Use a local handle and close it here: assigning the opened table to the
+    // 'table' parameter (as the original did) never propagates to the caller,
+    // so the caller's finally-close could not release it -- resource leak.
+    HTable hTable = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+        ConfigurationUtil.getTableName());
+    try {
+      Result result = hTable.get(get);
+      return result.getColumnCells(
+          ConfigurationUtil.getColumnFamily(),
+          ConfigurationUtil.getColumnQualifier());
+    } finally {
+      hTable.close();
+    }
+  }
+
+  /**
+   * Executes the given Scan request and collects all matching cells.
+   *
+   * @param table
+   *          unused; retained for signature compatibility (the original
+   *          assigned into this parameter, which never reached the caller)
+   * @param scan
+   *          the scan
+   * @return the list of scanned cells
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeScanRequest(HTable table, Scan scan)
+      throws IOException {
+    LOGGER.info("Scan :" + scan.toString());
+    // Local handle, closed in finally: the original leaked both the HTable
+    // (parameter reassignment never reached the caller's close) and the
+    // ResultScanner.
+    HTable hTable = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+        ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    try {
+      ResultScanner resultScanner = hTable.getScanner(scan);
+      try {
+        List<Cell> scannedCells = new ArrayList<Cell>();
+        for (Result result = resultScanner.next(); result != null; result = resultScanner
+            .next()) {
+          List<Cell> cells = result.getColumnCells(
+              ConfigurationUtil.getColumnFamily(),
+              ConfigurationUtil.getColumnQualifier());
+          if (cells != null) {
+            scannedCells.addAll(cells);
+          }
+        }
+        return scannedCells;
+      } finally {
+        resultScanner.close();
+      }
+    } finally {
+      hTable.close();
+    }
+  }
+
+  /**
+   * Builds the Get request for a full row key, restricted to the configured
+   * column family/qualifier, max versions, and the requested time range.
+   *
+   * @param key
+   *          the full row key
+   * @param startTime
+   *          start of the time filter; negative means unbounded
+   * @param endTime
+   *          end of the time filter; negative means unbounded
+   * @return the configured Get
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Get createGetRequest(String key, long startTime, long endTime)
+      throws IOException {
+    Get getRequest = new Get(Bytes.toBytes(key));
+    // restrict to the configured family and qualifier
+    getRequest.addFamily(ConfigurationUtil.getColumnFamily());
+    getRequest.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+    getRequest.setMaxVersions(ConfigurationUtil.getMaxVersions());
+    // apply the caller's time filter, if any
+    setTimeRangeOnGet(getRequest, startTime, endTime);
+    return getRequest;
+  }
+
+  /**
+   * Builds the Scan request for the given start/stop row keys, budgeting the
+   * scan size against what the response has already accumulated.
+   *
+   * @param pcapsResponse
+   *          response accumulated so far (used to compute remaining budget)
+   * @param keysMap
+   *          map holding the start and stop row keys
+   * @param startTime
+   *          start of the time filter; negative means unbounded
+   * @param endTime
+   *          end of the time filter; negative means unbounded
+   * @param maxResultSize
+   *          the overall response size cap
+   * @return the configured Scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(PcapsResponse pcapsResponse,
+      Map<String, String> keysMap, long startTime, long endTime,
+      long maxResultSize) throws IOException {
+    Scan scanRequest = new Scan();
+    scanRequest.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+    scanRequest.setStartRow(keysMap.get(HBaseConfigConstants.START_KEY)
+        .getBytes());
+    scanRequest.setStopRow(keysMap.get(HBaseConfigConstants.END_KEY)
+        .getBytes());
+    // budget = cap - (already collected + worst-case size of one more row)
+    long remainingSize = maxResultSize
+        - (pcapsResponse.getResponseSize() + ConfigurationUtil.getMaxRowSize());
+    if (remainingSize > 0) {
+      scanRequest.setMaxResultSize(remainingSize);
+    }
+    // NOTE(review): reads max versions straight from the raw config, while
+    // createGetRequest uses ConfigurationUtil.getMaxVersions() -- confirm
+    // both resolve to the same value.
+    scanRequest.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+    setTimeRangeOnScan(scanRequest, startTime, endTime);
+    return scanRequest;
+  }
+
+  /**
+   * Applies the caller's time filter to the scan. Both times negative means
+   * no filtering; a negative start becomes 0 and a negative end becomes
+   * Long.MAX_VALUE; non-negative values are converted to the table's data
+   * creation time unit.
+   *
+   * @param scan
+   *          the scan
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnScan(Scan scan, long startTime, long endTime)
+      throws IOException {
+    if (startTime < 0 && endTime < 0) {
+      return; // no time filtering requested
+    }
+    long effectiveStart = (startTime < 0)
+        ? 0
+        : PcapHelper.convertToDataCreationTimeUnit(startTime);
+    long effectiveEnd = (endTime < 0)
+        ? Long.MAX_VALUE
+        : PcapHelper.convertToDataCreationTimeUnit(endTime);
+    Assert.isTrue(effectiveStart < effectiveEnd,
+        "startTime value must be less than endTime value");
+    scan.setTimeRange(effectiveStart, effectiveEnd);
+  }
+
+  /**
+   * Applies the caller's time filter to the get. Both times negative means
+   * no filtering; a negative start becomes 0 and a negative end becomes
+   * Long.MAX_VALUE; non-negative values are converted to the table's data
+   * creation time unit.
+   *
+   * @param get
+   *          the get
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnGet(Get get, long startTime, long endTime)
+      throws IOException {
+    if (startTime < 0 && endTime < 0) {
+      return; // no time filtering requested
+    }
+    long effectiveStart = (startTime < 0)
+        ? 0
+        : PcapHelper.convertToDataCreationTimeUnit(startTime);
+    long effectiveEnd = (endTime < 0)
+        ? Long.MAX_VALUE
+        : PcapHelper.convertToDataCreationTimeUnit(endTime);
+    Assert.isTrue(effectiveStart < effectiveEnd,
+        "startTime value must be less than endTime value");
+    get.setTimeRange(effectiveStart, effectiveEnd);
+  }
+
+  /**
+   * Returns the all-zeros padding token ("000...0") used as the lower bound
+   * when filling missing row-key slots; width comes from configuration.
+   *
+   * @return the min limit for appending tokens
+   */
+  private String getMinLimitForAppendingTokens() {
+    char[] zeros = new char[ConfigurationUtil.getAppendingTokenDigits()];
+    Arrays.fill(zeros, '0');
+    return new String(zeros);
+  }
+
+  /**
+   * Returns the all-nines padding token ("999...9") used as the upper bound
+   * when filling missing row-key slots; width comes from configuration.
+   *
+   * @return the max limit for appending tokens
+   */
+  private String getMaxLimitForAppendingTokens() {
+    char[] nines = new char[ConfigurationUtil.getAppendingTokenDigits()];
+    Arrays.fill(nines, '9');
+    return new String(nines);
+  }
+
+  /**
+   * Command-line entry point: fetches pcaps for the given keys and writes the
+   * merged result to a file.
+   *
+   * <p>Arguments: &lt;zk quorum&gt; &lt;output file&gt; &lt;keys
+   * (comma-separated)&gt; [start time] [end time]
+   *
+   * @param args
+   *          the arguments
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static void main(String[] args) throws IOException {
+    // args[2] is read unconditionally below, so require at least 3 arguments;
+    // the original checked args.length < 2 and could throw
+    // ArrayIndexOutOfBoundsException.
+    if (args == null || args.length < 3) {
+      usage();
+      return;
+    }
+    String outputFileName = args[1];
+    List<String> keys = Arrays.asList(StringUtils.split(args[2], ","));
+    System.out.println("Geting keys " + keys);
+    long startTime = 0;
+    long endTime = Long.MAX_VALUE;
+    if (args.length > 3) {
+      startTime = Long.valueOf(args[3]);
+    }
+    if (args.length > 4) {
+      endTime = Long.valueOf(args[4]);
+    }
+    System.out.println("With start time " + startTime + " and end time "
+        + endTime);
+    PcapGetterHBaseImpl downloader = new PcapGetterHBaseImpl();
+    // NOTE(review): maxResultSize is hard-coded to 6 here -- presumably a
+    // leftover debug value; confirm against
+    // ConfigurationUtil.getDefaultResultSize().
+    PcapsResponse pcaps = downloader.getPcaps(keys, null, startTime, endTime,
+        false, false, 6);
+    File file = new File(outputFileName);
+    FileUtils.write(file, "", false); // truncate any previous content
+    FileUtils.writeByteArrayToFile(file, pcaps.getPcaps(), true);
+  }
+
+  /**
+   * Prints command-line usage.
+   */
+  private static void usage() {
+    // Message corrected to match what main() actually reads:
+    // args[0]=zk quorum, args[1]=output file, args[2]=comma-separated keys,
+    // args[3]=start time (optional), args[4]=end time (optional).
+    System.out.println("java " + PcapGetterHBaseImpl.class.getName()
+        + " <zk quorum> <output file> <keys (comma-separated)>"
+        + " [start time] [end time]");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapHelper.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapHelper.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapHelper.java
new file mode 100644
index 0000000..5224945
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapHelper.java
@@ -0,0 +1,205 @@
+package com.opensoc.pcapservice;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.mortbay.log.Log;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Utility class holding methods related to time conversions and to building
+ * reverse row keys.
+ */
+public class PcapHelper {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger.getLogger(PcapHelper.class);
+
+  /** The cell timestamp comparator. */
+  private static CellTimestampComparator CELL_TIMESTAMP_COMPARATOR = new CellTimestampComparator();
+
+  /**
+   * Time units that stored pcap timestamps may be expressed in; used when
+   * converting caller-supplied times to the table's native unit.
+   */
+  public enum TimeUnit {
+
+    /** Timestamps are in seconds. */
+    SECONDS,
+    /** Timestamps are in milliseconds. */
+    MILLIS,
+    /** Timestamps are in microseconds. */
+    MICROS,
+    /** The unit could not be determined from configuration. */
+    UNKNOWN
+  };
+
+  /**
+   * Converts the given time to the 'hbase' data creation time unit.
+   * The unit of the input is guessed from its magnitude: values of up to 10
+   * digits are treated as seconds, up to 13 digits as millis, and up to 16
+   * digits as micros (epoch-style timestamps); anything larger is returned
+   * unchanged.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  public static long convertToDataCreationTimeUnit(long inputTime) {
+    if (inputTime <= 9999999999L) {
+      return convertSecondsToDataCreationTimeUnit(inputTime); // input time unit
+                                                              // is in seconds
+    } else if (inputTime <= 9999999999999L) {
+      return convertMillisToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // is in millis
+    } else if (inputTime <= 9999999999999999L) {
+      return convertMicrosToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // it in micros
+    }
+    return inputTime; // input time unit is unknown
+  }
+
+  /**
+   * Returns the 'hbase' data creation time unit by reading the
+   * 'hbase.table.data.time.unit' property in the 'hbase-config' properties
+   * file; if the property is absent, returns <code>TimeUnit.UNKNOWN</code>.
+   *
+   * @return TimeUnit
+   */
+  @VisibleForTesting
+  public static TimeUnit getDataCreationTimeUnit() {
+    String timeUnit = ConfigurationUtil.getConfiguration().getString(
+        "hbase.table.data.time.unit");
+    // Log only after the null/empty check: the original called toString() on
+    // a possibly-null value and NPE'd when the property was missing.
+    if (StringUtils.isNotEmpty(timeUnit)) {
+      LOGGER.debug("hbase.table.data.time.unit=" + timeUnit);
+      return TimeUnit.valueOf(timeUnit);
+    }
+    LOGGER.debug("hbase.table.data.time.unit is not set; returning UNKNOWN");
+    return TimeUnit.UNKNOWN;
+  }
+
+  /**
+   * Converts a time given in seconds to the table's data creation time unit.
+   *
+   * @param inputTime
+   *          the input time, in seconds
+   * @return the converted time; unchanged when the unit is SECONDS or UNKNOWN
+   */
+  @VisibleForTesting
+  public static long convertSecondsToDataCreationTimeUnit(long inputTime) {
+    // replaced a leftover System.out.println debug print with the class logger
+    LOGGER.debug("convert Seconds To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000 * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Builds the reverseKey to fetch the pcaps in the reverse traffic
+   * (destination to source). Returns an empty string when the key cannot be
+   * parsed (the failure is logged and reverse scanning is skipped).
+   *
+   * @param key
+   *          indicates hbase rowKey (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return String indicates the key in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
+   */
+  public static String reverseKey(String key) {
+    Assert.hasText(key, "key must not be null or empty");
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    StringBuffer sb = new StringBuffer();
+    try {
+      String[] tokens = key.split(regex);
+      Assert
+          .isTrue(
+              (tokens.length == 5 || tokens.length == 6 || tokens.length == 7),
+              "key is not in the format : 'srcAddr-dstAddr-protocol-srcPort-dstPort-{ipId-fragment identifier}'");
+      // swap src/dst address (tokens 0,1) and src/dst port (tokens 3,4)
+      sb.append(tokens[1]).append(delimeter).append(tokens[0])
+          .append(delimeter).append(tokens[2]).append(delimeter)
+          .append(tokens[4]).append(delimeter).append(tokens[3]);
+    } catch (Exception e) {
+      // use the class's log4j LOGGER; the original logged through the
+      // unrelated Jetty (mortbay) Log facade
+      LOGGER.warn("Failed to reverse the key. Reverse scan won't be performed.",
+          e);
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Builds the reverseKeys to fetch the pcaps in the reverse traffic
+   * (destination to source). If all keys in the input are not in the expected
+   * format, it returns an empty list;
+   * 
+   * @param keys
+   *          indicates list of hbase rowKeys (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return List<String> indicates the list of keys in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
+   */
+  public static List<String> reverseKey(List<String> keys) {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    List<String> reverseKeys = new ArrayList<String>();
+    for (String key : keys) {
+      if (key != null) {
+        String reverseKey = reverseKey(key);
+        if (StringUtils.isNotEmpty(reverseKey)) {
+          reverseKeys.add(reverseKey);
+        }
+      }
+    }
+    return reverseKeys;
+  }
+
  /**
   * Returns the shared comparator used to sort pcap cells by their
   * timestamp in descending order.
   *
   * @return CellTimestampComparator singleton instance
   */
  public static CellTimestampComparator getCellTimestampComparator() {
    return CELL_TIMESTAMP_COMPARATOR;
  }
+
+  /**
+   * Convert millis to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  @VisibleForTesting
+  private static long convertMillisToDataCreationTimeUnit(long inputTime) {
+    System.out.println("convert Millis To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return (inputTime / 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Convert micros to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  @VisibleForTesting
+  private static long convertMicrosToDataCreationTimeUnit(long inputTime) {
+    System.out.println("convert Micros To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime / (1000 * 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime / 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime;
+    }
+    return inputTime;
+  }
+}


[08/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
new file mode 100644
index 0000000..98e855e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
@@ -0,0 +1,250 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.pcap.PcapUtils;
+
+@Path("/")
+public class PcapReceiverImplRestEasy {
+
+	/** The Constant LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(PcapReceiverImplRestEasy.class);
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_NAME. */
+	private static final String HEADER_CONTENT_DISPOSITION_NAME = "Content-Disposition";
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_VALUE. */
+	private static final String HEADER_CONTENT_DISPOSITION_VALUE = "attachment; filename=\"managed-threat.pcap\"";
+
+	/** partial response key header name. */
+	private static final String HEADER_PARTIAL_RESPONE_KEY = "lastRowKey";
+
+	@GET
+	@Path("pcapGetter/getPcapsByKeys")
+	public Response getPcapsByKeys(
+			@QueryParam("keys") List<String> keys,
+			@QueryParam("lastRowKey") String lastRowKey,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@QueryParam("includeDuplicateLastRow") boolean includeDuplicateLastRow,
+			@QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
+			@QueryParam("maxResponseSize") String maxResponseSize,
+			@Context HttpServletResponse response) throws IOException {
+		PcapsResponse pcapResponse = null;
+
+		if (keys == null || keys.size() == 0)
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'keys' must not be null or empty").build();
+
+		try {
+			IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+			pcapResponse = pcapGetter.getPcaps(parseKeys(keys), lastRowKey,
+					startTime, endTime, includeReverseTraffic,
+					includeDuplicateLastRow,
+					ConfigurationUtil.validateMaxResultSize(maxResponseSize));
+			LOGGER.info("pcaps response in REST layer ="
+					+ pcapResponse.toString());
+
+			// return http status '204 No Content' if the pcaps response size is
+			// 0
+			if (pcapResponse == null || pcapResponse.getResponseSize() == 0) {
+
+				return Response.status(Response.Status.NO_CONTENT).build();
+			}
+
+			// return http status '206 Partial Content', the partial response
+			// file and
+			// 'lastRowKey' header , if the pcaps response status is 'PARTIAL'
+
+			response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+			if (pcapResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+
+				response.setHeader(HEADER_PARTIAL_RESPONE_KEY,
+						pcapResponse.getLastRowKey());
+
+				return Response
+						.ok(pcapResponse.getPcaps(),
+								MediaType.APPLICATION_OCTET_STREAM).status(206)
+						.build();
+
+			}
+
+		} catch (IOException e) {
+			LOGGER.error(
+					"Exception occurred while fetching Pcaps for the keys :"
+							+ keys.toString(), e);
+			throw e;
+		}
+
+		// return http status '200 OK' along with the complete pcaps response
+		// file,
+		// and headers
+		// return new ResponseEntity<byte[]>(pcapResponse.getPcaps(), headers,
+		// HttpStatus.OK);
+
+		return Response
+				.ok(pcapResponse.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+
+	}
+	
+	
+	@GET
+	@Path("/pcapGetter/getPcapsByKeyRange")
+
+	  public Response getPcapsByKeyRange(
+	      @QueryParam("startKey") String startKey,
+	      @QueryParam("endKey")String endKey,
+	      @QueryParam("maxResponseSize") String maxResponseSize,
+	      @DefaultValue("-1") @QueryParam("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam("endTime") long endTime, 
+	      @Context HttpServletResponse servlet_response) throws IOException {
+
+		if (startKey == null || startKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'start key' must not be null or empty").build();
+		
+		if (startKey == null || startKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'end key' must not be null or empty").build();
+		
+		
+	    byte[] response = null;
+	    try {
+	      IPcapScanner pcapScanner = PcapScannerHBaseImpl.getInstance();
+	      response = pcapScanner.getPcaps(startKey, endKey,
+	          ConfigurationUtil.validateMaxResultSize(maxResponseSize), startTime,
+	          endTime);
+	      if (response == null || response.length == 0) {
+	    	  
+	    	  return Response.status(Response.Status.NO_CONTENT).build();
+	        
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error(
+	          "Exception occurred while fetching Pcaps for the key range : startKey="
+	              + startKey + ", endKey=" + endKey, e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    
+		return Response
+				.ok(response, MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+
+	  /*
+	   * (non-Javadoc)
+	   * 
+	   * @see
+	   * com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByIdentifiers(java.lang
+	   * .String, java.lang.String, java.lang.String, java.lang.String,
+	   * java.lang.String, long, long, boolean,
+	   * javax.servlet.http.HttpServletResponse)
+	   */
+	  
+	@GET
+	@Path("/pcapGetter/getPcapsByIdentifiers")
+
+	  public Response getPcapsByIdentifiers(
+	      @QueryParam ("srcIp") String srcIp, 
+	      @QueryParam ("dstIp") String dstIp,
+	      @QueryParam ("protocol") String protocol, 
+	      @QueryParam ("srcPort") String srcPort,
+	      @QueryParam ("dstPort") String dstPort,
+	      @DefaultValue("-1") @QueryParam ("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam ("endTime")long endTime,
+	      @DefaultValue("false") @QueryParam ("includeReverseTraffic") boolean includeReverseTraffic,
+	      @Context HttpServletResponse servlet_response)
+	      
+	      throws IOException {
+		
+		if (srcIp == null || srcIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcIp' must not be null or empty").build();
+		
+		if (dstIp == null || dstIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstIp' must not be null or empty").build();
+		
+		if (protocol == null || protocol.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'protocol' must not be null or empty").build();
+		
+		if (srcPort == null || srcPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcPort' must not be null or empty").build();
+		
+		if (dstPort == null || dstPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstPort' must not be null or empty").build();
+		
+	
+	    PcapsResponse response = null;
+	    try {
+	      String sessionKey = PcapUtils.getSessionKey(srcIp, dstIp, protocol,
+	          srcPort, dstPort);
+	      LOGGER.info("sessionKey =" + sessionKey);
+	      IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+	      response = pcapGetter.getPcaps(Arrays.asList(sessionKey), null,
+	          startTime, endTime, includeReverseTraffic, false,
+	          ConfigurationUtil.getDefaultResultSize());
+	      if (response == null || response.getResponseSize() == 0) {
+	         return Response.status(Response.Status.NO_CONTENT).build();
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error("Exception occurred while fetching Pcaps by identifiers :",
+	          e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    return Response
+				.ok(response.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+	/**
+	 * This method parses the each value in the List using delimiter ',' and
+	 * builds a new List;.
+	 * 
+	 * @param keys
+	 *            list of keys to be parsed
+	 * @return list of keys
+	 */
+	@VisibleForTesting
+	List<String> parseKeys(List<String> keys) {
+		// Assert.notEmpty(keys);
+		List<String> parsedKeys = new ArrayList<String>();
+		for (String key : keys) {
+			parsedKeys.addAll(Arrays.asList(StringUtils.split(
+					StringUtils.trim(key), ",")));
+		}
+		return parsedKeys;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
new file mode 100644
index 0000000..b1f0179
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
@@ -0,0 +1,302 @@
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.pcap.PcapMerger;
+
+/**
+ * Singleton class which integrates with HBase table and returns sorted pcaps
+ * based on the timestamp for the given range of keys. Creates HConnection if it
+ * is not already created and the same connection instance is being used for all
+ * requests
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapScannerHBaseImpl implements IPcapScanner {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapScannerHBaseImpl.class);
+
+  /** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+  private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+  /** The pcap scanner h base. */
+  private static IPcapScanner pcapScannerHBase = null;
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String, long, long, long)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey, long maxResultSize,
+      long startTime, long endTime) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    byte[] cf = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.family"));
+    byte[] cq = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.qualifier"));
+    // create scan request
+    Scan scan = createScanRequest(cf, cq, startKey, endKey, maxResultSize,
+        startTime, endTime);
+    List<byte[]> pcaps = new ArrayList<byte[]>();
+    HTable table = null;
+    try {
+      pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+    } catch (IOException e) {
+      LOGGER.error(
+          "Exception occurred while fetching Pcaps for the key range : startKey="
+              + startKey + ", endKey=" + endKey, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = getConnectionRetryLimit();
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the existing
+                                                      // connection and retry,
+                                                      // it will create a new
+                                                      // HConnection
+            pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              System.out.println("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      } else {
+        throw e;
+      }
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    byte[] response = baos.toByteArray();
+    return response;
+  }
+
+  /**
+   * Creates the scan request.
+   * 
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @param maxResultSize
+   *          the max result size
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @return the scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(byte[] cf, byte[] cq, String startKey, String endKey,
+      long maxResultSize, long startTime, long endTime) throws IOException {
+    Scan scan = new Scan();
+    scan.addColumn(cf, cq);
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+    scan.setStartRow(startKey.getBytes());
+    if (endKey != null) {
+      scan.setStopRow(endKey.getBytes());
+    }
+    scan.setMaxResultSize(maxResultSize);
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+    }
+    // create Scan request;
+    if (setTimeRange) {
+      scan.setTimeRange(startTime, endTime);
+    }
+    return scan;
+  }
+
+  /**
+   * Scan pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   * @param table
+   *          the table
+   * @param scan
+   *          the scan
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @return the list
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  List<byte[]> scanPcaps(List<byte[]> pcaps, HTable table, Scan scan,
+      byte[] cf, byte[] cq) throws IOException {
+    LOGGER.info("Scan =" + scan.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(cf, cq);
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    LOGGER.info("sorted cells :" + scannedCells.toString());
+    for (Cell sortedCell : scannedCells) {
+      pcaps.add(CellUtil.cloneValue(sortedCell));
+    }
+    return pcaps;
+  }
+
+  /**
+   * Gets the connection retry limit.
+   * 
+   * @return the connection retry limit
+   */
+  private int getConnectionRetryLimit() {
+    return ConfigurationUtil.getConfiguration().getInt(
+        "hbase.hconnection.retries.number", DEFAULT_HCONNECTION_RETRY_LIMIT);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    Assert.hasText(endKey, "endKey must no be null or empty");
+    return getPcaps(startKey, endKey, ConfigurationUtil.getDefaultResultSize(),
+        -1, -1);
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapScanner singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapScanner getInstance() throws IOException {
+    if (pcapScannerHBase == null) {
+      synchronized (PcapScannerHBaseImpl.class) {
+        if (pcapScannerHBase == null) {
+          pcapScannerHBase = new PcapScannerHBaseImpl();
+        }
+      }
+    }
+    return pcapScannerHBase;
+  }
+
+  /**
+   * Instantiates a new pcap scanner h base impl.
+   */
+  private PcapScannerHBaseImpl() {
+  }
+
+  /**
+   * The main method.
+   */
+  // public static void main(String[] args) throws IOException {
+  // if (args == null || args.length < 3) {
+  // usage();
+  // return;
+  // }
+  // String outputFileName = null;
+  // String startKey = null;
+  // String stopKey = null;
+  // outputFileName = args[0];
+  // startKey = args[1];
+  // if (args.length > 2) { // NOPMD by sheetal on 1/29/14 3:55 PM
+  // stopKey = args[2];
+  // }
+  // PcapScannerHBaseImpl downloader = new PcapScannerHBaseImpl();
+  // byte[] pcaps = downloader.getPcaps(startKey, stopKey, defaultResultSize, 0,
+  // Long.MAX_VALUE);
+  // File file = new File(outputFileName);
+  // FileUtils.write(file, "", false);
+  // ByteArrayOutputStream baos = new ByteArrayOutputStream(); //
+  // $codepro.audit.disable
+  // // closeWhereCreated
+  // PcapMerger.merge(baos, pcaps);
+  // FileUtils.writeByteArrayToFile(file, baos.toByteArray(), true);
+  // }
+
+  /**
+   * Usage.
+   */
+  @SuppressWarnings("unused")
+  private static void usage() {
+    System.out.println("java " + PcapScannerHBaseImpl.class.getName() // NOPMD
+                                                                      // by
+        // sheetal
+        // <!-- //
+        // $codepro.audit.disable
+        // debuggingCode
+        // -->
+        // on
+        // 1/29/14
+        // 3:55
+        // PM
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
new file mode 100644
index 0000000..10af9e0
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
@@ -0,0 +1,153 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.opensoc.pcap.PcapMerger;
+
+
+
+/**
+ * Holds pcaps data, status and the partial response key.
+ * 
+ * @author Sayi
+ */
+public class PcapsResponse {
+
+  /**
+   * The Enum Status.
+   */
+  public enum Status {
+    
+    /** The partial. */
+    PARTIAL, 
+ /** The complete. */
+ COMPLETE
+  };
+
+  /** response of the processed keys. */
+  private List<byte[]> pcaps = new ArrayList<byte[]>();;
+
+  /** partial response key. */
+  private String lastRowKey;
+
+  /** The status. */
+  private Status status = Status.COMPLETE;
+
+  /**
+   * Sets the pcaps.
+   * 
+   * @param pcaps
+   *          the new pcaps
+   */
+  public void setPcaps(List<byte[]> pcaps) {
+    this.pcaps = pcaps;
+  }
+
+  /**
+   * Adds the pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   */
+  public void addPcaps(byte[] pcaps) {
+    this.pcaps.add(pcaps);
+  }
+
+  /**
+   * Gets the partial response key.
+   * 
+   * @return the partial response key
+   */
+  public String getLastRowKey() {
+    return lastRowKey;
+  }
+
+  /**
+   * Sets the partial response key.
+   * 
+   * @param lastRowKey
+   *          the last row key
+   */
+  public void setLastRowKey(String lastRowKey) {
+    this.lastRowKey = lastRowKey;
+  }
+
+  /**
+   * Gets the status.
+   * 
+   * @return the status
+   */
+  public Status getStatus() {
+    return status;
+  }
+
+  /**
+   * Sets the status.
+   * 
+   * @param status
+   *          the new status
+   */
+  public void setStatus(Status status) {
+    this.status = status;
+  }
+
+  /**
+   * Checks if is resonse size within limit.
+   * 
+   * @param maxResultSize
+   *          the max result size
+   * @return true, if is resonse size within limit
+   */
+  public boolean isResonseSizeWithinLimit(long maxResultSize) {
+    // System.out.println("isResonseSizeWithinLimit() : getResponseSize() < (input|default result size - maximum packet size ) ="+
+    // getResponseSize()+ " < " + ( maxResultSize
+    // -ConfigurationUtil.getMaxRowSize()));
+    return getResponseSize() < (maxResultSize - ConfigurationUtil
+        .getMaxRowSize());
+  }
+
+  /**
+   * Gets the response size.
+   * 
+   * @return the response size
+   */
+  public long getResponseSize() {
+    long responseSize = 0;
+    for (byte[] pcap : this.pcaps) {
+      responseSize = responseSize + pcap.length;
+    }
+    return responseSize;
+  }
+
+  /**
+   * Gets the pcaps.
+   * 
+   * @return the pcaps
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps() throws IOException {
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    return baos.toByteArray();
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "PcapsResponse [lastRowKey=" + lastRowKey
+        + ", status=" + status + ", pcapsSize="
+        + String.valueOf(getResponseSize()) + "]";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
new file mode 100644
index 0000000..651affe
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
@@ -0,0 +1,238 @@
+package com.opensoc.pcapservice;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * The Class RestTestingUtil.
+ */
+public class RestTestingUtil {
+  
+  /** The host name. */
+  public static String hostName = null;
+
+  /**
+   * Gets the pcaps by keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the pcaps by keys
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeys(String keys) {
+    System.out
+        .println("**********************getPcapsByKeys ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
+        + "&includeReverseTraffic={includeReverseTraffic}"
+        + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    String includeReverseTraffic = "false";
+
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("keys", keys);
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with reverse traffic
+    includeReverseTraffic = "true";
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3.with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 4.with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response4 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response4);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * Gets the pcaps by keys range.
+   * 
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @return the pcaps by keys range
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeysRange(String startKey, String endKey) {
+    System.out
+        .println("**********************getPcapsByKeysRange ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
+        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("startKey", startKey);
+    map.put("endKey", "endKey");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3. with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   */
+  public static void main(String[] args) {
+
+    /*
+     * Run this program with system properties
+     * 
+     * -DhostName=mon.hw.com:8090
+     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
+     * 1800000b-06-0050-5af6-64840-40785
+     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
+     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
+     */
+
+    hostName = System.getProperty("hostName");
+
+    String keys = System.getProperty("keys");
+
+    String statyKey = System.getProperty("startKey");
+    String endKey = System.getProperty("endKey");
+
+    getPcapsByKeys(keys);
+    getPcapsByKeysRange(statyKey, endKey);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
new file mode 100644
index 0000000..1fdb025
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
@@ -0,0 +1,26 @@
+package com.opensoc.pcapservice.rest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.core.Application;
+
+import com.opensoc.pcapservice.PcapReceiverImplRestEasy;
+
+public class JettyServiceRunner extends Application  {
+
+	/** Singleton REST resource instances served by this JAX-RS application. */
+	private static Set<Object> services = new HashSet<Object>();
+
+	public JettyServiceRunner() {
+		// initialize restful services
+		// NOTE: the set is static, so constructing this class more than once
+		// would register a second resource instance; the servlet container is
+		// expected to instantiate it exactly once.
+		services.add(new PcapReceiverImplRestEasy());
+	}
+
+	@Override
+	public Set<Object> getSingletons() {
+		return services;
+	}
+
+	/** @return the registered REST resource singletons. */
+	public static Set<Object> getServices() {
+		return services;
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
new file mode 100644
index 0000000..5f47ead
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
@@ -0,0 +1,34 @@
+package com.opensoc.pcapservice.rest;
+
+import java.io.IOException;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher;
+
+import com.opensoc.helpers.services.PcapServiceCli;
+
+
+public class PcapService {
+
+	/**
+	 * Boots an embedded Jetty server hosting the RESTEasy-based pcap service.
+	 *
+	 * @param args command-line arguments, parsed by {@link PcapServiceCli}
+	 * @throws IOException if command-line parsing fails with an I/O error
+	 */
+	public static void main(String[] args) throws IOException {
+
+		PcapServiceCli cli = new PcapServiceCli(args);
+		cli.parse();
+
+		// RESTEasy dispatcher servlet, pointed at the JAX-RS application class.
+		ServletHolder dispatcher = new ServletHolder(new HttpServletDispatcher());
+		dispatcher.setInitParameter("javax.ws.rs.Application",
+				"com.opensoc.pcapservice.rest.JettyServiceRunner");
+
+		ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+		context.setContextPath("/");
+		context.addServlet(dispatcher, "/*");
+
+		Server server = new Server(cli.getPort());
+		server.setHandler(context);
+		try {
+			server.start();
+			// Block the main thread until the server shuts down.
+			server.join();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
new file mode 100644
index 0000000..6b17410
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
@@ -0,0 +1,37 @@
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+public class OnlyDeleteExpiredFilesCompactionPolicy extends RatioBasedCompactionPolicy {
+  private static final Log LOG = LogFactory.getLog(OnlyDeleteExpiredFilesCompactionPolicy.class);
+
+  /**
+   * Constructor.
+   * 
+   * @param conf
+   *          The Conf.
+   * @param storeConfigInfo
+   *          Info about the store.
+   */
+  public OnlyDeleteExpiredFilesCompactionPolicy(final Configuration conf, final StoreConfigInformation storeConfigInfo) {
+    super(conf, storeConfigInfo);
+  }
+
+  /**
+   * Always returns an empty candidate list so that no real compaction is ever
+   * scheduled for this store; the region server is then left to only remove
+   * store files whose TTL has expired.
+   */
+  @Override
+  final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates, final boolean mayUseOffPeak,
+      final boolean mayBeStuck) throws IOException {
+    LOG.info("Sending empty list for compaction to avoid compaction and do only deletes of files older than TTL");
+
+    return new ArrayList<StoreFile>();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
new file mode 100644
index 0000000..efe05e8
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<configuration>
+	<header>
+		<result delimiterParsingDisabled="true" forceReloadCheck="true"></result>
+		<lookups>
+      		<lookup config-prefix="expr"
+              	config-class="org.apache.commons.configuration.interpol.ExprLookup">
+        		<variables>
+          			<variable name="System" value="Class:java.lang.System"/>
+          			<variable name="net" value="Class:java.net.InetAddress"/>
+          			<variable name="String" value="Class:org.apache.commons.lang.StringUtils"/>
+        		</variables>
+      		</lookup>
+    	</lookups>
+	</header>
+	<override>
+		<!-- 1. properties from 'hbase-config.properties' are loaded first; 
+				if a property is not present in this file, then it will search in the files in the order they are defined here.
+		     2. 'refreshDelay' indicates the minimum delay in milliseconds between checks to see if the underlying file is changed.
+		     3. 'config-optional' indicates this file is not required --> 
+		
+		<properties fileName="${expr:System.getProperty('configPath')+'/hbase-config.properties'}"  config-optional="true">
+			<reloadingStrategy refreshDelay="${expr:System.getProperty('configRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+	     </properties>
+		
+		<properties fileName="hbase-config-default.properties" config-optional="true">
+<!-- 					<reloadingStrategy refreshDelay="${expr:System.getProperty('defaultConfigRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+ -->	     </properties>
+		
+	</override>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
new file mode 100644
index 0000000..4ee56b6
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
@@ -0,0 +1,40 @@
+#hbase zoo keeper configuration
+hbase.zookeeper.quorum=zkpr1,zkpr2,zkpr3
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=pcap
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 70
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request, applicable for only partial response scenario
+hbase.table.scan.include.duplicate.lastrow=true
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
new file mode 100644
index 0000000..5c3c819
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
@@ -0,0 +1,127 @@
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
new file mode 100644
index 0000000..0b6ca10
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
@@ -0,0 +1,21 @@
+# Root logger option
+log4j.rootLogger=TRACE,file,stdout
+
+# Direct log messages to a log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=/var/log/hbase/cisco-hbase.log
+log4j.appender.file.MaxFileSize=1MB
+log4j.appender.file.MaxBackupIndex=1
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+
+# Direct log messages to console
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+log4j.logger.backtype.storm=DEBUG
+log4j.logger.clojure.tools=DEBUG
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
new file mode 100644
index 0000000..c2a4bf2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
@@ -0,0 +1,92 @@
+package com.opensoc.pcapservice;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hbase.Cell;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.opensoc.pcapservice.CellTimestampComparator;
+
+/**
+ * Unit tests for {@link CellTimestampComparator}.
+ */
+public class CellTimestampComparatorTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Verifies that a cell with a smaller timestamp sorts before one with a
+   * larger timestamp.
+   */
+  @Test
+  public void test_less() {
+    Cell later = mockCellWithTimestamp(13945345808L);
+    Cell earlier = mockCellWithTimestamp(13845345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // The Comparator contract only guarantees the sign of the result, not
+    // that it is exactly -1, so assert on the sign.
+    Assert.assertTrue(comparator.compare(earlier, later) < 0);
+  }
+
+  /**
+   * Verifies that a cell with a larger timestamp sorts after one with a
+   * smaller timestamp.
+   */
+  @Test
+  public void test_greater() {
+    Cell earlier = mockCellWithTimestamp(13745345808L);
+    Cell later = mockCellWithTimestamp(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // Only the sign is guaranteed by the Comparator contract.
+    Assert.assertTrue(comparator.compare(later, earlier) > 0);
+  }
+
+  /**
+   * Verifies that cells with identical timestamps compare as equal.
+   */
+  @Test
+  public void test_equal() {
+    Cell first = mockCellWithTimestamp(13945345808L);
+    Cell second = mockCellWithTimestamp(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    Assert.assertTrue(comparator.compare(second, first) == 0);
+  }
+
+  /**
+   * Creates a mock {@link Cell} that reports the given timestamp.
+   *
+   * @param timestamp
+   *          the timestamp the mocked cell returns
+   * @return the mocked cell
+   */
+  private static Cell mockCellWithTimestamp(long timestamp) {
+    Cell cell = Mockito.mock(Cell.class);
+    Mockito.when(cell.getTimestamp()).thenReturn(timestamp);
+    return cell;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
new file mode 100644
index 0000000..7adf388
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
@@ -0,0 +1,50 @@
+package com.opensoc.pcapservice;
+
+// BUG FIX: the original imported org.eclipse.jdt.internal.core.Assert, an
+// internal Eclipse JDT class that is not a test dependency; use JUnit's
+// Assert instead.
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.opensoc.pcapservice.ConfigurationUtil;
+import com.opensoc.pcapservice.ConfigurationUtil.SizeUnit;
+
+/**
+ * Unit tests for {@link ConfigurationUtil} result/row size configuration
+ * accessors.
+ */
+public class ConfigurationUtilTest {
+
+  /**
+   * Test_get max allowable result size in bytes.
+   */
+  @Test
+  public void test_getMaxAllowableResultSizeInBytes() {
+    long result = ConfigurationUtil.getMaxResultSize();
+    // 60 MB (hbase.scan.max.result.size) expressed in bytes: 60 * 1024 * 1024.
+    Assert.assertEquals(62914560L, result);
+  }
+
+  /**
+   * Test_get max allowable results size unit.
+   */
+  @Test
+  public void test_getMaxAllowableResultsSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getResultSizeUnit();
+    Assert.assertEquals(SizeUnit.MB, result);
+  }
+
+  /**
+   * Test_get max row size in bytes.
+   */
+  @Test
+  public void test_getMaxRowSizeInBytes() {
+    long result = ConfigurationUtil.getMaxRowSize();
+    // 70 KB (hbase.table.max.row.size) expressed in bytes: 70 * 1024.
+    Assert.assertEquals(71680L, result);
+  }
+
+  /**
+   * Test_get max row size unit.
+   */
+  @Test
+  public void test_getMaxRowSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getRowSizeUnit();
+    Assert.assertEquals(SizeUnit.KB, result);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
new file mode 100644
index 0000000..91f87a9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
@@ -0,0 +1,52 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.util.Assert;
+
+import com.opensoc.pcapservice.HBaseConfigurationUtil;
+
+/**
+ * Unit tests for {@link HBaseConfigurationUtil}.
+ */
+public class HBaseConfigurationUtilTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Verifies that the HBase configuration can be read and that values from
+   * the configured property sources are visible through it.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_read() throws IOException {
+    Configuration configuration = HBaseConfigurationUtil.read();
+    Assert.isTrue(configuration != null, "Configuration must not be null");
+    String retries = configuration.get("hbase.client.retries.number");
+    Assert.isTrue(retries.equals("1"), "value must be equal");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
new file mode 100644
index 0000000..75f8121
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
@@ -0,0 +1,74 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * The Class HBaseIntegrationTest.
+ * 
+ * @author Sayi
+ */
+public class HBaseIntegrationTest {
+
+  /** The test util. */
+  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+
+  /** The test table. */
+  private HTable testTable;
+
+  /**
+   * Inits the cluster.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  void initCluster() throws Exception {
+    // testUtil.getConfiguration().addResource("hbase-site-local.xml");
+    // testUtil.getConfiguration().reloadConfiguration();
+    // start mini hbase cluster
+    testUtil.startMiniCluster(1);
+    // create tables
+    createTable();
+
+  }
+
+  /**
+   * Creates the table.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void createTable() throws IOException {
+    testTable = testUtil.createTable("test_pcaps_local", "cf");
+    System.out.println("after 'test_pcaps_local' table creation ");
+    // create put
+    Put put = new Put(Bytes.toBytes("1111")); // row key =1111
+    put.add(Bytes.toBytes("cf"), Bytes.toBytes("packet"),
+        Bytes.toBytes("aaaaaaaa"));
+    testTable.put(put);
+    System.out.println("after testTable.put(put)");
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * @throws Exception
+   *           the exception
+   */
+  public static void main(String[] args) throws Exception {
+    // HBaseIntegrationTest test = new HBaseIntegrationTest();
+    // test.initCluster();
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
new file mode 100644
index 0000000..6e0ad9e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
@@ -0,0 +1,536 @@
+package com.opensoc.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Scan;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.util.Assert;
+
+import com.opensoc.pcapservice.PcapGetterHBaseImpl;
+import com.opensoc.pcapservice.PcapsResponse;
+
+/**
+ * The Class PcapGetterHBaseImplTest.
+ */
+public class PcapGetterHBaseImplTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_get pcaps_with list.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withList() throws IOException {
+    // mocking
+    String[] keys = { "0a07002b-0a078039-06-1e8b-0087",
+        "0a070025-0a07807a-06-aab8-c360" };
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+    //
+    // actual call
+    // PcapsResponse response = spy.getPcaps(Arrays.asList(keys));
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // //
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key and timestamps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKeyAndTimestamps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key, startTime, endTime, false);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key_multiple pcaps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey_multiplePcaps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+    mockPcaps.add(getTestPcapBytes());
+
+    /*
+     * Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class
+     * ), Mockito.any(HTable.class), Mockito.any(Scan.class),
+     * Mockito.any(byte[].class), Mockito.any(byte[].class));
+     */
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertNotNull(response);
+    // Assert.assertTrue(response.getResponseSize() > mockPcaps.get(0).length);
+  }
+
+  /**
+   * Gets the test pcap bytes.
+   * 
+   * @return the test pcap bytes
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private byte[] getTestPcapBytes() throws IOException {
+    File fin = new File("src/test/resources/test-tcp-packet.pcap");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    return pcapBytes;
+  }
+
+  /**
+   * Test_remove duplicates.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_removeDuplicates() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+    keys.add("18800006-1800000b-06-0050-5af6");
+
+    List<String> deDupKeys = pcapGetter.removeDuplicateKeys(keys);
+    Assert.isTrue(deDupKeys.size() == 3);
+    List<String> testKeys = new ArrayList<String>();
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    ListUtils.isEqualList(deDupKeys, testKeys);
+  }
+
+  /**
+   * Test_sort keys by asc order_with out reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withOutReverseTraffic()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, false);
+
+    List<String> testKeys = new ArrayList<String>();
+    testKeys.add("18800006-1800000b-06-0019-caac");
+    testKeys.add("18800006-1800000b-06-0050-5af6");
+    testKeys.add("18800006-1800000b-11-0035-3810");
+
+    Assert.isTrue(ListUtils.isEqualList(result, testKeys));
+  }
+
+  /**
+   * Test_sort keys by asc order_with reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withReverseTraffic() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3812");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-11-0035-3811");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, true);
+    Assert.isTrue(result.size() == 6);
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0019-caac-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 2);
+
+    List<String> unprocessedKeys2 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0050-5af6-65140-40815");
+    // System.out.println("unprocessedKeys2 ="+unprocessedKeys2);
+    Assert.isTrue(unprocessedKeys2.size() == 1);
+
+    List<String> unprocessedKeys3 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-0035-3810-6514040815");
+    // System.out.println("unprocessedKeys3 ="+unprocessedKeys3);
+    Assert.isTrue(unprocessedKeys3.size() == 0);
+
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys_with out match.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys_withOutMatch()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-89-455-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 3);
+  }
+
+  /**
+   * Test_create start and stop row keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createStartAndStopRowKeys() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810";
+    Map<String, String> map = pcapGetter.createStartAndStopRowKeys(key, false,
+        false);
+    System.out.println("map =" + map.toString());
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map1 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey, true, false);
+    System.out.println("map1 =" + map1.toString());
+
+    String lastRowKey2 = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map2 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey2, true, true);
+    System.out.println("map2 =" + map2.toString());
+
+  }
+
+  /**
+   * Test_check if valid input_valid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+
+    boolean response = pcapGetter.checkIfValidInput(keys, lastRowKey);
+    Assert.isTrue(response);
+
+  }
+
+  /**
+   * Test_check if valid input_in valid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_inValid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        null);
+    Assert.isTrue(!response);
+
+  }
+
+  /**
+   * Test_check if valid input_valid_mixed.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid_mixed() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        lastRowKey);
+    Assert.isTrue(response);
+  }
+
+  /**
+   * Test_create get request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    // compare in micros as the data creation time unit is set to Micros in
+    // properties file.
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 );
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 );
+  }
+
+  /**
+   * Test_create get request_default time range.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_defaultTimeRange() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    Get get = pcapGetter.createGetRequest(key, -1, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+  }
+
+  /**
+   * Test_create get request_with start time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withStartTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 );
+    Assert.isTrue(get.getTimeRange().getMax() == Long.valueOf(Long.MAX_VALUE));
+  }
+
+  /**
+   * Test_create get request_with end time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withEndTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, -1, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 );
+  }
+
+  /**
+   * Test_create scan request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createScanRequest() throws IOException {
+    // mocking
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+
+    Map<String, String> keysMap = new HashMap<String, String>();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087-00000-00000";
+    String endKey = "0a070025-0a07807a-06-aab8-c360-99999-99999";
+    keysMap.put("startKey", startKey);
+    keysMap.put("endKey", endKey);
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+    long maxResultSize = 673424;
+
+    // actual call
+    Scan scan = pcapGetter.createScanRequest(pcapsResponse, keysMap, startTime,
+        endTime, maxResultSize);
+
+    // verify time range
+    Assert.isTrue(scan.getTimeRange().getMin() == startTime * 1000 ); // compare
+                                                                            // in
+                                                                            // millis
+    Assert.isTrue(scan.getTimeRange().getMax() == endTime * 1000 ); // compare
+                                                                          // in
+                                                                          // millis
+
+    // verify start and stop rows
+    Assert.isTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.isTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
new file mode 100644
index 0000000..a1f6c04
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
@@ -0,0 +1,321 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.eclipse.jdt.internal.core.Assert;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import com.opensoc.pcapservice.PcapHelper;
+import com.opensoc.pcapservice.PcapHelper.TimeUnit;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapHelperTest.
+ * 
+ * @author Sayi
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(PcapHelper.class)
+public class PcapHelperTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+    PowerMockito.spy(PcapHelper.class);
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233344L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_random() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 13388; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(13388000000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233000L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222334444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222334444L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_0() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 0; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(0 == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_1() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_decimal() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long inputTime = 13; // input time in seconds (double to long type casting)
+    long time = PcapHelper.convertSecondsToDataCreationTimeUnit(inputTime);
+
+    Assert.isTrue(13000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = (long) 111.333; // input time in seconds (double to long type
+                                   // casting)
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111000000L == time);
+  }
+
+  /**
+   * Test_get data creation time unit.
+   */
+  @Test
+  public void test_getDataCreationTimeUnit() {
+    TimeUnit dataCreationTimeUnit = PcapHelper.getDataCreationTimeUnit();
+    Assert.isTrue(TimeUnit.MILLIS == dataCreationTimeUnit);
+  }
+
+  /**
+   * Test_reverse key_valid.
+   */
+  @Test
+  public void test_reverseKey_valid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_valid_with fragment.
+   */
+  @Test
+  public void test_reverseKey_valid_withFragment() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_in valid.
+   */
+  @Test
+  public void test_reverseKey_inValid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-ipId-fragmentId-extra";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("".equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_as list.
+   */
+  @Test
+  public void test_reverseKey_asList() {
+    String[] keys = {
+        "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId",
+        "162.242.152.24-162.242.153.12-UDP-38190-9092" };
+
+    List<String> reverseKeys = PcapHelper.reverseKey(Arrays.asList(keys));
+
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reverseKeys.get(0)));
+    Assert.isTrue("162.242.153.12-162.242.152.24-UDP-9092-38190"
+        .equals(reverseKeys.get(1)));
+  }
+
+}


[23/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractConfigTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractConfigTest.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractConfigTest.java
new file mode 100644
index 0000000..7484e16
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractConfigTest.java
@@ -0,0 +1,299 @@
+
+ 
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.test;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.github.fge.jackson.JsonLoader;
+import com.github.fge.jsonschema.core.report.ProcessingReport;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import com.github.fge.jsonschema.main.JsonValidator;
+import com.opensoc.helpers.topology.SettingsLoader;
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: The class <code>AbstractConfigTest</code> is
+ * an abstract base class for implementing JUnit tests that need to use
+ * config to connect to ZooKeeper and HBase. The <code>setup</code> method will attempt to
+ * load a properties from a file, located in src/test/resources,
+ * with the same name as the class.</li>
+ * <li>Created: Oct 10, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AbstractConfigTest  extends AbstractTestContext{
+         /**
+         * The configPath.
+         */
+        protected String configPath=null;   
+        
+        /**
+        * The configName.
+        */
+       protected String configName=null;           
+
+        /**
+         * The config.
+         */
+        private Configuration config=null;
+        
+         /**
+         * The settings.
+         */
+        Map<String, String> settings=null;       
+
+        /**
+         * The schemaJsonString.
+         */
+        private String schemaJsonString = null;
+        /**
+         * Any Object for mavenMode
+         * @parameter
+         *   expression="${mode}"
+         *   default-value="local"
+         */
+         private Object mode="local";        
+
+        /**
+         * Constructs a new <code>AbstractConfigTest</code> instance.
+         * @throws Exception 
+         */
+        public AbstractConfigTest() throws Exception {
+            super.setUp();
+        }
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @param name the name of the test case.
+         */
+        public AbstractConfigTest(String name) {
+            super(name);
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#setUp()
+         */
+        protected void setUp(String configName) throws Exception {
+            super.setUp();
+            this.setConfigPath("src/test/resources/config/"+getClass().getSimpleName()+".config");
+            try {
+                this.setConfig(new PropertiesConfiguration(this.getConfigPath()));
+               
+                Map configOptions= SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName+"=");
+                this.setSettings(SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName + "."));
+                this.getSettings().put(configName, (String) configOptions.get(configName));
+            } catch (ConfigurationException e) {
+                fail("Config not found !!"+e);
+                e.printStackTrace();
+            }               
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#tearDown()
+         */
+        @Override
+        protected void tearDown() throws Exception {
+
+        }
+
+        
+         /**
+         * validateJsonData
+         * @param jsonSchema
+         * @param jsonData
+         * @return
+         * @throws Exception
+         */
+         
+        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
+            throws Exception {
+    
+            final JsonNode d = JsonLoader.fromString(jsonData);
+            final JsonNode s = JsonLoader.fromString(jsonSchema);
+    
+            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
+            JsonValidator v = factory.getValidator();
+    
+            ProcessingReport report = v.validate(s, d);
+            System.out.println(report);
+            
+            return report.toString().contains("success");
+        }
+        
+        protected String readSchemaFromFile(URL schema_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    schema_url.getFile()));
+            String line;
+            StringBuilder sb = new StringBuilder();
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                sb.append(line);
+            }
+            br.close();
+
+            String schema_string = sb.toString().replaceAll("\n", "");
+            schema_string = schema_string.replaceAll(" ", "");
+
+            System.out.println("Read in schema: " + schema_string);
+
+            return schema_string;
+        }        
+  
+        protected String[] readTestDataFromFile(String test_data_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    new File(test_data_url)));
+            ArrayList<String> inputDataLines = new ArrayList<String>();
+           
+            String line;
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                inputDataLines.add(line.toString().replaceAll("\n", ""));
+            }
+            br.close();
+            String[] inputData = new String[inputDataLines.size()];
+            inputData = inputDataLines.toArray(inputData);
+
+            return inputData;
+        }          
+       /**
+        * Skip Tests
+        */
+       public boolean skipTests(Object mode){
+           if(mode.toString().equals("local")){
+               return true;
+           }else {
+               return false;
+           }
+       }
+       
+       /**
+        * Returns the mode.
+        * @return the mode.
+        */
+       
+       public Object getMode() {
+           return mode;
+       }
+
+       /**
+        * Sets the mode.
+        * @param mode the mode.
+        */
+       
+       public void setMode(Object mode) {
+       
+           this.mode = mode;
+       }
+
+    
+         /**
+         * @param readSchemaFromFile
+         */
+        public void setSchemaJsonString(String schemaJsonString) {
+            this.schemaJsonString=schemaJsonString;
+        }
+
+    
+         /**
+         * @return
+         */
+        public String getSchemaJsonString() {
+           return this.schemaJsonString;
+        }
+        
+        /**
+        * Returns the configPath.
+        * @return the configPath.
+        */
+       public String getConfigPath() {
+           return configPath;
+       }
+    
+       /**
+        * Sets the configPath.
+        * @param configPath the configPath.
+        */
+       public void setConfigPath(String configPath) {
+           this.configPath = configPath;
+       }    
+       /**
+        * Returns the config.
+        * @return the config.
+        */
+       
+       public Configuration getConfig() {
+           return config;
+       }
+    
+       /**
+        * Sets the config.
+        * @param config the config.
+        */
+       
+       public void setConfig(Configuration config) {
+       
+           this.config = config;
+       }  
+       /**
+        * Returns the settings.
+        * @return the settings.
+        */
+       
+       public Map<String, String> getSettings() {
+           return settings;
+       }
+
+       /**
+        * Sets the settings.
+        * @param settings the settings.
+        */
+       
+       public void setSettings(Map<String, String> settings) {
+           this.settings = settings;
+       }   
+       /**
+       * Returns the configName.
+       * @return the configName.
+       */
+      public String getConfigName() {
+          return configName;
+      }
+
+      /**
+       * Sets the configName.
+       * @param configName the configName.
+       */
+      public void setConfigName(String configName) {  
+          this.configName = configName;
+      }       
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractSchemaTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractSchemaTest.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractSchemaTest.java
new file mode 100644
index 0000000..670d7f9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractSchemaTest.java
@@ -0,0 +1,198 @@
+
+ 
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.test;
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.net.URL;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.github.fge.jackson.JsonLoader;
+import com.github.fge.jsonschema.core.report.ProcessingReport;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import com.github.fge.jsonschema.main.JsonValidator;
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: The class <code>AbstractSchemaTest</code> is
+ * an abstract base class for implementing JUnit tests that need to load a
+ * Json Schema. The <code>setup</code> method will attempt to
+ * load a properties from a file, located in src/test/resources,
+ * with the same name as the class.</li>
+ * <li>Created: Aug 7, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AbstractSchemaTest  extends AbstractConfigTest{
+        
+        
+         /**
+         * The schemaJsonString.
+         */
+        private String schemaJsonString = null;
+        /**
+         * Any Object for mavenMode
+         * @parameter
+         *   expression="${mode}"
+         *   default-value="local"
+         */
+         private Object mode="local";        
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @throws Exception 
+         */
+        public AbstractSchemaTest() throws Exception {
+            super.setUp();
+        }
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @param name the name of the test case.
+         */
+        public AbstractSchemaTest(String name) {
+            super(name);
+            try{
+                if(System.getProperty("mode")!=null){
+                    setMode(System.getProperty("mode") );                
+                }else
+                {
+                    setMode("local");
+                }
+            }catch(Exception ex){
+                setMode("local");
+            }            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#setUp()
+         */
+        @Override
+        protected void setUp() throws Exception {
+            super.setUp();
+            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#tearDown()
+         */
+        @Override
+        protected void tearDown() throws Exception {
+
+        }
+
+        
+         /**
+         * validateJsonData
+         * @param jsonSchema
+         * @param jsonData
+         * @return
+         * @throws Exception
+         */
+         
+        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
+            throws Exception {
+    
+            final JsonNode d = JsonLoader.fromString(jsonData);
+            final JsonNode s = JsonLoader.fromString(jsonSchema);
+    
+            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
+            JsonValidator v = factory.getValidator();
+    
+            ProcessingReport report = v.validate(s, d);
+            System.out.println(report);
+            
+            return report.toString().contains("success");
+        }
+        
+        protected String readSchemaFromFile(URL schema_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    schema_url.getFile()));
+            String line;
+            StringBuilder sb = new StringBuilder();
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                sb.append(line);
+            }
+            br.close();
+
+            String schema_string = sb.toString().replaceAll("\n", "");
+            schema_string = schema_string.replaceAll(" ", "");
+
+            System.out.println("Read in schema: " + schema_string);
+
+            return schema_string;
+
+        }        
+        
+       /**
+        * Skip Tests
+        */
+       public boolean skipTests(Object mode){
+           if(mode.toString().equals("local")){
+               return true;
+           }else {
+               return false;
+           }
+       }
+       
+       /**
+        * Returns the mode.
+        * @return the mode.
+        */
+       
+       public Object getMode() {
+           return mode;
+       }
+
+       /**
+        * Sets the mode.
+        * @param mode the mode.
+        */
+       
+       public void setMode(Object mode) {
+       
+           this.mode = mode;
+       }
+
+    
+     /**
+     
+     * @param readSchemaFromFile
+     */
+     
+    public void setSchemaJsonString(String schemaJsonString) {
+        this.schemaJsonString=schemaJsonString;
+    }
+
+    
+     /**
+     
+     * @return
+     */
+     
+    public String getSchemaJsonString() {
+       return this.schemaJsonString;
+    }
+     
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractTestContext.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractTestContext.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractTestContext.java
index 7f7f34a..ea5b04f 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractTestContext.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/AbstractTestContext.java
@@ -52,7 +52,7 @@ public class AbstractTestContext  extends TestCase{
          * Any Object for mavenMode
          * @parameter
          *   expression="${mode}"
-         *   default-value="local"
+         *   default-value="global"
          */
          private Object mode="local";        
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/ISEParserTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/ISEParserTest.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/ISEParserTest.java
deleted file mode 100644
index 47061b6..0000000
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/test/ISEParserTest.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.opensoc.test;
-
-import java.io.BufferedReader;
-import java.io.DataInputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.StringReader;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.json.simple.JSONObject;
-
-import com.opensoc.ise.parser.ISEParser;
-import com.opensoc.ise.parser.ParseException;
-
-public class ISEParserTest {
-
-	public static void main(String[] args) throws ParseException, IOException {
-	}
-
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/BasicTldExtractor.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/BasicTldExtractor.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/BasicTldExtractor.java
new file mode 100644
index 0000000..2dbcd95
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/BasicTldExtractor.java
@@ -0,0 +1,137 @@
+package com.opensoc.tldextractor;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class BasicTldExtractor implements Serializable {
+	private static final long serialVersionUID = -7440226111118873815L;
+	private StringBuilder sb = new StringBuilder();
+
+    private Pattern pattern;
+    
+    /**
+    * The inputFile.
+    */
+   private String inputFile ="effective_tld_names.dat";
+   
+   public BasicTldExtractor(String filePath) {
+       this.inputFile=filePath;
+       this.init();
+   }
+   
+	public BasicTldExtractor() {
+      this.init();
+	}
+
+	private void init(){
+	       try {
+	            ArrayList<String> terms = new ArrayList<String>();
+
+	            
+	            BufferedReader br = new BufferedReader(new InputStreamReader(
+	                    getClass().getClassLoader().getResourceAsStream(inputFile)));
+	            String s = null;
+	            while ((s = br.readLine()) != null) {
+	                s = s.trim();
+	                if (s.length() == 0 || s.startsWith("//") || s.startsWith("!"))
+	                    continue;
+	                terms.add(s);
+	            }
+	            Collections.sort(terms, new StringLengthComparator());
+	            for (String t : terms)
+	                add(t);
+	            compile();
+	            br.close();
+	        } catch (IOException e) {
+	            throw new IllegalStateException(e);
+	        }
+	}
+	protected void add(String s) {
+		s = s.replace(".", "\\.");
+		s = "\\." + s;
+		if (s.startsWith("*")) {
+			s = s.replace("*", ".+");
+			sb.append(s).append("|");
+		} else {
+			sb.append(s).append("|");
+		}
+	}
+
+	public void compile() {
+		if (sb.length() > 0)
+			sb.deleteCharAt(sb.length() - 1);
+		sb.insert(0, "[^.]+?(");
+		sb.append(")$");
+		pattern = Pattern.compile(sb.toString());
+		sb = null;
+	}
+
+	public String extract2LD(String host) {
+		Matcher m = pattern.matcher(host);
+		if (m.find()) {
+			return m.group(0);
+		}
+		return null;
+	}
+
+	public String extractTLD(String host) {
+		Matcher m = pattern.matcher(host);
+		if (m.find()) {
+			return m.group(1);
+		}
+		return null;
+	}
+
+	public static class StringLengthComparator implements Comparator<String> {
+		public int compare(String s1, String s2) {
+			if (s1.length() > s2.length())
+				return -1;
+			if (s1.length() < s2.length())
+				return 1;
+			return 0;
+		}
+	}
+    /**
+     * Returns the sb.
+     * @return the sb.
+     */
+    
+    public StringBuilder getSb() {
+        return sb;
+    }
+
+    /**
+     * Sets the sb.
+     * @param sb the sb.
+     */
+    
+    public void setSb(StringBuilder sb) {
+    
+        this.sb = sb;
+    }
+    /**
+     * Returns the inputFile.
+     * @return the inputFile.
+     */
+    
+    public String getInputFile() {
+        return inputFile;
+    }
+
+    /**
+     * Sets the inputFile.
+     * @param inputFile the inputFile.
+     */
+    
+    public void setInputFile(String inputFile) {
+    
+        this.inputFile = inputFile;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/test/BasicTldExtractorTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/test/BasicTldExtractorTest.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/test/BasicTldExtractorTest.java
new file mode 100644
index 0000000..03cc065
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/tldextractor/test/BasicTldExtractorTest.java
@@ -0,0 +1,125 @@
+
+ 
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.tldextractor.test;
+
+import com.opensoc.test.AbstractConfigTest;
+import com.opensoc.tldextractor.BasicTldExtractor;
+
+
+ /**
+ * <ul>
+ * <li>Title: Basic TLD Extractor Test</li>
+ * <li>Description: Basic TLD Extractor class test</li>
+ * <li>Created: Feb 26, 2015</li>
+ * </ul>
+ * @author $Author:  $
+ * @version $Revision: 1.1 $
+ */
+public class BasicTldExtractorTest extends AbstractConfigTest {
+     /**
+     * The tldExtractor.
+     */
+     
+    private BasicTldExtractor tldExtractor=null;
+
+    /**
+     * Constructs a new <code>BasicTldExtractorTest</code> instance.
+     * @param name
+     */
+
+    public BasicTldExtractorTest(String name) {
+        super(name);
+    }
+
+    /**
+     
+     * @throws java.lang.Exception
+     */
+    protected static void setUpBeforeClass() throws Exception {
+    }
+
+    /**
+     
+     * @throws java.lang.Exception
+     */
+    protected static void tearDownAfterClass() throws Exception {
+    }
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#setUp()
+     */
+
+    protected void setUp() throws Exception {
+        super.setUp("com.opensoc.tldextractor.test.BasicTldExtractorTest");
+        this.tldExtractor=new BasicTldExtractor(this.getConfig().getString("logFile"));
+    } 
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#tearDown()
+     */
+
+    protected void tearDown() throws Exception {
+        super.tearDown();
+    }
+
+    /**
+     * Test method for {@link com.opensoc.tldextractor.BasicTldExtractor#BasicTldExtractor()}.
+     */
+    public void testBasicTldExtractor() {
+        assertNotNull(this.tldExtractor);
+    }
+
+    /**
+     * Test method for {@link com.opensoc.tldextractor.BasicTldExtractor#extract2LD(java.lang.String)}.
+     */
+    public void testExtract2LD() {
+        //fail("Not yet implemented");
+    }
+
+    /**
+     * Test method for {@link com.opensoc.tldextractor.BasicTldExtractor#extractTLD(java.lang.String)}.
+     */
+    public void testExtractTLD() 
+    {
+        String result = this.tldExtractor.extractTLD("cisco.com");
+        System.out.println("result ="+result);
+    }
+    /**
+     * Returns the tldExtractor.
+     * @return the tldExtractor.
+     */
+    
+    public BasicTldExtractor getTldExtractor() {
+        return tldExtractor;
+    }
+
+    /**
+     * Sets the tldExtractor.
+     * @param tldExtractor the tldExtractor.
+     */
+    
+    public void setTldExtractor(BasicTldExtractor tldExtractor) {
+    
+        this.tldExtractor = tldExtractor;
+    }
+    
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/Cli.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/Cli.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/Cli.java
deleted file mode 100644
index 9f8bae3..0000000
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/Cli.java
+++ /dev/null
@@ -1,186 +0,0 @@
-package com.opensoc.topologyhelpers;
-
-import java.io.File;
-
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-
-public class Cli {
-
-	private String[] args = null;
-	private Options options = new Options();
-
-	private String path = null;
-	private boolean debug = true;
-	private boolean local_mode = true;
-	private boolean generator_spout = false;
-
-	public boolean isGenerator_spout() {
-		return generator_spout;
-	}
-
-	public void setGenerator_spout(boolean generator_spout) {
-		this.generator_spout = generator_spout;
-	}
-
-	public String getPath() {
-		return path;
-	}
-
-	public void setPath(String path) {
-		this.path = path;
-	}
-
-	public boolean isDebug() {
-		return debug;
-	}
-
-	public void setDebug(boolean debug) {
-		this.debug = debug;
-	}
-
-	public boolean isLocal_mode() {
-		return local_mode;
-	}
-
-	public void setLocal_mode(boolean local_mode) {
-		this.local_mode = local_mode;
-	}
-
-	public Cli(String[] args) {
-
-		this.args = args;
-
-		Option help = new Option("h", "Display help menue");
-		options.addOption(help);
-		options.addOption(
-				"config_path",
-				true,
-				"OPTIONAL ARGUMENT [/path/to/configs] Path to configuration folder. If not provided topology will initialize with default configs");
-		options.addOption(
-				"local_mode",
-				true,
-				"REQUIRED ARGUMENT [true|false] Local mode or cluster mode.  If set to true the topology will run in local mode.  If set to false the topology will be deployed to Storm nimbus");
-		options.addOption(
-				"debug",
-				true,
-				"OPTIONAL ARGUMENT [true|false] Storm debugging enabled.  Default value is true");
-		options.addOption(
-				"generator_spout",
-				true,
-				"REQUIRED ARGUMENT [true|false] Turn on test generator spout.  Default is set to false.  If test generator spout is turned on then kafka spout is turned off.  Instead the generator spout will read telemetry from file and ingest it into a topology");
-	}
-
-	public void parse() {
-		CommandLineParser parser = new BasicParser();
-
-		CommandLine cmd = null;
-		try {
-			cmd = parser.parse(options, args);
-
-			if (cmd.hasOption("h"))
-				help();
-
-			if (cmd.hasOption("local_mode")) {
-
-				String local_value = cmd.getOptionValue("local_mode").trim()
-						.toLowerCase();
-
-				if (local_value.equals("true"))
-					local_mode = true;
-
-				else if (local_value.equals("false"))
-					local_mode = false;
-				else {
-					System.out
-							.println("[OpenSOC] ERROR: Invalid value for local mode");
-					System.out
-							.println("[OpenSOC] ERROR: Using cli argument -local_mode="
-									+ cmd.getOptionValue("local_mode"));
-					help();
-				}
-			} else {
-				System.out
-						.println("[OpenSOC] ERROR: Invalid value for local mode");
-				help();
-			}
-			if (cmd.hasOption("generator_spout")) {
-
-				String local_value = cmd.getOptionValue("generator_spout").trim()
-						.toLowerCase();
-
-				if (local_value.equals("true"))
-					generator_spout = true;
-
-				else if (local_value.equals("false"))
-					generator_spout = false;
-				else {
-					System.out
-							.println("[OpenSOC] ERROR: Invalid value for local generator_spout");
-					System.out
-							.println("[OpenSOC] ERROR: Using cli argument -generator_spout="
-									+ cmd.getOptionValue("generator_spout"));
-					help();
-				}
-			} else {
-				System.out
-						.println("[OpenSOC] ERROR: Invalid value for generator_spout");
-				help();
-			}
-			if (cmd.hasOption("config_path")) {
-
-				path = cmd.getOptionValue("config_path").trim();
-
-				File file = new File(path);
-
-				if (!file.isDirectory() || !file.exists()) {
-					System.out
-							.println("[OpenSOC] ERROR: Invalid settings directory name given");
-					System.out
-							.println("[OpenSOC] ERROR: Using cli argument -config_path="
-									+ cmd.getOptionValue("config_path"));
-					help();
-				}
-			}
-
-			if (cmd.hasOption("debug")) {
-				String debug_value = cmd.getOptionValue("debug");
-
-				if (debug_value.equals("true"))
-					debug = true;
-				else if (debug_value.equals("false"))
-					debug = false;
-				else {
-					System.out
-							.println("[OpenSOC] ERROR: Invalid value for debug_value");
-					System.out
-							.println("[OpenSOC] ERROR: Using cli argument -debug_value="
-									+ cmd.getOptionValue("debug_value"));
-					help();
-				}
-			}
-
-		} catch (ParseException e) {
-			System.out
-					.println("[OpenSOC] ERROR: Failed to parse command line arguments");
-			help();
-		}
-	}
-
-	private void help() {
-		// This prints out some help
-		HelpFormatter formater = new HelpFormatter();
-
-		formater.printHelp("Topology Options:", options);
-
-		System.out
-				.println("[OpenSOC] Example usage: \n storm jar OpenSOC-Topologies-0.3BETA-SNAPSHOT.jar com.opensoc.topology.Bro -local_mode true -config_path OpenSOC_Configs/ -generator_spout true");
-
-		System.exit(0);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/ErrorGenerator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/ErrorGenerator.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/ErrorGenerator.java
deleted file mode 100644
index c21205e..0000000
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/ErrorGenerator.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.opensoc.topologyhelpers;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import org.json.simple.JSONObject;
-
-public class ErrorGenerator {
-
-	public static JSONObject generateErrorMessage(String message, String exception)
-	{
-		JSONObject error_message = new JSONObject();
-		
-		error_message.put("time", System.currentTimeMillis());
-		try {
-			error_message.put("hostname", InetAddress.getLocalHost().getHostName());
-		} catch (UnknownHostException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}
-		
-		error_message.put("message", message);
-		error_message.put("exception", exception);
-		
-		return error_message;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/SettingsLoader.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/SettingsLoader.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/SettingsLoader.java
deleted file mode 100644
index bb2a460..0000000
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/topologyhelpers/SettingsLoader.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package com.opensoc.topologyhelpers;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.configuration.XMLConfiguration;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-
-public class SettingsLoader {
-
-	public static JSONObject loadEnvironmentIdnetifier(String config_path)
-			throws ConfigurationException {
-		Configuration config = new PropertiesConfiguration(config_path);
-
-		String customer = config.getString("customer.id", "unknown");
-		String datacenter = config.getString("datacenter.id", "unknown");
-		String instance = config.getString("instance.id", "unknown");
-
-		JSONObject identifier = new JSONObject();
-		identifier.put("customer", customer);
-		identifier.put("datacenter", datacenter);
-		identifier.put("instance", instance);
-
-		return identifier;
-	}
-
-	public static JSONObject loadTopologyIdnetifier(String config_path)
-			throws ConfigurationException {
-		Configuration config = new PropertiesConfiguration(config_path);
-
-		String topology = config.getString("topology.id", "unknown");
-		String instance = config.getString("instance.id", "unknown");
-
-		JSONObject identifier = new JSONObject();
-		identifier.put("topology", topology);
-		identifier.put("topology_instance", instance);
-
-		return identifier;
-	}
-	
-
-	public static String generateTopologyName(JSONObject env, JSONObject topo) {
-
-		return (env.get("customer") + "_" + env.get("datacenter") + "_"
-				+ env.get("instance") + "_" + topo.get("topology") + "_" + topo.get("topology_instance"));
-	}
-	
-	public static JSONObject generateAlertsIdentifier(JSONObject env, JSONObject topo)
-	{
-		JSONObject identifier = new JSONObject();
-		identifier.put("environment", env);
-		identifier.put("topology", topo);
-		
-		return identifier;
-	}
-
-	public static Map<String, JSONObject> loadRegexAlerts(String config_path)
-			throws ConfigurationException, ParseException {
-		XMLConfiguration alert_rules = new XMLConfiguration();
-		alert_rules.setDelimiterParsingDisabled(true);
-		alert_rules.load(config_path);
-
-		int number_of_rules = alert_rules.getList("rule.pattern").size();
-
-		String[] patterns = alert_rules.getStringArray("rule.pattern");
-		String[] alerts = alert_rules.getStringArray("rule.alert");
-
-		JSONParser pr = new JSONParser();
-		Map<String, JSONObject> rules = new HashMap<String, JSONObject>();
-
-		for (int i = 0; i < patterns.length; i++)
-			rules.put(patterns[i], (JSONObject) pr.parse(alerts[i]));
-
-		return rules;
-	}
-
-	public static Map<String, JSONObject> loadKnownHosts(String config_path)
-			throws ConfigurationException, ParseException {
-		Configuration hosts = new PropertiesConfiguration(config_path);
-
-		Iterator<String> keys = hosts.getKeys();
-		Map<String, JSONObject> known_hosts = new HashMap<String, JSONObject>();
-		JSONParser parser = new JSONParser();
-
-		while (keys.hasNext()) {
-			String key = keys.next().trim();
-			JSONArray value = (JSONArray) parser.parse(hosts.getProperty(key)
-					.toString());
-			known_hosts.put(key, (JSONObject) value.get(0));
-		}
-
-		return known_hosts;
-	}
-
-	public static void printConfigOptions(PropertiesConfiguration config, String path_fragment)
-	{
-		Iterator<String> itr = config.getKeys();
-		
-		while(itr.hasNext())
-		{
-			String key = itr.next();
-			
-			if(key.contains(path_fragment))
-			{
-				
-				System.out.println("[OpenSOC] Key: " + key + " -> " + config.getString(key));
-			}
-		}
-
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/test/resources/config/BasicTldExtractorTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/test/resources/config/BasicTldExtractorTest.config b/opensoc-streaming/OpenSOC-Common/src/test/resources/config/BasicTldExtractorTest.config
new file mode 100644
index 0000000..6b3dc05
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/test/resources/config/BasicTldExtractorTest.config
@@ -0,0 +1,2 @@
+#BasicTldExtractorConfig
+logFile=effective_tld_names.dat



[07/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapScannerHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapScannerHBaseImplTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapScannerHBaseImplTest.java
new file mode 100644
index 0000000..89bad6d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapScannerHBaseImplTest.java
@@ -0,0 +1,232 @@
+package com.opensoc.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scan;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapScannerHBaseImplTest.
+ */
+public class PcapScannerHBaseImplTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_create scan request.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @Test
+  public void test_createScanRequest() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    // actual call
+    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
+        maxResultSize, -1, -1);
+
+    // verify
+    Assert.assertTrue(scan.getTimeRange().getMin() == 0);
+    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+  }
+
+  /**
+   * Test_create scan request_with timestamps.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @Test
+  public void test_createScanRequest_withTimestamps() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    // actual call
+    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
+        maxResultSize, startTime, endTime);
+
+    // verify
+    Assert.assertTrue(scan.getTimeRange().getMin() == 1376782349234L);
+    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+  }
+
+  /**
+   * Test_get pcaps_with all arguments.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_withAllArguments() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey, maxResultSize, startTime,
+        endTime);
+
+    // verify
+    Assert.assertTrue(response.length == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with minimal arguments.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_withMinimalArguments() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey);
+
+    // verify
+    Assert.assertTrue(response.length == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_multiple pcaps.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_multiplePcaps() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey);
+
+    // verify
+    Assert.assertNotNull(response);
+    Assert.assertTrue(response.length > mockPcaps.get(0).length);
+  }
+
+  /**
+   * Gets the test pcap bytes.
+   * 
+   * @return the test pcap bytes
+   * @throws IOException
+   *           the IO exception
+   */
+  private byte[] getTestPcapBytes() throws IOException {
+    File fin = new File("src/test/resources/test-tcp-packet.pcap");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    return pcapBytes;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/hbase-config.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/hbase-config.properties b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/hbase-config.properties
new file mode 100644
index 0000000..66f9d54
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/hbase-config.properties
@@ -0,0 +1,40 @@
+#hbase zoo keeper configuration
+hbase.zookeeper.quorum=dn1.hw.com,dn2.hw.com,dn3.hw.com
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=pcap
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 0.07
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request, applicable for only partial response scenario
+hbase.table.scan.include.duplicate.lastrow=true
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/test-tcp-packet.pcap
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/test-tcp-packet.pcap b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/test-tcp-packet.pcap
new file mode 100644
index 0000000..25d47da
Binary files /dev/null and b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/resources/test-tcp-packet.pcap differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/pom.xml b/opensoc-streaming/OpenSOC-Topologies/pom.xml
new file mode 100644
index 0000000..3ec016f
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/pom.xml
@@ -0,0 +1,188 @@
+<?xml version="1.0" encoding="UTF-8"?><!-- Licensed to the Apache Software 
+	Foundation (ASF) under one or more contributor license agreements. See the 
+	NOTICE file distributed with this work for additional information regarding 
+	copyright ownership. The ASF licenses this file to You under the Apache License, 
+	Version 2.0 (the "License"); you may not use this file except in compliance 
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+	Unless required by applicable law or agreed to in writing, software distributed 
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+	the specific language governing permissions and limitations under the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>com.opensoc</groupId>
+		<artifactId>OpenSOC-Streaming</artifactId>
+		<version>0.6BETA</version>
+	</parent>
+	<artifactId>OpenSOC-Topologies</artifactId>
+	<description>OpenSOC Topologies</description>
+
+	<properties>
+		<storm.hdfs.version>0.9.1.2.1.1.0-385</storm.hdfs.version>
+		<cli.version>20040117.000000</cli.version>
+		<commons.config.version>1.10</commons.config.version>
+	</properties>
+	<repositories>
+		<repository>
+			<id>github-snapshots</id>
+			<url>http://oss.sonatype.org/content/repositories/snapshots/</url>
+		</repository>
+	</repositories>
+	<dependencies>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Common</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Alerts</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-EnrichmentAdapters</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-MessageParsers</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Indexing</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-client</artifactId>
+			<version>${global_hadoop_version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>${global_hadoop_version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.kafka</groupId>
+			<artifactId>kafka_2.8.2</artifactId>
+			<version>${global_kafka_version}</version>
+			<exclusions>
+				<!--exclusion> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> 
+					</exclusion -->
+				<exclusion>
+					<groupId>log4j</groupId>
+					<artifactId>log4j</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.storm</groupId>
+			<artifactId>storm-core</artifactId>
+			<version>${global_storm_version}</version>
+			<scope>provided</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.storm</groupId>
+			<artifactId>storm-kafka</artifactId>
+			<version>${global_storm_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.storm</groupId>
+			<artifactId>storm</artifactId>
+			<version>${global_storm_version}</version>
+			<type>pom</type>
+			<scope>provided</scope>
+		</dependency>
+		
+
+		<dependency>
+			<groupId>com.github.ptgoetz</groupId>
+			<artifactId>storm-hbase</artifactId>
+			<version>0.1.2</version>
+		</dependency>
+		<!-- dependency> <groupId>com.github.ptgoetz</groupId> <artifactId>storm-hdfs</artifactId> 
+			<version>0.1.2</version> </dependency -->
+		<dependency>
+			<groupId>com.github.sheetaldolas</groupId>
+			<artifactId>storm-hdfs</artifactId>
+			<version>0.0.7-SNAPSHOT</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.apache.storm</groupId>
+					<artifactId>storm-core</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-client</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+	</dependencies>
+	<build>
+		<resources>
+			<resource>
+				<directory>src/main/resources</directory>
+			</resource>
+		</resources>
+		<plugins>
+
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-shade-plugin</artifactId>
+				<version>1.4</version>
+				<configuration>
+					<createDependencyReducedPom>true</createDependencyReducedPom>
+				</configuration>
+				<executions>
+					<execution>
+						<phase>package</phase>
+						<goals>
+							<goal>shade</goal>
+						</goals>
+						<configuration>
+							<artifactSet>
+								<excludes>
+									<exclude>storm:storm-core:*</exclude>
+									<exclude>storm:storm-lib:*</exclude>
+									<exclude>*slf4j*</exclude>
+								</excludes>
+							</artifactSet>
+
+							<transformers>
+								<transformer
+									implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+									<resource>.yaml</resource>
+								</transformer>
+								<transformer
+									implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+								<transformer
+									implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+									<mainClass></mainClass>
+								</transformer>
+							</transformers>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/readme.md b/opensoc-streaming/OpenSOC-Topologies/readme.md
new file mode 100644
index 0000000..feac62d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/readme.md
@@ -0,0 +1,47 @@
+#OpenSOC-Topologies
+
+#Module Description
+
+This module provides example topologies that show how to drive OpenSOC modules and components.  The sample topologies provided process PCAP, Ise, Lancope, and Bro telemetries
+
+##Launching Topologies
+
+
+```
+
+storm jar OpenSOC-Topologies-0.6BETA.jar com.opensoc.topology.Pcap
+storm jar OpenSOC-Topologies-0.6BETA.jar com.opensoc.topology.Sourcefire
+storm jar OpenSOC-Topologies-0.6BETA.jar com.opensoc.topology.Lancope
+storm jar OpenSOC-Topologies-0.6BETA.jar com.opensoc.topology.Ise
+
+Topology Options:
+-config_path <arg>       OPTIONAL ARGUMENT [/path/to/configs] Path to
+configuration folder. If not provided topology
+will initialize with default configs
+-debug <arg>             OPTIONAL ARGUMENT [true|false] Storm debugging
+enabled.  Default value is true
+-generator_spout <arg>   REQUIRED ARGUMENT [true|false] Turn on test
+generator spout.  Default is set to false.  If
+test generator spout is turned on then kafka
+spout is turned off.  Instead the generator
+spout will read telemetry from file and ingest
+it into a topology
+-h                       Display help menu
+-local_mode <arg>        REQUIRED ARGUMENT [true|false] Local mode or
+cluster mode.  If set to true the topology will
+run in local mode.  If set to false the topology
+will be deployed to Storm nimbus
+```
+
+##Topology Configs
+
+The sample topologies provided use a specific directory structure.  The example directory structure was checked in here:
+
+```
+https://github.com/OpenSOC/opensoc-streaming/tree/master/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs
+```
+
+topology.conf - settings specific to each topology
+features_enabled.conf - turn on and off features for each topology and control parallelism
+metrics.conf - export definitions for metrics to Graphite 
+topology_identifier.conf - customer-specific tag (since we deploy to multiple data centers we need to identify where the alerts are coming from and what topologies we are looking at when we need to debug)

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Asa.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Asa.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Asa.java
new file mode 100644
index 0000000..68f0c89
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Asa.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.topology;
+
+import org.apache.commons.configuration.ConfigurationException;
+
+import backtype.storm.generated.InvalidTopologyException;
+
+import com.opensoc.topology.runner.AsaRunner;
+import com.opensoc.topology.runner.TopologyRunner;
+
+
+/**
+ * Topology for processing Asa messages
+ *
+ */
+public class Asa{
+	
+	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {
+		
+		TopologyRunner runner = new AsaRunner();
+		runner.initTopology(args, "asa");
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/FireEye.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/FireEye.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/FireEye.java
new file mode 100644
index 0000000..e1f489b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/FireEye.java
@@ -0,0 +1,21 @@
+package com.opensoc.topology;
+
+import org.apache.commons.configuration.ConfigurationException;
+import backtype.storm.generated.InvalidTopologyException;
+import com.opensoc.topology.runner.FireEyeRunner;
+import com.opensoc.topology.runner.TopologyRunner;
+
+
+/**
+ * Topology for processing FireEye syslog messages
+ *
+ */
+public class FireEye {
+
+	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {
+		
+		TopologyRunner runner = new FireEyeRunner();
+		runner.initTopology(args, "fireeye");
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Ise.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Ise.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Ise.java
index d1fca55..7bcd0c2 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Ise.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Ise.java
@@ -24,6 +24,10 @@ import backtype.storm.generated.InvalidTopologyException;
 import com.opensoc.topology.runner.ISERunner;
 import com.opensoc.topology.runner.TopologyRunner;
 
+/**
+ * Topology for processing Ise messages
+ *
+ */
 public class Ise{
 	
 	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Lancope.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Lancope.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Lancope.java
index 236836f..c3ecc54 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Lancope.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Lancope.java
@@ -24,6 +24,11 @@ import backtype.storm.generated.InvalidTopologyException;
 import com.opensoc.topology.runner.LancopeRunner;
 import com.opensoc.topology.runner.TopologyRunner;
 
+
+/**
+ * Topology for processing Lancope messages
+ *
+ */
 public class Lancope{
 	
 	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/PaloAltoFirewall.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/PaloAltoFirewall.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/PaloAltoFirewall.java
new file mode 100644
index 0000000..222cc29
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/PaloAltoFirewall.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.topology;
+
+import org.apache.commons.configuration.ConfigurationException;
+
+import backtype.storm.generated.InvalidTopologyException;
+
+import com.opensoc.topology.runner.AsaRunner;
+import com.opensoc.topology.runner.PaloAltoFirewallRunner;
+import com.opensoc.topology.runner.TopologyRunner;
+
+
+/**
+ * Topology for processing Palo Alto Firewall Syslog messages
+ *
+ */
+public class PaloAltoFirewall {
+	
+	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {
+		
+		TopologyRunner runner = new PaloAltoFirewallRunner();
+		runner.initTopology(args, "paloalto");
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Pcap.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Pcap.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Pcap.java
index 4fbd7c0..2532893 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Pcap.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/Pcap.java
@@ -24,6 +24,12 @@ import backtype.storm.generated.InvalidTopologyException;
 import com.opensoc.topology.runner.PcapRunner;
 import com.opensoc.topology.runner.TopologyRunner;
 
+
+/**
+ * Topology for processing raw packet messages
+ *
+ */
+
 public class Pcap{
 	
 	public static void main(String[] args) throws ConfigurationException, Exception, InvalidTopologyException {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/AsaRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/AsaRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/AsaRunner.java
new file mode 100644
index 0000000..8cc2db7
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/AsaRunner.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.topology.runner;
+
+import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
+import com.opensoc.parsing.AbstractParserBolt;
+import com.opensoc.parsing.TelemetryParserBolt;
+import com.opensoc.test.spouts.GenericInternalTestSpout;
+
+public class AsaRunner extends TopologyRunner{
+	
+	 static String test_file_path = "SampleInput/AsaOutput";
+
+	@Override
+	public boolean initializeParsingBolt(String topology_name,
+			String name) {
+		try {
+			
+			String messageUpstreamComponent = messageComponents.get(messageComponents.size()-1);
+			
+			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
+
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+			
+	        
+			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
+					.withMessageParser(parser)
+					.withOutputFieldName(topology_name)
+					.withMessageFilter(new GenericMessageFilter())
+					.withMetricConfig(config);
+
+			builder.setBolt(name, parser_bolt,
+					config.getInt("bolt.parser.parallelism.hint"))
+					.shuffleGrouping(messageUpstreamComponent)
+					.setNumTasks(config.getInt("bolt.parser.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+
+		return true;
+	}
+
+	@Override	
+	public  boolean initializeTestingSpout(String name) {
+		try {
+
+			System.out.println("[OpenSOC] Initializing Test Spout");
+
+			GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
+					.withFilename(test_file_path).withRepeating(
+							config.getBoolean("spout.test.parallelism.repeat"));
+
+			builder.setSpout(name, testSpout,
+					config.getInt("spout.test.parallelism.hint")).setNumTasks(
+					config.getInt("spout.test.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+		return true;
+	}
+	
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/BroRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/BroRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/BroRunner.java
index fd951b3..c448017 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/BroRunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/BroRunner.java
@@ -18,9 +18,9 @@
 package com.opensoc.topology.runner;
 
 import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
 import com.opensoc.parsing.AbstractParserBolt;
 import com.opensoc.parsing.TelemetryParserBolt;
-import com.opensoc.parsing.parsers.BasicBroParser;
 import com.opensoc.test.spouts.GenericInternalTestSpout;
 
 public class BroRunner extends TopologyRunner{
@@ -36,8 +36,19 @@ public class BroRunner extends TopologyRunner{
 			
 			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
 			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+			
 			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
-					.withMessageParser(new BasicBroParser())
+					.withMessageParser(parser)
 					.withOutputFieldName(topology_name)
 					.withMessageFilter(new GenericMessageFilter())
 					.withMetricConfig(config);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/FireEyeRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/FireEyeRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/FireEyeRunner.java
new file mode 100644
index 0000000..31026df
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/FireEyeRunner.java
@@ -0,0 +1,77 @@
+package com.opensoc.topology.runner;
+
+import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
+import com.opensoc.parsing.AbstractParserBolt;
+import com.opensoc.parsing.TelemetryParserBolt;
+import com.opensoc.test.spouts.GenericInternalTestSpout;
+
+public class FireEyeRunner extends TopologyRunner{
+	
+	 static String test_file_path = "SampleInput/FireeyeExampleOutput";
+
+	@Override
+	public boolean initializeParsingBolt(String topology_name,
+			String name) {
+		try {
+			
+			String messageUpstreamComponent = messageComponents.get(messageComponents.size()-1);
+			
+			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
+
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+	        
+	        
+			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
+					.withMessageParser(parser)
+					.withOutputFieldName(topology_name)
+					.withMessageFilter(new GenericMessageFilter())
+					.withMetricConfig(config);
+
+			builder.setBolt(name, parser_bolt,
+					config.getInt("bolt.parser.parallelism.hint"))
+					.shuffleGrouping(messageUpstreamComponent)
+					.setNumTasks(config.getInt("bolt.parser.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+
+		return true;
+	}
+
+	@Override	
+	public  boolean initializeTestingSpout(String name) {
+		try {
+
+			System.out.println("[OpenSOC] Initializing Test Spout");
+
+			GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
+					.withFilename(test_file_path).withRepeating(
+							config.getBoolean("spout.test.parallelism.repeat"));
+
+			builder.setSpout(name, testSpout,
+					config.getInt("spout.test.parallelism.hint")).setNumTasks(
+					config.getInt("spout.test.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+		return true;
+	}
+	
+	
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/ISERunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/ISERunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/ISERunner.java
index 87a648d..7f377d5 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/ISERunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/ISERunner.java
@@ -18,9 +18,9 @@
 package com.opensoc.topology.runner;
 
 import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
 import com.opensoc.parsing.AbstractParserBolt;
 import com.opensoc.parsing.TelemetryParserBolt;
-import com.opensoc.parsing.parsers.BasicIseParser;
 import com.opensoc.test.spouts.GenericInternalTestSpout;
 
 public class ISERunner extends TopologyRunner{
@@ -36,8 +36,21 @@ public class ISERunner extends TopologyRunner{
 			
 			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
 
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+			
+			
 			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
-					.withMessageParser(new BasicIseParser())
+					.withMessageParser(parser)
 					.withOutputFieldName(topology_name)
 					.withMessageFilter(new GenericMessageFilter())
 					.withMetricConfig(config);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/LancopeRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/LancopeRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/LancopeRunner.java
index ef73e13..1031abf 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/LancopeRunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/LancopeRunner.java
@@ -18,6 +18,7 @@
 package com.opensoc.topology.runner;
 
 import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
 import com.opensoc.parsing.AbstractParserBolt;
 import com.opensoc.parsing.TelemetryParserBolt;
 import com.opensoc.parsing.parsers.BasicLancopeParser;
@@ -36,8 +37,20 @@ public class LancopeRunner extends TopologyRunner{
 			
 			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
 
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+			
 			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
-					.withMessageParser(new BasicLancopeParser())
+					.withMessageParser(parser)
 					.withOutputFieldName(topology_name)
 					.withMessageFilter(new GenericMessageFilter())
 					.withMetricConfig(config);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PaloAltoFirewallRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PaloAltoFirewallRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PaloAltoFirewallRunner.java
new file mode 100644
index 0000000..0b6adad
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PaloAltoFirewallRunner.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.topology.runner;
+
+import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
+import com.opensoc.parsing.AbstractParserBolt;
+import com.opensoc.parsing.TelemetryParserBolt;
+import com.opensoc.test.spouts.GenericInternalTestSpout;
+
+public class PaloAltoFirewallRunner extends TopologyRunner{
+	
+	 static String test_file_path = "SampleInput/PaloaltoOutput";
+
+	@Override
+	public boolean initializeParsingBolt(String topology_name,
+			String name) {
+		try {
+			
+			String messageUpstreamComponent = messageComponents.get(messageComponents.size()-1);
+			
+			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
+
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+	        
+			
+	        
+			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
+					.withMessageParser(parser)
+					.withOutputFieldName(topology_name)
+					.withMessageFilter(new GenericMessageFilter())
+					.withMetricConfig(config);
+
+			builder.setBolt(name, parser_bolt,
+					config.getInt("bolt.parser.parallelism.hint"))
+					.shuffleGrouping(messageUpstreamComponent)
+					.setNumTasks(config.getInt("bolt.parser.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+
+		return true;
+	}
+
+	@Override	
+	public  boolean initializeTestingSpout(String name) {
+		try {
+
+			System.out.println("[OpenSOC] Initializing Test Spout");
+
+			GenericInternalTestSpout testSpout = new GenericInternalTestSpout()
+					.withFilename(test_file_path).withRepeating(
+							config.getBoolean("spout.test.parallelism.repeat"));
+
+			builder.setSpout(name, testSpout,
+					config.getInt("spout.test.parallelism.hint")).setNumTasks(
+					config.getInt("spout.test.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+		return true;
+	}
+	
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PcapRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PcapRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PcapRunner.java
index 9620483..a26a467 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PcapRunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/PcapRunner.java
@@ -56,7 +56,9 @@ public class PcapRunner extends TopologyRunner{
 			
 			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
 			
-			builder.setBolt(name, new PcapParserBolt(),
+			PcapParserBolt pcapParser = new PcapParserBolt().withTsPrecision(config.getString("bolt.parser.ts.precision"));
+			
+			builder.setBolt(name, pcapParser,
 					config.getInt("bolt.parser.parallelism.hint"))
 					.setNumTasks(config.getInt("bolt.parser.num.tasks"))
 					.shuffleGrouping(messageUpstreamComponent);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/SourcefireRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/SourcefireRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/SourcefireRunner.java
index 04d2fdf..69b4581 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/SourcefireRunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/SourcefireRunner.java
@@ -18,9 +18,9 @@
 package com.opensoc.topology.runner;
 
 import com.opensoc.filters.GenericMessageFilter;
+import com.opensoc.parser.interfaces.MessageParser;
 import com.opensoc.parsing.AbstractParserBolt;
 import com.opensoc.parsing.TelemetryParserBolt;
-import com.opensoc.parsing.parsers.BasicSourcefireParser;
 import com.opensoc.test.spouts.GenericInternalTestSpout;
 
 public class SourcefireRunner extends TopologyRunner{
@@ -36,8 +36,21 @@ public class SourcefireRunner extends TopologyRunner{
 			
 			System.out.println("[OpenSOC] ------" +  name + " is initializing from " + messageUpstreamComponent);
 
+			
+			String class_name = config.getString("bolt.parser.adapter");
+			
+			if(class_name == null)
+			{
+				System.out.println("[OpenSOC] Parser adapter not set.  Please set bolt.indexing.adapter in topology.conf");
+				throw new Exception("Parser adapter not set");
+			}
+			
+			Class loaded_class = Class.forName(class_name);
+			MessageParser parser = (MessageParser) loaded_class.newInstance();
+	        
+	        
 			AbstractParserBolt parser_bolt = new TelemetryParserBolt()
-					.withMessageParser(new BasicSourcefireParser())
+					.withMessageParser(parser)
 					.withOutputFieldName(topology_name)
 					.withMessageFilter(new GenericMessageFilter())
 					.withMetricConfig(config);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java
index 095d3be..72c2240 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/java/com/opensoc/topology/runner/TopologyRunner.java
@@ -17,14 +17,17 @@
  */
 package com.opensoc.topology.runner;
 
+import java.lang.reflect.Constructor;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Stack;
 
+import oi.thekraken.grok.api.Grok;
+
 import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.commons.lang.StringUtils;
 import org.apache.storm.hdfs.bolt.HdfsBolt;
@@ -48,15 +51,18 @@ import storm.kafka.bolt.KafkaBolt;
 import backtype.storm.Config;
 import backtype.storm.LocalCluster;
 import backtype.storm.StormSubmitter;
-import backtype.storm.generated.AlreadyAliveException;
 import backtype.storm.generated.Grouping;
-import backtype.storm.generated.InvalidTopologyException;
 import backtype.storm.spout.RawScheme;
 import backtype.storm.spout.SchemeAsMultiScheme;
 import backtype.storm.topology.BoltDeclarer;
 import backtype.storm.topology.TopologyBuilder;
 import backtype.storm.tuple.Fields;
 
+import com.esotericsoftware.kryo.serializers.FieldSerializer;
+import com.esotericsoftware.kryo.serializers.MapSerializer;
+
+
+
 import com.opensoc.alerts.TelemetryAlertsBolt;
 import com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter;
 import com.opensoc.alerts.interfaces.AlertsAdapter;
@@ -64,17 +70,17 @@ import com.opensoc.enrichment.adapters.cif.CIFHbaseAdapter;
 import com.opensoc.enrichment.adapters.geo.GeoMysqlAdapter;
 import com.opensoc.enrichment.adapters.host.HostFromPropertiesFileAdapter;
 import com.opensoc.enrichment.adapters.whois.WhoisHBaseAdapter;
+import com.opensoc.enrichment.adapters.threat.ThreatHbaseAdapter;
 import com.opensoc.enrichment.common.GenericEnrichmentBolt;
 import com.opensoc.enrichment.interfaces.EnrichmentAdapter;
 import com.opensoc.hbase.HBaseBolt;
 import com.opensoc.hbase.HBaseStreamPartitioner;
 import com.opensoc.hbase.TupleTableConfig;
+import com.opensoc.helpers.topology.Cli;
+import com.opensoc.helpers.topology.SettingsLoader;
+import com.opensoc.index.interfaces.IndexAdapter;
 import com.opensoc.indexing.TelemetryIndexingBolt;
-import com.opensoc.indexing.adapters.ESBaseBulkAdapter;
-import com.opensoc.indexing.adapters.ESTimedRotatingAdapter;
 import com.opensoc.json.serialization.JSONKryoSerializer;
-import com.opensoc.topologyhelpers.Cli;
-import com.opensoc.topologyhelpers.SettingsLoader;
 
 public abstract class TopologyRunner {
 
@@ -93,8 +99,7 @@ public abstract class TopologyRunner {
 	protected Stack<String> terminalComponents = new Stack<String>();
 
 	public void initTopology(String args[], String subdir)
-			throws ConfigurationException, AlreadyAliveException,
-			InvalidTopologyException {
+			throws Exception {
 		Cli command_line = new Cli(args);
 		command_line.parse();
 
@@ -148,7 +153,7 @@ public abstract class TopologyRunner {
 		builder = new TopologyBuilder();
 
 		conf = new Config();
-		conf.registerSerialization(JSONObject.class, JSONKryoSerializer.class);
+		conf.registerSerialization(JSONObject.class, MapSerializer.class);
 		conf.setDebug(debug);
 
 		System.out.println("[OpenSOC] Initializing Spout: " + topology_name);
@@ -180,8 +185,8 @@ public abstract class TopologyRunner {
 					"spout.kafka");
 		}
 
-		if (config.getBoolean("parser.bolt.enabled", true)) {
-			String component_name = config.getString("parser.bolt.name",
+		if (config.getBoolean("bolt.parser.enabled", true)) {
+			String component_name = config.getString("bolt.parser.name",
 					"DefaultTopologyParserBot");
 
 			success = initializeParsingBolt(topology_name, component_name);
@@ -194,7 +199,7 @@ public abstract class TopologyRunner {
 					+ " initialized with the following settings:");
 
 			SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
-					"parser.bolt");
+					"bolt.parser");
 		}
 
 		if (config.getBoolean("bolt.enrichment.geo.enabled", false)) {
@@ -259,6 +264,21 @@ public abstract class TopologyRunner {
 			SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
 					"bolt.enrichment.cif");
 		}
+		
+		if (config.getBoolean("bolt.enrichment.threat.enabled", false)) {
+			String component_name = config.getString(
+					"bolt.enrichment.threat.name", "DefaultThreatEnrichmentBolt");
+
+			success = initializeThreatEnrichment(topology_name, component_name);
+			messageComponents.add(component_name);
+			errorComponents.add(component_name);
+
+			System.out.println("[OpenSOC] ------Component " + component_name
+					+ " initialized with the following settings:");
+
+			SettingsLoader.printConfigOptions((PropertiesConfiguration) config,
+					"bolt.enrichment.threat");
+		}
 
 		if (config.getBoolean("bolt.alerts.enabled", false)) {
 			String component_name = config.getString("bolt.alerts.name",
@@ -392,6 +412,7 @@ public abstract class TopologyRunner {
 		} else {
 
 			conf.setNumWorkers(config.getInt("num.workers"));
+			conf.setNumAckers(config.getInt("num.ackers"));
 			StormSubmitter.submitTopology(topology_name, conf,
 					builder.createTopology());
 		}
@@ -486,7 +507,15 @@ public abstract class TopologyRunner {
 
 	private boolean initializeErrorIndexBolt(String component_name) {
 		try {
+			
+			Class loaded_class = Class.forName(config.getString("bolt.error.indexing.adapter"));
+			IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
 
+			String dateFormat = "yyyy.MM";
+			if (config.containsKey("bolt.alerts.indexing.timestamp")) {
+				dateFormat = config.getString("bolt.alerts.indexing.timestamp");
+			}
+			
 			TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
 					.withIndexIP(config.getString("es.ip"))
 					.withIndexPort(config.getInt("es.port"))
@@ -495,8 +524,9 @@ public abstract class TopologyRunner {
 							config.getString("bolt.error.indexing.indexname"))
 					.withDocumentName(
 							config.getString("bolt.error.indexing.documentname"))
+					.withIndexTimestamp(dateFormat)
 					.withBulk(config.getInt("bolt.error.indexing.bulk"))
-					.withIndexAdapter(new ESBaseBulkAdapter())
+					.withIndexAdapter(adapter)
 					.withMetricConfiguration(config);
 
 			BoltDeclarer declarer = builder
@@ -553,10 +583,10 @@ public abstract class TopologyRunner {
 			System.out.println("[OpenSOC] ------" + name
 					+ " is initializing from " + messageUpstreamComponent);
 
-			List<String> geo_keys = new ArrayList<String>();
-			geo_keys.add(config.getString("source.ip"));
-			geo_keys.add(config.getString("dest.ip"));
-
+			
+			String[] keys_from_settings = config.getStringArray("bolt.enrichment.geo.fields");
+			List<String> geo_keys = new ArrayList<String>(Arrays.asList(keys_from_settings));
+			
 			GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter(
 					config.getString("mysql.ip"), config.getInt("mysql.port"),
 					config.getString("mysql.username"),
@@ -569,9 +599,9 @@ public abstract class TopologyRunner {
 					.withOutputFieldName(topology_name)
 					.withAdapter(geo_adapter)
 					.withMaxTimeRetain(
-							config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN"))
+							config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES"))
 					.withMaxCacheSize(
-							config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE"))
+							config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM"))
 					.withKeys(geo_keys).withMetricConfiguration(config);
 
 			builder.setBolt(name, geo_enrichment,
@@ -614,9 +644,9 @@ public abstract class TopologyRunner {
 							config.getString("bolt.enrichment.host.enrichment_tag"))
 					.withAdapter(host_adapter)
 					.withMaxTimeRetain(
-							config.getInt("bolt.enrichment.host.MAX_TIME_RETAIN"))
+							config.getInt("bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES"))
 					.withMaxCacheSize(
-							config.getInt("bolt.enrichment.host.MAX_CACHE_SIZE"))
+							config.getInt("bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM"))
 					.withOutputFieldName(topology_name).withKeys(hosts_keys)
 					.withMetricConfiguration(config);
 
@@ -635,10 +665,23 @@ public abstract class TopologyRunner {
 		return true;
 	}
 
+	@SuppressWarnings("rawtypes")
 	private boolean initializeAlerts(String topology_name, String name,
 			String alerts_path, JSONObject environment_identifier,
 			JSONObject topology_identifier) {
 		try {
+			
+			Class loaded_class = Class.forName(config.getString("bolt.alerts.adapter"));
+			Constructor constructor = loaded_class.getConstructor(new Class[] { Map.class});
+			
+			Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, config.getString("bolt.alerts.adapter") + ".");
+			
+			System.out.println("Adapter Settings: ");
+			SettingsLoader.printOptionalSettings(settings);
+			
+			AlertsAdapter alerts_adapter = (AlertsAdapter) constructor.newInstance(settings);
+			
+	
 
 			String messageUpstreamComponent = messageComponents
 					.get(messageComponents.size() - 1);
@@ -650,10 +693,7 @@ public abstract class TopologyRunner {
 					.generateAlertsIdentifier(environment_identifier,
 							topology_identifier);
 
-			AlertsAdapter alerts_adapter = new HbaseWhiteAndBlacklistAdapter(
-					"ip_whitelist", "ip_blacklist",
-					config.getString("kafka.zk.list"),
-					config.getString("kafka.zk.port"), 3600, 1000);
+			 
 
 			TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt()
 					.withIdentifier(alerts_identifier).withMaxCacheSize(1000)
@@ -675,12 +715,21 @@ public abstract class TopologyRunner {
 	}
 
 	private boolean initializeAlertIndexing(String name) {
+		
+		try{
 		String messageUpstreamComponent = alertComponents.get(alertComponents
 				.size() - 1);
 
 		System.out.println("[OpenSOC] ------" + name + " is initializing from "
 				+ messageUpstreamComponent);
+		
+		Class loaded_class = Class.forName(config.getString("bolt.alerts.indexing.adapter"));
+		IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
 
+		String dateFormat = "yyyy.MM.dd";
+		if (config.containsKey("bolt.alerts.indexing.timestamp")) {
+			dateFormat = config.getString("bolt.alerts.indexing.timestamp");
+		}
 		TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
 				.withIndexIP(config.getString("es.ip"))
 				.withIndexPort(config.getInt("es.port"))
@@ -689,8 +738,9 @@ public abstract class TopologyRunner {
 						config.getString("bolt.alerts.indexing.indexname"))
 				.withDocumentName(
 						config.getString("bolt.alerts.indexing.documentname"))
+				.withIndexTimestamp(dateFormat)
 				.withBulk(config.getInt("bolt.alerts.indexing.bulk"))
-				.withIndexAdapter(new ESBaseBulkAdapter())
+				.withIndexAdapter(adapter)
 				.withMetricConfiguration(config);
 
 		String alerts_name = config.getString("bolt.alerts.indexing.name");
@@ -698,6 +748,12 @@ public abstract class TopologyRunner {
 				config.getInt("bolt.indexing.parallelism.hint"))
 				.shuffleGrouping(messageUpstreamComponent, "alert")
 				.setNumTasks(config.getInt("bolt.indexing.num.tasks"));
+		}
+		catch(Exception e)
+		{
+			e.printStackTrace();
+			return false;
+		}
 
 		return true;
 	}
@@ -748,12 +804,8 @@ public abstract class TopologyRunner {
 			System.out.println("[OpenSOC] ------" + name
 					+ " is initializing from " + messageUpstreamComponent);
 
-			List<String> whois_keys = new ArrayList<String>();
-			String[] keys_from_settings = config.getString(
-					"bolt.enrichment.whois.source").split(",");
-
-			for (String key : keys_from_settings)
-				whois_keys.add(key);
+			String[] keys_from_settings = config.getString("bolt.enrichment.whois.fields").split(",");
+			List<String> whois_keys = new ArrayList<String>(Arrays.asList(keys_from_settings));
 
 			EnrichmentAdapter whois_adapter = new WhoisHBaseAdapter(
 					config.getString("bolt.enrichment.whois.hbase.table.name"),
@@ -766,9 +818,9 @@ public abstract class TopologyRunner {
 					.withOutputFieldName(topology_name)
 					.withAdapter(whois_adapter)
 					.withMaxTimeRetain(
-							config.getInt("bolt.enrichment.whois.MAX_TIME_RETAIN"))
+							config.getInt("bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES"))
 					.withMaxCacheSize(
-							config.getInt("bolt.enrichment.whois.MAX_CACHE_SIZE"))
+							config.getInt("bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM"))
 					.withKeys(whois_keys).withMetricConfiguration(config);
 
 			builder.setBolt(name, whois_enrichment,
@@ -794,16 +846,34 @@ public abstract class TopologyRunner {
 
 			System.out.println("[OpenSOC] ------" + name
 					+ " is initializing from " + messageUpstreamComponent);
+			
+			Class loaded_class = Class.forName(config.getString("bolt.indexing.adapter"));
+			IndexAdapter adapter = (IndexAdapter) loaded_class.newInstance();
+			
+			Map<String, String> settings = SettingsLoader.getConfigOptions((PropertiesConfiguration)config, "optional.settings.bolt.index.search.");
+			
+			if(settings != null && settings.size() > 0)
+			{
+				adapter.setOptionalSettings(settings);
+				System.out.println("[OpenSOC] Index Bolt picket up optional settings:");
+				SettingsLoader.printOptionalSettings(settings);			
+			}
 
+			// dateFormat defaults to hourly if not specified
+			String dateFormat = "yyyy.MM.dd.hh";
+			if (config.containsKey("bolt.indexing.timestamp")) {
+				dateFormat = config.getString("bolt.indexing.timestamp");
+			}
 			TelemetryIndexingBolt indexing_bolt = new TelemetryIndexingBolt()
 					.withIndexIP(config.getString("es.ip"))
 					.withIndexPort(config.getInt("es.port"))
 					.withClusterName(config.getString("es.clustername"))
 					.withIndexName(config.getString("bolt.indexing.indexname"))
+					.withIndexTimestamp(dateFormat)
 					.withDocumentName(
 							config.getString("bolt.indexing.documentname"))
 					.withBulk(config.getInt("bolt.indexing.bulk"))
-					.withIndexAdapter(new ESTimedRotatingAdapter())
+					.withIndexAdapter(adapter)
 					.withMetricConfiguration(config);
 
 			builder.setBolt(name, indexing_bolt,
@@ -819,6 +889,50 @@ public abstract class TopologyRunner {
 
 		return true;
 	}
+	
+	
+	private boolean initializeThreatEnrichment(String topology_name, String name) {
+		try {
+
+			String messageUpstreamComponent = messageComponents
+					.get(messageComponents.size() - 1);
+
+			System.out.println("[OpenSOC] ------" + name
+					+ " is initializing from " + messageUpstreamComponent);
+
+			String[] fields = config.getStringArray("bolt.enrichment.threat.fields");
+			List<String> threat_keys = new ArrayList<String>(Arrays.asList(fields));
+
+			GenericEnrichmentBolt threat_enrichment = new GenericEnrichmentBolt()
+					.withEnrichmentTag(
+							config.getString("bolt.enrichment.threat.enrichment_tag"))
+					.withAdapter(
+							new ThreatHbaseAdapter(config
+									.getString("kafka.zk.list"), config
+									.getString("kafka.zk.port"), config
+									.getString("bolt.enrichment.threat.tablename")))
+					.withOutputFieldName(topology_name)
+					.withEnrichmentTag(config.getString("bolt.enrichment.threat.enrichment_tag"))
+					.withKeys(threat_keys)
+					.withMaxTimeRetain(
+							config.getInt("bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES"))
+					.withMaxCacheSize(
+							config.getInt("bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM"))
+					.withMetricConfiguration(config);
+
+			builder.setBolt(name, threat_enrichment,
+					config.getInt("bolt.enrichment.threat.parallelism.hint"))
+					.fieldsGrouping(messageUpstreamComponent, "message",
+							new Fields("key"))
+					.setNumTasks(config.getInt("bolt.enrichment.threat.num.tasks"));
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(0);
+		}
+
+		return true;
+	}
 
 	private boolean initializeCIFEnrichment(String topology_name, String name) {
 		try {
@@ -831,11 +945,15 @@ public abstract class TopologyRunner {
 
 			List<String> cif_keys = new ArrayList<String>();
 
-			cif_keys.add(config.getString("source.ip"));
-			cif_keys.add(config.getString("dest.ip"));
-			cif_keys.add(config.getString("bolt.enrichment.cif.host"));
-			cif_keys.add(config.getString("bolt.enrichment.cif.email"));
-
+			String[] ipFields = config.getStringArray("bolt.enrichment.cif.fields.ip");
+			cif_keys.addAll(Arrays.asList(ipFields));
+			
+			String[] hostFields = config.getStringArray("bolt.enrichment.cif.fields.host");
+			cif_keys.addAll(Arrays.asList(hostFields));
+			
+			String[] emailFields = config.getStringArray("bolt.enrichment.cif.fields.email");
+			cif_keys.addAll(Arrays.asList(emailFields));
+			
 			GenericEnrichmentBolt cif_enrichment = new GenericEnrichmentBolt()
 					.withEnrichmentTag(
 							config.getString("bolt.enrichment.cif.enrichment_tag"))
@@ -845,12 +963,11 @@ public abstract class TopologyRunner {
 									.getString("kafka.zk.port"), config
 									.getString("bolt.enrichment.cif.tablename")))
 					.withOutputFieldName(topology_name)
-					.withEnrichmentTag("CIF_Enrichment")
 					.withKeys(cif_keys)
 					.withMaxTimeRetain(
-							config.getInt("bolt.enrichment.cif.MAX_TIME_RETAIN"))
+							config.getInt("bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES"))
 					.withMaxCacheSize(
-							config.getInt("bolt.enrichment.cif.MAX_CACHE_SIZE"))
+							config.getInt("bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM"))
 					.withMetricConfiguration(config);
 
 			builder.setBolt(name, cif_enrichment,

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/features_enabled.conf
new file mode 100644
index 0000000..5b45dde
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/features_enabled.conf
@@ -0,0 +1,113 @@
+#Enable and disable features for each topology
+
+#Feature: Test spout 
+##Feature Description: Reads telemetry from file and ingests it into topology.  Used for testing or bulk loading the topology
+
+spout.test.name=TestSpout
+spout.test.enabled=true
+spout.test.num.tasks=1
+spout.test.parallelism.hint=1
+
+#Feature: Kafka spout
+##Feature Description: Acts as a Kafka consumer.  Takes messages from a Kafka topic and ingests them into a topology
+
+spout.kafka.name=KafkaSpout
+spout.kafka.enabled=false
+spout.kafka.num.tasks=1
+spout.kafka.parallelism.hint=1
+
+#Feature: Parser Bolt
+##Feature Description: Parses telemetry from its native format into a native JSON
+
+bolt.parser.name=ParserBolt
+bolt.parser.enabled=true
+bolt.parser.num.tasks=1
+bolt.parser.parallelism.hint=1
+
+#Feature: Host Enrichment
+##Feature Description: Appends information about known hosts to a telemetry message
+
+bolt.enrichment.host.name=HostEnrichment
+bolt.enrichment.host.enabled=true
+bolt.enrichment.host.num.tasks=1
+bolt.enrichment.host.parallelism.hint=1
+
+#Feature: Geo Enrichment
+##Feature Description: Appends geo information about known non-local IPs to a telemetry message
+
+bolt.enrichment.geo.name=GeoEnrichment 
+bolt.enrichment.geo.enabled=true
+bolt.enrichment.geo.num.tasks=1
+bolt.enrichment.geo.parallelism.hint=1
+
+#Feature: Whois Enrichment
+##Feature Description: Appends whois information about known domains to a telemetry message
+
+bolt.enrichment.whois.name=WhoisEnrichment
+bolt.enrichment.whois.enabled=false
+bolt.enrichment.whois.num.tasks=1
+bolt.enrichment.whois.parallelism.hint=1
+
+#Feature: CIF Enrichment
+##Feature Description: Appends information from CIF threat intelligence feeds to a telemetry message
+
+bolt.enrichment.cif.name=SIFBolt
+bolt.enrichment.cif.enabled=false
+bolt.enrichment.cif.num.tasks=1
+bolt.enrichment.cif.parallelism.hint=1
+
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
+#Feature: Rules-Based Alerts
+##Feature Description: Tags messages with rules-based alerts
+
+bolt.alerts.name=Alerts
+bolt.alerts.enabled=true
+bolt.alerts.num.tasks=1
+bolt.alerts.parallelism.hint=1
+
+#Feature: Indexer
+##Feature Description: Indexes telemetry messages in ElasticSearch or Solr
+
+bolt.indexing.name=IndexBolt
+bolt.indexing.enabled=true
+bolt.indexing.num.tasks=1
+bolt.indexing.parallelism.hint=1
+
+#Feature: Alerts Indexer
+##Feature Description: Indexes alert messages in ElasticSearch or Solr
+
+bolt.alerts.indexing.name=AlertIndexBolt
+bolt.alerts.indexing.enabled=true
+bolt.alerts.indexing.num.tasks=1
+bolt.alerts.indexing.parallelism.hint=1
+
+#Feature: Error Indexer
+##Feature Description: Indexes error messages in ElasticSearch or Solr
+
+bolt.error.indexing.name=ErrorIndexBolt
+bolt.error.indexing.enabled=true
+bolt.error.indexing.num.tasks=1
+bolt.error.indexing.parallelism.hint=1
+
+#Feature: Kafka Bolt
+##Feature Description: Writes telemetry messages back into a Kafka topic
+
+bolt.kafka.name=KafkaBolt
+bolt.kafka.enabled=false
+bolt.kafka.num.tasks=1
+bolt.kafka.parallelism.hint=1
+
+#Feature: HDFS Bolt
+##Feature Description: Writes telemetry messages into HDFS
+
+bolt.hdfs.name=HDFSBolt
+bolt.hdfs.enabled=false
+bolt.hdfs.num.tasks=1
+bolt.hdfs.parallelism.hint=1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/metrics.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/metrics.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/metrics.conf
new file mode 100644
index 0000000..1daef3d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/metrics.conf
@@ -0,0 +1,26 @@
+#reporters
+com.opensoc.metrics.reporter.graphite=true
+com.opensoc.metrics.reporter.console=false
+com.opensoc.metrics.reporter.jmx=false
+
+#Graphite Addresses
+
+com.opensoc.metrics.graphite.address=localhost
+com.opensoc.metrics.graphite.port=2023
+
+#TelemetryParserBolt
+com.opensoc.metrics.TelemetryParserBolt.acks=true
+com.opensoc.metrics.TelemetryParserBolt.emits=true
+com.opensoc.metrics.TelemetryParserBolt.fails=true
+
+
+#GenericEnrichmentBolt
+com.opensoc.metrics.GenericEnrichmentBolt.acks=true
+com.opensoc.metrics.GenericEnrichmentBolt.emits=true
+com.opensoc.metrics.GenericEnrichmentBolt.fails=true
+
+
+#TelemetryIndexingBolt
+com.opensoc.metrics.TelemetryIndexingBolt.acks=true
+com.opensoc.metrics.TelemetryIndexingBolt.emits=true
+com.opensoc.metrics.TelemetryIndexingBolt.fails=true

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology.conf
new file mode 100644
index 0000000..1720632
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology.conf
@@ -0,0 +1,110 @@
+include = ../../etc/env/environment_common.conf
+include = ../../etc/env/es_connection.conf
+include = ../../etc/env/hdfs_connection.conf
+include = ../../etc/env/mysql_connection.conf
+include = metrics.conf
+include = features_enabled.conf
+
+#Global Properties
+
+debug.mode=true
+local.mode=true
+num.workers=1
+
+#Standard 5-tuple fields
+
+source.ip=ip_src_addr
+source.port=ip_src_port
+dest.ip=ip_dst_addr
+dest.port=ip_dst_port
+protocol=protocol
+
+#Test Spout
+spout.test.parallelism.repeat=false
+
+#Kafka Spout
+spout.kafka.topic=asa_raw
+
+#Parser Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.GrokAsaParser
+
+#Host Enrichment
+
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.host.enrichment_tag=host
+
+
+#GeoEnrichment
+
+bolt.enrichment.geo.enrichment_tag=geo
+bolt.enrichment.geo.adapter.table=GEO
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
+
+#WhoisEnrichment
+
+bolt.enrichment.whois.hbase.table.name=whois
+bolt.enrichment.whois.enrichment_tag=whois
+bolt.enrichment.whois.fields=host
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
+
+#CIF Enrichment
+bolt.enrichment.cif.tablename=cif_table
+bolt.enrichment.cif.fields.host=host
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.cif.enrichment_tag=cif
+
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
+
+#Indexing Bolt
+bolt.indexing.indexname=asa_index
+bolt.indexing.timestamp=yyyy.MM.ww
+bolt.indexing.documentname=asa_doc
+bolt.indexing.bulk=1
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Indexing Bolt
+bolt.alerts.indexing.indexname=alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
+bolt.alerts.indexing.documentname=asa_alert
+bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Error Indexing Bolt
+bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
+bolt.error.indexing.documentname=asa_error
+bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Bolt
+bolt.alerts.adapter=com.opensoc.alerts.adapters.CIFAlertsAdapter
+com.opensoc.alerts.adapters.CIFAlertsAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.CIFAlertsAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.CIFAlertsAdapter.port=2181
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+#HDFS Bolt
+bolt.hdfs.batch.size=5000
+bolt.hdfs.field.delimiter=|
+bolt.hdfs.file.rotation.size.in.mb=5
+bolt.hdfs.file.system.url=hdfs://nn1:8020
+bolt.hdfs.wip.file.path=/asa/wip
+bolt.hdfs.finished.file.path=/asa/rotated
+bolt.hdfs.compression.codec.class=org.apache.hadoop.io.compress.SnappyCodec
+
+#Kafka Bolt
+bolt.kafka.topic=asa_enriched
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology_identifier.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology_identifier.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology_identifier.conf
new file mode 100644
index 0000000..68d3463
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/asa/topology_identifier.conf
@@ -0,0 +1,4 @@
+#Each topology must have a unique identifier.  This setting is required
+
+topology.id=asa
+instance.id=A001
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/features_enabled.conf
index ef677f3..5b45dde 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/features_enabled.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/features_enabled.conf
@@ -19,7 +19,7 @@ spout.kafka.parallelism.hint=1
 #Feature: Parser Bolt
 ##Feature Description: Parses telemetry from its native format into a native JSON
 
-parser.bolt.name=ParserBolt
+bolt.parser.name=ParserBolt
 bolt.parser.enabled=true
 bolt.parser.num.tasks=1
 bolt.parser.parallelism.hint=1
@@ -56,6 +56,14 @@ bolt.enrichment.cif.enabled=false
 bolt.enrichment.cif.num.tasks=1
 bolt.enrichment.cif.parallelism.hint=1
 
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
 #Feature: Rules-Based Alerts
 ##Feature Description: Tags messages with rules-based alerts
 
@@ -92,7 +100,7 @@ bolt.error.indexing.parallelism.hint=1
 ##Feature Description: Writes telemetry messages back into a Kafka topic
 
 bolt.kafka.name=KafkaBolt
-bolt.kafka.enabled=true
+bolt.kafka.enabled=false
 bolt.kafka.num.tasks=1
 bolt.kafka.parallelism.hint=1
 
@@ -100,6 +108,6 @@ bolt.kafka.parallelism.hint=1
 ##Feature Description: Writes telemetry messages into HDFS
 
 bolt.hdfs.name=HDFSBolt
-bolt.hdfs.enabled=true
+bolt.hdfs.enabled=false
 bolt.hdfs.num.tasks=1
 bolt.hdfs.parallelism.hint=1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/topology.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/topology.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/topology.conf
index 6012056..0012aea 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/topology.conf
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/bro/topology.conf
@@ -26,13 +26,14 @@ spout.test.parallelism.repeat=false
 spout.kafka.topic=bro_raw
 
 #Parsing Bolt
+bolt.parser.adapter=com.opensoc.parsing.parsers.BasicBroParser
 source.include.protocols=snmp,http,ftp,ssh,ssl,dns,socks,dnp3,smtp,dhcp,modbus,radius,irc
 source.exclude.protocols=x509,files,app_stats
 
 #Host Enrichment
 
-bolt.enrichment.host.MAX_CACHE_SIZE=10000
-bolt.enrichment.host.MAX_TIME_RETAIN=10
+bolt.enrichment.host.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.host.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.host.enrichment_tag=host
 
 
@@ -40,40 +41,88 @@ bolt.enrichment.host.enrichment_tag=host
 
 bolt.enrichment.geo.enrichment_tag=geo
 bolt.enrichment.geo.adapter.table=GEO
-bolt.enrichment.geo.MAX_CACHE_SIZE=10000
-bolt.enrichment.geo.MAX_TIME_RETAIN=10
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.fields=ip_src_addr,ip_dst_addr
 
 #WhoisEnrichment
 
 bolt.enrichment.whois.hbase.table.name=whois
 bolt.enrichment.whois.enrichment_tag=whois
-bolt.enrichment.whois.source=tld
-bolt.enrichment.whois.MAX_CACHE_SIZE=10000
-bolt.enrichment.whois.MAX_TIME_RETAIN=10
+bolt.enrichment.whois.fields=host,query
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10
 
 #CIF Enrichment
 bolt.enrichment.cif.tablename=cif_table
-bolt.enrichment.cif.host=tld
-bolt.enrichment.cif.email=email
-bolt.enrichment.cif.MAX_CACHE_SIZE=10000
-bolt.enrichment.cif.MAX_TIME_RETAIN=10
+bolt.enrichment.cif.fields.host=host,query
+bolt.enrichment.cif.fields.email=email
+bolt.enrichment.cif.fields.ip=ip_src_addr,ip_dst_addr
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
 bolt.enrichment.cif.enrichment_tag=cif
 
+#Threat Enrichment
+bolt.enrichment.threat.tablename=threat_table
+bolt.enrichment.threat.fields=host,query,ip_src_addr,ip_dst_addr
+bolt.enrichment.threat.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.threat.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.threat.enrichment_tag=threat
 
 #Indexing Bolt
 bolt.indexing.indexname=bro_index
+bolt.indexing.timestamp=yyyy.MM.ww
 bolt.indexing.documentname=bro_doc
 bolt.indexing.bulk=200
+bolt.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Alerts Indexing Bolt
 bolt.alerts.indexing.indexname=alert
 bolt.alerts.indexing.documentname=bro_alert
+bolt.alerts.indexing.timestamp=yyyy.MM.ww
 bolt.alerts.indexing.bulk=1
+bolt.alerts.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
 
 #Error Indexing Bolt
 bolt.error.indexing.indexname=error
+bolt.error.indexing.timestamp=yyyy.MM
 bolt.error.indexing.documentname=bro_error
 bolt.error.indexing.bulk=1
+bolt.error.indexing.adapter=com.opensoc.indexing.adapters.ESTimedRotatingAdapter
+
+#Alerts Bolt
+
+
+bolt.alerts.adapter=com.opensoc.alerts.adapters.ThreatAlertsAdapter
+com.opensoc.alerts.adapters.ThreatAlertsAdapter.enrichment_tag=Threat_Enrichment
+com.opensoc.alerts.adapters.ThreatAlertsAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.ThreatAlertsAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.ThreatAlertsAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.ThreatAlertsAdapter.port=2181
+com.opensoc.alerts.adapters.ThreatAlertsAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.ThreatAlertsAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+
+#bolt.alerts.adapter=com.opensoc.alerts.adapters.CIFAlertsAdapter
+#com.opensoc.alerts.adapters.CIFAlertsAdapter.enrichment_tag=CIF_Enrichment
+#com.opensoc.alerts.adapters.CIFAlertsAdapter.whitelist_table_name = ip_whitelist
+#com.opensoc.alerts.adapters.CIFAlertsAdapter.blacklist_table_name = ip_blacklist
+#com.opensoc.alerts.adapters.CIFAlertsAdapter.quorum=zkpr1,zkpr2,zkpr3
+#com.opensoc.alerts.adapters.CIFAlertsAdapter.port=2181
+#com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+#com.opensoc.alerts.adapters.CIFAlertsAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+#bolt.alerts.adapter=com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter.whitelist_table_name = ip_whitelist
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter.blacklist_table_name = ip_blacklist
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter.quorum=zkpr1,zkpr2,zkpr3
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter.port=2181
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+#com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter._MAX_TIME_RETAIN_MINUTES=1000
+
+
+
+
 
 #HDFS Bolt
 bolt.hdfs.batch.size=5000

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/features_enabled.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/features_enabled.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/features_enabled.conf
new file mode 100644
index 0000000..5b45dde
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/features_enabled.conf
@@ -0,0 +1,113 @@
+#Enable and disable features for each topology
+
+#Feature: Test spout 
+##Feature Description: Reads telemetry from file and ingests it into topology.  Used for testing or bulk loading the topology
+
+spout.test.name=TestSpout
+spout.test.enabled=true
+spout.test.num.tasks=1
+spout.test.parallelism.hint=1
+
+#Feature: Kafka spout
+##Feature Description: Acts as a Kafka consumer.  Takes messages from a Kafka topic and ingests them into a topology
+
+spout.kafka.name=KafkaSpout
+spout.kafka.enabled=false
+spout.kafka.num.tasks=1
+spout.kafka.parallelism.hint=1
+
+#Feature: Parser Bolt
+##Feature Description: Parses telemetry from its native format into a native JSON
+
+bolt.parser.name=ParserBolt
+bolt.parser.enabled=true
+bolt.parser.num.tasks=1
+bolt.parser.parallelism.hint=1
+
+#Feature: Host Enrichment
+##Feature Description: Appends information about known hosts to a telemetry message
+
+bolt.enrichment.host.name=HostEnrichment
+bolt.enrichment.host.enabled=true
+bolt.enrichment.host.num.tasks=1
+bolt.enrichment.host.parallelism.hint=1
+
+#Feature: Geo Enrichment
+##Feature Description: Appends geo information about known non-local IPs to a telemetry message
+
+bolt.enrichment.geo.name=GeoEnrichment 
+bolt.enrichment.geo.enabled=true
+bolt.enrichment.geo.num.tasks=1
+bolt.enrichment.geo.parallelism.hint=1
+
+#Feature: Whois Enrichment
+##Feature Description: Appends whois information about known domains to a telemetry message
+
+bolt.enrichment.whois.name=WhoisEnrichment
+bolt.enrichment.whois.enabled=false
+bolt.enrichment.whois.num.tasks=1
+bolt.enrichment.whois.parallelism.hint=1
+
+#Feature: CIF Enrichment
+##Feature Description: Appends information from CIF threat intelligence feeds to a telemetry message
+
+bolt.enrichment.cif.name=SIFBolt
+bolt.enrichment.cif.enabled=false
+bolt.enrichment.cif.num.tasks=1
+bolt.enrichment.cif.parallelism.hint=1
+
+#Feature: Threat Enrichment
+##Feature Description: Appends information from Threat intelligence feeds to a telemetry message
+
+bolt.enrichment.threat.name=ThreatBolt
+bolt.enrichment.threat.enabled=false
+bolt.enrichment.threat.num.tasks=1
+bolt.enrichment.threat.parallelism.hint=1
+
+#Feature: Rules-Based Alerts
+##Feature Description: Tags messages with rules-based alerts
+
+bolt.alerts.name=Alerts
+bolt.alerts.enabled=true
+bolt.alerts.num.tasks=1
+bolt.alerts.parallelism.hint=1
+
+#Feature: Indexer
+##Feature Description: Indexes telemetry messages in ElasticSearch or Solr
+
+bolt.indexing.name=IndexBolt
+bolt.indexing.enabled=true
+bolt.indexing.num.tasks=1
+bolt.indexing.parallelism.hint=1
+
+#Feature: Alerts Indexer
+##Feature Description: Indexes alert messages in ElasticSearch or Solr
+
+bolt.alerts.indexing.name=AlertIndexBolt
+bolt.alerts.indexing.enabled=true
+bolt.alerts.indexing.num.tasks=1
+bolt.alerts.indexing.parallelism.hint=1
+
+#Feature: Error Indexer
+##Feature Description: Indexes error messages in ElasticSearch or Solr
+
+bolt.error.indexing.name=ErrorIndexBolt
+bolt.error.indexing.enabled=true
+bolt.error.indexing.num.tasks=1
+bolt.error.indexing.parallelism.hint=1
+
+#Feature: Kafka Bolt
+##Feature Description: Writes telemetry messages back into a Kafka topic
+
+bolt.kafka.name=KafkaBolt
+bolt.kafka.enabled=false
+bolt.kafka.num.tasks=1
+bolt.kafka.parallelism.hint=1
+
+#Feature: HDFS Bolt
+##Feature Description: Writes telemetry messages into HDFS
+
+bolt.hdfs.name=HDFSBolt
+bolt.hdfs.enabled=false
+bolt.hdfs.num.tasks=1
+bolt.hdfs.parallelism.hint=1
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/metrics.conf
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/metrics.conf b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/metrics.conf
new file mode 100644
index 0000000..1daef3d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/OpenSOC_Configs/topologies/fireeye/metrics.conf
@@ -0,0 +1,26 @@
+#reporters
+com.opensoc.metrics.reporter.graphite=true
+com.opensoc.metrics.reporter.console=false
+com.opensoc.metrics.reporter.jmx=false
+
+#Graphite Addresses
+
+com.opensoc.metrics.graphite.address=localhost
+com.opensoc.metrics.graphite.port=2023
+
+#TelemetryParserBolt
+com.opensoc.metrics.TelemetryParserBolt.acks=true
+com.opensoc.metrics.TelemetryParserBolt.emits=true
+com.opensoc.metrics.TelemetryParserBolt.fails=true
+
+
+#GenericEnrichmentBolt
+com.opensoc.metrics.GenericEnrichmentBolt.acks=true
+com.opensoc.metrics.GenericEnrichmentBolt.emits=true
+com.opensoc.metrics.GenericEnrichmentBolt.fails=true
+
+
+#TelemetryIndexingBolt
+com.opensoc.metrics.TelemetryIndexingBolt.acks=true
+com.opensoc.metrics.TelemetryIndexingBolt.emits=true
+com.opensoc.metrics.TelemetryIndexingBolt.fails=true



[15/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseSample.log
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseSample.log b/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseSample.log
deleted file mode 100644
index 809501b..0000000
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/test/resources/IseSample.log
+++ /dev/null
@@ -1,308 +0,0 @@
-Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,BYODRegistration=Unknown\,FeedService=false\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,TimeToProfile=19\,StaticGroupAssignment=false\,NmapSubnetScanID=0\,DeviceRegistrationStatus=NotRegistered\,PortalUser=, EndpointSourceEvent=SNMPQuery Probe, EndpointIdentityGroup=Profile
 d, ProfilerServer=stage-pdp01.cisco.com,
-Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024856 1 0 2014-08-07 00:45:43.786 -07:00 0000288543 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,BYODRegistration=Unknown\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,TimeToProfile=19\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,UpdateTime=0\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,FeedService=false\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,NmapScanCount=0\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceE
 vent=SNMPQuery Probe, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com,
-Aug  6 20:00:52 10.42.7.64 Aug  7 03:20:05 npf-sjca-pdp02 CISE_Profiler 0000373185 1 0 2014-08-07 03:20:05.549 -07:00 0011310202 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407394245820\,PolicyVersion=403\,Identi
 tyGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 21:00:48 10.42.7.64 Aug  7 04:20:00 npf-sjca-pdp02 CISE_Profiler 0000373902 1 0 2014-08-07 04:20:00.983 -07:00 0011322557 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407406806572\,PolicyVersion=403\,Iden
 tityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 22:22:50 10.42.7.64 Aug  7 05:42:03 npf-sjca-pdp02 CISE_Profiler 0000374846 1 0 2014-08-07 05:42:03.617 -07:00 0011340138 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=10, EndpointMacAddress=68:A8:6D:4E:0D:86, EndpointMatchedPolicy=Apple-Device, EndpointOUI=Apple, EndpointPolicy=Apple-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,host-name=PEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407415322895\,TimeToProfile=717\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,PolicyVersion=403\,IdentityGroupID=abbbcac0-89e6-11e1-bf14-005056aa4dd7\,Total Certainty Factor=10\,ciaddr=0.0.0.0\,FeedService=false\,dhcp-parameter-request-list=1\, 3\, 6\, 15\, 119\, 95\, 252\, 44\, 46\,MatchedPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceEvent=DHCP Probe, EndpointIdentityGroup
 =Apple-Device, ProfilerServer=npf.example.com,
-Aug  6 23:30:10 10.42.7.64 Aug  7 06:49:23 npf-sjca-pdp02 CISE_Profiler 0000375603 1 0 2014-08-07 06:49:23.920 -07:00 0011353768 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407410402099\,PolicyVersion=403\,Identi
 tyGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 23:30:48 10.42.7.64 Aug  7 06:50:01 npf-sjca-pdp02 CISE_Profiler 0000375611 1 0 2014-08-07 06:50:01.377 -07:00 0011353875 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=50, EndpointIPAddress=10.34.92.103, EndpointMacAddress=3C:A9:F4:29:FC:3C, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.34.76.212, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-29-fc-3c\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406109860322\,L4_DST_PORT=50428\,TimeToProfile=7\,Framed-IP-Address=10.34.92.103\,LastNmapScanTime=1380758278898\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=140668603
 4558\,PolicyVersion=403\,IdentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=50\,operating-system=Microsoft Windows Vista SP0 - SP2\, Server 2008\, or Windows 7 Ultimate\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1373657280926\,NmapScanCount=3\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 23:32:52 10.42.7.64 Aug  7 06:52:05 npf-sjca-pdp02 CISE_Profiler 0000375636 1 0 2014-08-07 06:52:05.272 -07:00 0011354313 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.143, EndpointMacAddress=E8:2A:EA:23:5E:3D, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=e8-2a-ea-23-5e-3d\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=ANOY-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406114784910\,TimeToProfile=7\,Framed-IP-Address=10.56.129.143\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407395211208\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1405408515121\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 16:40:52 10.42.7.64 Aug  7 00:00:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370855 1 0 2014-08-07 00:00:04.527 -07:00 0011266584 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270932, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056EF53E323F4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:40:57 10.42.7.63 Aug  7 00:00:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001969834 1 0 2014-08-07 00:00:09.568 -07:00 0098648519 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084839, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4A53E323F9, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:41:24 10.34.84.145 Aug  7 00:00:36 stage-pdp01 CISE_Failed_Attempts 0000024616 1 0 2014-08-07 00:00:36.332 -07:00 0000287007 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19317, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:26 10.34.84.145 Aug  7 00:00:38 stage-pdp01 CISE_Failed_Attempts 0000024617 1 0 2014-08-07 00:00:38.336 -07:00 0000287011 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19318, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:28 10.34.84.145 Aug  7 00:00:40 stage-pdp01 CISE_Failed_Attempts 0000024618 1 0 2014-08-07 00:00:40.336 -07:00 0000287015 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19319, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:30 10.34.84.145 Aug  7 00:00:42 stage-pdp01 CISE_Failed_Attempts 0000024619 1 0 2014-08-07 00:00:42.340 -07:00 0000287019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19320, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:32 10.34.84.145 Aug  7 00:00:44 stage-pdp01 CISE_Failed_Attempts 0000024620 1 0 2014-08-07 00:00:44.340 -07:00 0000287023 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19321, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:34 10.34.84.145 Aug  7 00:00:46 stage-pdp01 CISE_Failed_Attempts 0000024621 1 0 2014-08-07 00:00:46.344 -07:00 0000287027 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19322, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:42:02 10.42.7.64 Aug  7 00:01:14 npf-sjca-pdp02 CISE_Failed_Attempts 0000370865 1 0 2014-08-07 00:01:14.610 -07:00 0011266810 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270940, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F053E3243A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:42:07 10.42.7.63 Aug  7 00:01:19 npf-sjca-pdp01 CISE_Failed_Attempts 0001969923 1 0 2014-08-07 00:01:19.665 -07:00 0098652715 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084986, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4B53E3243F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:42:12 10.42.7.64 Aug  7 00:01:24 npf-sjca-pdp02 CISE_Failed_Attempts 0000370867 1 0 2014-08-07 00:01:24.701 -07:00 0011266815 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270941, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F153E32444, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:42:17 10.42.7.63 Aug  7 00:01:29 npf-sjca-pdp01 CISE_Failed_Attempts 0001969935 1 0 2014-08-07 00:01:29.746 -07:00 0098653362 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085007, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4C53E32449, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:43:22 10.42.7.64 Aug  7 00:02:34 npf-sjca-pdp02 CISE_Failed_Attempts 0000370885 1 0 2014-08-07 00:02:34.792 -07:00 0011267367 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270956, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F353E3248A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:43:27 10.42.7.63 Aug  7 00:02:39 npf-sjca-pdp01 CISE_Failed_Attempts 0001970043 1 0 2014-08-07 00:02:39.808 -07:00 0098657578 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085161, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4D53E3248F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:43:56 10.42.7.64 Aug  7 00:03:08 npf-sjca-pdp02 CISE_Failed_Attempts 0000370897 1 0 2014-08-07 00:03:08.902 -07:00 0011267657 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=yshchory, Protocol=Radius, RequestLatency=49, NetworkDeviceName=NTN-WLC1, User-Name=yshchory, NAS-IP-Address=10.56.129.4, NAS-Port=1, Framed-IP-Address=10.56.129.141, Class=CACS:0a388104000045cd53e2be75:npf-sjca-pdp02/195481465/270958, Called-Station-ID=6c-41-6a-5f-6e-c0, Calling-Station-ID=90-18-7c-7b-59-01, NAS-Identifier=ntn01-11a-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=2359603, Acct-Output-Octets=26928466, Acct-Session-Id=53e2be78/90:18:7c:7b:59:01/13844, Acct-Authentic=RADIUS, Acct-Session-Time=1466, Acct-Input-Packets=14866, Acct-Output-Packets=23043, und
 efined-52=
-Aug  6 16:44:01 10.42.7.63 Aug  7 00:03:13 npf-sjca-pdp01 CISE_Failed_Attempts 0001970072 1 0 2014-08-07 00:03:13.112 -07:00 0098658804 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=133, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.63, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=10.56.111.14, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=4, Acct-Input-Octets=225395, Acct-Output-Octets=761436, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=43, Acct-Input-Packets=1163, Acct-Output-Packets=1080, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-pair
 =audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp01/195491152/2085221, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=42, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
-Aug  6 16:44:32 10.42.7.64 Aug  7 00:03:44 npf-sjca-pdp02 CISE_Failed_Attempts 0000370899 1 0 2014-08-07 00:03:44.851 -07:00 0011267663 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270963, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F453E324D0, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:44:36 10.34.84.145 Aug  7 00:03:48 stage-pdp01 CISE_Failed_Attempts 0000024632 1 0 2014-08-07 00:03:48.375 -07:00 0000287084 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19329, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:37 10.42.7.63 Aug  7 00:03:49 npf-sjca-pdp01 CISE_Failed_Attempts 0001970128 1 0 2014-08-07 00:03:49.893 -07:00 0098661643 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085307, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4E53E324D5, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:44:38 10.34.84.145 Aug  7 00:03:50 stage-pdp01 CISE_Failed_Attempts 0000024633 1 0 2014-08-07 00:03:50.379 -07:00 0000287088 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19330, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:40 10.34.84.145 Aug  7 00:03:52 stage-pdp01 CISE_Failed_Attempts 0000024634 1 0 2014-08-07 00:03:52.379 -07:00 0000287092 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19331, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:42 10.34.84.145 Aug  7 00:03:54 stage-pdp01 CISE_Failed_Attempts 0000024635 1 0 2014-08-07 00:03:54.387 -07:00 0000287096 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19332, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:42 10.42.7.64 Aug  7 00:03:54 npf-sjca-pdp02 CISE_Failed_Attempts 0000370903 1 0 2014-08-07 00:03:54.924 -07:00 0011267670 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270964, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F553E324DA, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:44:44 10.34.84.145 Aug  7 00:03:56 stage-pdp01 CISE_Failed_Attempts 0000024636 1 0 2014-08-07 00:03:56.386 -07:00 0000287100 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19333, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:46 10.34.84.145 Aug  7 00:03:58 stage-pdp01 CISE_Failed_Attempts 0000024637 1 0 2014-08-07 00:03:58.390 -07:00 0000287104 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19334, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:47 10.42.7.63 Aug  7 00:03:59 npf-sjca-pdp01 CISE_Failed_Attempts 0001970140 1 0 2014-08-07 00:03:59.951 -07:00 0098662310 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085331, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4F53E324DF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:44:48 10.42.7.64 Aug  7 00:04:00 npf-sjca-pdp02 CISE_Failed_Attempts 0000370905 1 0 2014-08-07 00:04:00.526 -07:00 0011267674 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.64, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=169.254.53.87, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=1458615, Acct-Output-Octets=3836368, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=95, Acct-Input-Packets=4505, Acct-Output-Packets=5619, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-p
 air=audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp02/195481465/270965, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=52, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
-Aug  6 16:45:52 10.42.7.64 Aug  7 00:05:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370920 1 0 2014-08-07 00:05:04.969 -07:00 0011267987 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270977, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F653E32520, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:45:58 10.42.7.63 Aug  7 00:05:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001970212 1 0 2014-08-07 00:05:09.998 -07:00 0098665518 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085460, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5053E32525, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:47:03 10.42.7.64 Aug  7 00:06:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000370931 1 0 2014-08-07 00:06:15.016 -07:00 0011268196 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270985, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F753E32567, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:47:08 10.42.7.63 Aug  7 00:06:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970324 1 0 2014-08-07 00:06:20.055 -07:00 0098669942 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085599, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5153E3256C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:47:13 10.42.7.64 Aug  7 00:06:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000370934 1 0 2014-08-07 00:06:25.097 -07:00 0011268209 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270987, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F853E32571, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:47:18 10.42.7.63 Aug  7 00:06:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970335 1 0 2014-08-07 00:06:30.119 -07:00 0098670037 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085618, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5253E32576, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:47:48 10.34.84.145 Aug  7 00:07:00 stage-pdp01 CISE_Failed_Attempts 0000024649 1 0 2014-08-07 00:07:00.418 -07:00 0000287210 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19342, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:50 10.34.84.145 Aug  7 00:07:02 stage-pdp01 CISE_Failed_Attempts 0000024650 1 0 2014-08-07 00:07:02.421 -07:00 0000287214 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19343, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:52 10.34.84.145 Aug  7 00:07:04 stage-pdp01 CISE_Failed_Attempts 0000024651 1 0 2014-08-07 00:07:04.425 -07:00 0000287218 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19344, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:54 10.34.84.145 Aug  7 00:07:06 stage-pdp01 CISE_Failed_Attempts 0000024652 1 0 2014-08-07 00:07:06.429 -07:00 0000287222 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19345, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:56 10.34.84.145 Aug  7 00:07:08 stage-pdp01 CISE_Failed_Attempts 0000024653 1 0 2014-08-07 00:07:08.429 -07:00 0000287226 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19346, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:58 10.34.84.145 Aug  7 00:07:10 stage-pdp01 CISE_Failed_Attempts 0000024654 1 0 2014-08-07 00:07:10.433 -07:00 0000287230 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19347, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:48:23 10.42.7.64 Aug  7 00:07:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000370955 1 0 2014-08-07 00:07:35.138 -07:00 0011268472 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271001, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F953E325B7, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:48:28 10.42.7.63 Aug  7 00:07:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970420 1 0 2014-08-07 00:07:40.178 -07:00 0098673462 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085757, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5353E325BC, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:49:33 10.42.7.64 Aug  7 00:08:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000370984 1 0 2014-08-07 00:08:45.219 -07:00 0011269071 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271016, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FB53E325FD, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:49:38 10.42.7.63 Aug  7 00:08:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970519 1 0 2014-08-07 00:08:50.259 -07:00 0098677825 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085892, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5453E32602, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:49:43 10.42.7.64 Aug  7 00:08:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000370986 1 0 2014-08-07 00:08:55.298 -07:00 0011269076 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271017, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FC53E32607, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:49:48 10.42.7.63 Aug  7 00:09:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970524 1 0 2014-08-07 00:09:00.330 -07:00 0098678019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085909, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5553E3260C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:50:53 10.42.7.64 Aug  7 00:10:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000370999 1 0 2014-08-07 00:10:05.339 -07:00 0011269371 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271027, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FD53E3264D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:50:58 10.42.7.63 Aug  7 00:10:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970625 1 0 2014-08-07 00:10:10.388 -07:00 0098682297 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086061, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5653E32652, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:51:00 10.34.84.145 Aug  7 00:10:12 stage-pdp01 CISE_Failed_Attempts 0000024661 1 0 2014-08-07 00:10:12.492 -07:00 0000287258 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19354, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:02 10.34.84.145 Aug  7 00:10:14 stage-pdp01 CISE_Failed_Attempts 0000024662 1 0 2014-08-07 00:10:14.496 -07:00 0000287262 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19355, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:04 10.34.84.145 Aug  7 00:10:16 stage-pdp01 CISE_Failed_Attempts 0000024663 1 0 2014-08-07 00:10:16.496 -07:00 0000287266 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19356, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:06 10.34.84.145 Aug  7 00:10:18 stage-pdp01 CISE_Failed_Attempts 0000024664 1 0 2014-08-07 00:10:18.500 -07:00 0000287270 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19357, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:08 10.34.84.145 Aug  7 00:10:20 stage-pdp01 CISE_Failed_Attempts 0000024665 1 0 2014-08-07 00:10:20.504 -07:00 0000287274 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19358, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:10 10.34.84.145 Aug  7 00:10:22 stage-pdp01 CISE_Failed_Attempts 0000024667 1 0 2014-08-07 00:10:22.507 -07:00 0000287279 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19359, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:52:03 10.42.7.64 Aug  7 00:11:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371005 1 0 2014-08-07 00:11:15.432 -07:00 0011269421 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271031, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FE53E32693, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:52:08 10.42.7.63 Aug  7 00:11:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970691 1 0 2014-08-07 00:11:20.468 -07:00 0098685176 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086181, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5753E32698, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:52:13 10.42.7.64 Aug  7 00:11:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371007 1 0 2014-08-07 00:11:25.515 -07:00 0011269426 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271032, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FF53E3269D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:52:18 10.42.7.63 Aug  7 00:11:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970708 1 0 2014-08-07 00:11:30.551 -07:00 0098685669 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=8, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086202, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5853E326A2, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:53:23 10.42.7.64 Aug  7 00:12:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371016 1 0 2014-08-07 00:12:35.547 -07:00 0011269586 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271040, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570053E326E3, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:53:28 10.42.7.63 Aug  7 00:12:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970802 1 0 2014-08-07 00:12:40.596 -07:00 0098689883 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086334, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5953E326E8, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:54:12 10.34.84.145 Aug  7 00:13:24 stage-pdp01 CISE_Failed_Attempts 0000024680 1 0 2014-08-07 00:13:24.527 -07:00 0000287388 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19368, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:14 10.34.84.145 Aug  7 00:13:26 stage-pdp01 CISE_Failed_Attempts 0000024681 1 0 2014-08-07 00:13:26.531 -07:00 0000287392 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19369, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:16 10.34.84.145 Aug  7 00:13:28 stage-pdp01 CISE_Failed_Attempts 0000024682 1 0 2014-08-07 00:13:28.534 -07:00 0000287396 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19370, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:18 10.34.84.145 Aug  7 00:13:30 stage-pdp01 CISE_Failed_Attempts 0000024683 1 0 2014-08-07 00:13:30.538 -07:00 0000287400 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19371, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:20 10.34.84.145 Aug  7 00:13:32 stage-pdp01 CISE_Failed_Attempts 0000024684 1 0 2014-08-07 00:13:32.538 -07:00 0000287404 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19372, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:22 10.34.84.145 Aug  7 00:13:34 stage-pdp01 CISE_Failed_Attempts 0000024685 1 0 2014-08-07 00:13:34.542 -07:00 0000287408 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19373, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:33 10.42.7.64 Aug  7 00:13:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000371020 1 0 2014-08-07 00:13:45.628 -07:00 0011269631 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271044, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570153E32729, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:54:38 10.42.7.63 Aug  7 00:13:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970913 1 0 2014-08-07 00:13:50.668 -07:00 0098695334 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086486, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5A53E3272E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:54:43 10.42.7.64 Aug  7 00:13:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000371025 1 0 2014-08-07 00:13:55.694 -07:00 0011269740 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271048, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570253E32733, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:54:48 10.42.7.63 Aug  7 00:14:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970924 1 0 2014-08-07 00:14:00.705 -07:00 0098695591 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086505, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5B53E32738, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:55:53 10.42.7.64 Aug  7 00:15:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000371036 1 0 2014-08-07 00:15:05.742 -07:00 0011270054 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271057, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570353E32779, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:55:58 10.42.7.63 Aug  7 00:15:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970997 1 0 2014-08-07 00:15:10.772 -07:00 0098698954 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086621, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5C53E3277E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:57:03 10.42.7.64 Aug  7 00:16:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371051 1 0 2014-08-07 00:16:15.827 -07:00 0011270497 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271067, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570453E327BF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:57:08 10.42.7.63 Aug  7 00:16:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001971096 1 0 2014-08-07 00:16:20.857 -07:00 0098703837 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086806, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5D53E327C4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:57:24 10.34.84.145 Aug  7 00:16:36 stage-pdp01 CISE_Failed_Attempts 0000024697 1 0 2014-08-07 00:16:36.602 -07:00 0000287553 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19384, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:57:26 10.34.84.145 Aug  7 00:16:38 stage-pdp01 CISE_Failed_Attempts 0000024698 1 0 2014-08-07 00:16:38.605 -07:00 0000287557 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19385, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:57:28 10.34.84.145 Aug  7 00:16:40 stage-pdp01 CISE_Failed_Attempts 0000024699 1 0 2014-08-07 00:16:40.609 -07:00 0000287561 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19386, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:57:30 10.34.84.145 Aug  7 00:16:42 stage-pdp01 CISE_Failed_Attempts 0000024700 1 0 2014-08-07 00:16:42.613 -07:00 0000287565 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19387, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:57:32 10.34.84.145 Aug  7 00:16:44 stage-pdp01 CISE_Failed_Attempts 0000024701 1 0 2014-08-07 00:16:44.613 -07:00 0000287569 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19388, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:57:34 10.34.84.145 Aug  7 00:16:46 stage-pdp01 CISE_Failed_Attempts 0000024702 1 0 2014-08-07 00:16:46.617 -07:00 0000287573 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19389, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:58:03 10.42.7.64 Aug  7 00:17:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371063 1 0 2014-08-07 00:17:15.966 -07:00 0011270832 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.34.76.212, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=hslai, Protocol=Radius, RequestLatency=25, NetworkDeviceName=sjcm-00a-npf-wlc1, User-Name=hslai, NAS-IP-Address=10.34.76.212, NAS-Port=1, Framed-IP-Address=10.34.94.11, Class=CACS:0a224cd40002fdf953e327f2:npf-sjca-pdp02/195481465/271072, Called-Station-ID=88-43-e1-62-1d-20, Calling-Station-ID=24-a2-e1-3b-4b-cb, NAS-Identifier=sjcm-00a-npf-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=5198, Acct-Output-Octets=4093, Acct-Session-Id=53e327f2/24:a2:e1:3b:4b:cb/174403, Acct-Authentic=RADIUS, Acct-Session-Time=9, Acct-Input-Packets=37, Acct-Output-Packets=13, undefined-52
 =
-Aug  6 16:58:13 10.42.7.64 Aug  7 00:17:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371065 1 0 2014-08-07 00:17:25.902 -07:00 0011270838 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271076, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570553E32805, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:58:18 10.42.7.63 Aug  7 00:17:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001971204 1 0 2014-08-07 00:17:30.916 -07:00 0098707928 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086981, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5E53E3280A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:59:23 10.42.7.64 Aug  7 00:18:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371070 1 0 2014-08-07 00:18:35.942 -07:00 0011271044 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271081, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570653E3284B, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:59:28 10.42.7.64 Aug  7 00:18:40 npf-sjca-pdp02 CISE_Failed_Attempts 0000371072 1 0 2014-08-07 00:18:40.669 -07:00 0011271053 5400 NOTICE Failed-Attempt: Authentication failed, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=istern, Protocol=Radius, RequestLatency=12, NetworkDeviceName=NTN-WLC1, User-Name=istern, NAS-IP-Address=10.56.129.4, NAS-Port=1, Service-Type=Framed, Framed-MTU=1300, State=37CPMSessionID=0a388104000045de53e2c750\;41SessionID=npf-sjca-pdp02/195481465/271077\;, Called-Station-ID=70-10-5c-f3-2f-80:alpha_example, Calling-Station-ID=f0-27-65-48-8c-8f, NAS-Identifier=ntn01-11a-wlc1, NAS-Port-Type=Wireless - IEEE 802.11, Tunnel-Type=(tag=0) VLAN, Tunnel-Medium-Type=(tag=0) 802, Tunnel-Private-Group-ID=(tag=0) 604, undefined-89=
-Aug  6 16:59:28 10.42.7.63 Aug  7 00:18:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001971282 1 0 2014-08-07 00:18:40.981 -07:00 0098711291 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087140, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5F53E32850, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 17:00:33 10.42.7.64 Aug  7 00:19:46 npf-sjca-pdp02 CISE_Failed_Attempts 0000371080 1 0 2014-08-07 00:19:46.020 -07:00 0011271232 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271087, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570753E32892, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 17:00:36 10.34.84.145 Aug  7 00:19:48 stage-pdp01 CISE_Failed_Attempts 0000024712 1 0 2014-08-07 00:19:48.660 -07:00 0000287604 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19396, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:00:38 10.34.84.145 Aug  7 00:19:50 stage-pdp01 CISE_Failed_Attempts 0000024713 1 0 2014-08-07 00:19:50.664 -07:00 0000287608 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19397, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:00:39 10.42.7.63 Aug  7 00:19:51 npf-sjca-pdp01 CISE_Failed_Attempts 0001971393 1 0 2014-08-07 00:19:51.042 -07:00 0098716185 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087311, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D6053E32897, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 17:00:40 10.34.84.145 Aug  7 00:19:52 stage-pdp01 CISE_Failed_Attempts 0000024714 1 0 2014-08-07 00:19:52.664 -07:00 0000287612 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19398, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:00:42 10.34.84.145 Aug  7 00:19:54 stage-pdp01 CISE_Failed_Attempts 0000024715 1 0 2014-08-07 00:19:54.668 -07:00 0000287616 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19399, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:00:44 10.34.84.145 Aug  7 00:19:56 stage-pdp01 CISE_Failed_Attempts 0000024716 1 0 2014-08-07 00:19:56.672 -07:00 0000287620 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19400, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:00:46 10.34.84.145 Aug  7 00:19:58 stage-pdp01 CISE_Failed_Attempts 0000024717 1 0 2014-08-07 00:19:58.675 -07:00 0000287624 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19401, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 17:01:44 10.42.7.64 Aug  7 00:20:56 npf-sjca-pdp02 CISE_Failed_Attempts 0000371095 1 0 2014-08-07 00:20

<TRUNCATED>


[18/26] incubator-metron git commit: replace opensoc-streaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicBroParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicBroParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicBroParser.java
index bdb4c52..741fd75 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicBroParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicBroParser.java
@@ -17,22 +17,26 @@
 
 package com.opensoc.parsing.parsers;
 
+import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.opensoc.tldextractor.BasicTldExtractor;
+
 @SuppressWarnings("serial")
 public class BasicBroParser extends AbstractParser {
 
 	protected static final Logger _LOG = LoggerFactory
 			.getLogger(BasicBroParser.class);
-	JSONCleaner cleaner = new JSONCleaner();
+	private JSONCleaner cleaner = new JSONCleaner();
+	private BasicTldExtractor tldex = new BasicTldExtractor();
 
 	@SuppressWarnings("unchecked")
 	public JSONObject parse(byte[] msg) {
 
 		_LOG.trace("[OpenSOC] Starting to parse incoming message");
-		
+
 		String raw_message = null;
 
 		try {
@@ -42,33 +46,62 @@ public class BasicBroParser extends AbstractParser {
 
 			JSONObject cleaned_message = cleaner.Clean(raw_message);
 			_LOG.debug("[OpenSOC] Cleaned message: " + raw_message);
-			
-			if(cleaned_message == null || cleaned_message.isEmpty())
+
+			if (cleaned_message == null || cleaned_message.isEmpty())
 				throw new Exception("Unable to clean message: " + raw_message);
 
 			String key = cleaned_message.keySet().iterator().next().toString();
-			
-			if(key == null)
-				throw new Exception("Unable to retrieve key for message: " + raw_message);
+
+			if (key == null)
+				throw new Exception("Unable to retrieve key for message: "
+						+ raw_message);
 
 			JSONObject payload = (JSONObject) cleaned_message.get(key);
-			
-			if(payload == null)
-				throw new Exception("Unable to retrieve payload for message: " + raw_message);
+
+			String originalString = " |";
+			for (Object k : payload.keySet()) {
+				originalString = originalString + " " + k.toString() + ":"
+						+ payload.get(k).toString();
+			}
+			originalString = key.toUpperCase() + originalString;
+			payload.put("original_string", originalString);
+
+			if (payload == null)
+				throw new Exception("Unable to retrieve payload for message: "
+						+ raw_message);
+
+			if (payload.containsKey("ts")) {
+				String ts = payload.remove("ts").toString();
+				payload.put("timestamp", ts);
+				_LOG.trace("[OpenSOC] Added ts to: " + payload);
+			}
 
 			if (payload.containsKey("id.orig_h")) {
 				String source_ip = payload.remove("id.orig_h").toString();
 				payload.put("ip_src_addr", source_ip);
 				_LOG.trace("[OpenSOC] Added ip_src_addr to: " + payload);
+			} else if (payload.containsKey("tx_hosts")) {
+				JSONArray txHosts = (JSONArray) payload.remove("tx_hosts");
+				if (txHosts != null && !txHosts.isEmpty()) {
+					payload.put("ip_src_addr", txHosts.get(0));
+					_LOG.trace("[OpenSOC] Added ip_src_addr to: " + payload);
+				}
 			}
+			
 			if (payload.containsKey("id.resp_h")) {
 				String source_ip = payload.remove("id.resp_h").toString();
 				payload.put("ip_dst_addr", source_ip);
 				_LOG.trace("[OpenSOC] Added ip_dst_addr to: " + payload);
+			} else if (payload.containsKey("rx_hosts")) {
+				JSONArray rxHosts = (JSONArray) payload.remove("rx_hosts");
+				if (rxHosts != null && !rxHosts.isEmpty()) {
+					payload.put("ip_dst_addr", rxHosts.get(0));
+					_LOG.trace("[OpenSOC] Added ip_dst_addr to: " + payload);
+				}
 			}
+			
 			if (payload.containsKey("id.orig_p")) {
-				String source_port = payload.remove("id.orig_p")
-						.toString();
+				String source_port = payload.remove("id.orig_p").toString();
 				payload.put("ip_src_port", source_port);
 				_LOG.trace("[OpenSOC] Added ip_src_port to: " + payload);
 			}
@@ -77,38 +110,42 @@ public class BasicBroParser extends AbstractParser {
 				payload.put("ip_dst_port", dest_port);
 				_LOG.trace("[OpenSOC] Added ip_dst_port to: " + payload);
 			}
-			if (payload.containsKey("host")) {
-
-				String host = payload.get("host").toString().trim();
-				String[] parts = host.split("\\.");
-				int length = parts.length;
-				payload.put("tld", parts[length - 2] + "."
-						+ parts[length - 1]);
-				_LOG.trace("[OpenSOC] Added tld to: " + payload);
-			}
-			if (payload.containsKey("query")) {
-				String host = payload.get("query").toString();
-				String[] parts = host.split("\\.");
-				int length = parts.length;
-				payload.put("tld", parts[length - 2] + "."
-						+ parts[length - 1]);
-				_LOG.trace("[OpenSOC] Added tld to: " + payload);
-			}
+			
+//			if (payload.containsKey("host")) {
+//
+//				String host = payload.get("host").toString().trim();
+//				String tld = tldex.extractTLD(host);
+//
+//				payload.put("tld", tld);
+//				_LOG.trace("[OpenSOC] Added tld to: " + payload);
+//
+//			}
+//			if (payload.containsKey("query")) {
+//				String host = payload.get("query").toString();
+//				String[] parts = host.split("\\.");
+//				int length = parts.length;
+//				if (length >= 2) {
+//					payload.put("tld", parts[length - 2] + "."
+//							+ parts[length - 1]);
+//					_LOG.trace("[OpenSOC] Added tld to: " + payload);
+//				}
+//			}
 
 			_LOG.trace("[OpenSOC] Inner message: " + payload);
 
 			payload.put("protocol", key);
 			_LOG.debug("[OpenSOC] Returning parsed message: " + payload);
-			
+
 			return payload;
-			
+
 		} catch (Exception e) {
 
 			_LOG.error("Unable to Parse Message: " + raw_message);
 			e.printStackTrace();
 			return null;
 		}
-		
+
 	}
 
+	
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicFireEyeParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicFireEyeParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicFireEyeParser.java
new file mode 100644
index 0000000..baa2857
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicFireEyeParser.java
@@ -0,0 +1,234 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.lang3.StringUtils;
+import org.json.simple.JSONObject;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+
+import oi.thekraken.grok.api.Grok;
+import oi.thekraken.grok.api.Match;
+import oi.thekraken.grok.api.exception.GrokException;
+
+public class BasicFireEyeParser extends AbstractParser implements Serializable {
+
+	private static final long serialVersionUID = 6328907550159134550L;
+	//String tsRegex = "(.*)([a-z][A-Z]+)\\s+(\\d+)\\s+(\\d+\\:\\d+\\:\\d+)\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+)(.*)$";
+	String tsRegex ="([a-zA-Z]{3})\\s+(\\d+)\\s+(\\d+\\:\\d+\\:\\d+)\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+)";
+	
+	
+	Pattern tsPattern = Pattern.compile(tsRegex);
+	// private transient static OpenSOCGrok grok;
+	// private transient static InputStream pattern_url;
+
+	public BasicFireEyeParser() throws Exception {
+		// pattern_url = getClass().getClassLoader().getResourceAsStream(
+		// "patterns/fireeye");
+		//
+		// File file = ParserUtils.stream2file(pattern_url);
+		// grok = OpenSOCGrok.create(file.getPath());
+		//
+		// grok.compile("%{FIREEYE_BASE}");
+	}
+
+	@Override
+	public JSONObject parse(byte[] raw_message) {
+		String toParse = "";
+
+		try {
+
+			toParse = new String(raw_message, "UTF-8");
+
+			// String[] mTokens = toParse.split(" ");
+
+			String positveIntPattern = "<[1-9][0-9]*>";
+			Pattern p = Pattern.compile(positveIntPattern);
+			Matcher m = p.matcher(toParse);
+
+			String delimiter = "";
+
+			while (m.find()) {
+				delimiter = m.group();
+
+			}
+
+			if (!StringUtils.isBlank(delimiter)) {
+				String[] tokens = toParse.split(delimiter);
+
+				if (tokens.length > 1)
+					toParse = delimiter + tokens[1];
+
+			}
+
+			JSONObject toReturn = parseMessage(toParse);
+
+			toReturn.put("timestamp", getTimeStamp(toParse,delimiter));
+
+			return toReturn;
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}
+
+	}
+
+	public static Long convertToEpoch(String m, String d, String ts,
+			boolean adjust_timezone) throws ParseException {
+		d = d.trim();
+
+		if (d.length() <= 2)
+			d = "0" + d;
+
+		Date date = new SimpleDateFormat("MMM", Locale.ENGLISH).parse(m);
+		Calendar cal = Calendar.getInstance();
+		cal.setTime(date);
+		String month = String.valueOf(cal.get(Calendar.MONTH));
+		int year = Calendar.getInstance().get(Calendar.YEAR);
+
+		if (month.length() <= 2)
+			month = "0" + month;
+
+		String coglomerated_ts = year + "-" + month + "-" + d + " " + ts;
+
+		System.out.println(coglomerated_ts);
+
+		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+		if (adjust_timezone)
+			sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+		date = sdf.parse(coglomerated_ts);
+		long timeInMillisSinceEpoch = date.getTime();
+
+		return timeInMillisSinceEpoch;
+	}
+
+	private long getTimeStamp(String toParse,String delimiter) throws ParseException {
+		
+		long ts = 0;
+		String month = null;
+		String day = null;
+		String time = null;
+		Matcher tsMatcher = tsPattern.matcher(toParse);
+		if (tsMatcher.find()) {
+			month = tsMatcher.group(1);
+			day = tsMatcher.group(2);
+			time = tsMatcher.group(3);
+	
+				} else {
+			_LOG.warn("Unable to find timestamp in message: " + toParse);
+			ts = convertToEpoch(month, day, time, true);
+		}
+
+			return ts;
+	
+	}
+
+	private JSONObject parseMessage(String toParse) {
+
+		// System.out.println("Received message: " + toParse);
+
+		// OpenSOCMatch gm = grok.match(toParse);
+		// gm.captures();
+
+		JSONObject toReturn = new JSONObject();
+		//toParse = toParse.replaceAll("  ", " ");
+		String[] mTokens = toParse.split("\\s+");
+	 //mTokens = toParse.split(" ");
+
+		// toReturn.putAll(gm.toMap());
+
+		String id = mTokens[4];
+
+		// We are not parsing the fedata for multi part message as we cannot
+		// determine how we can split the message and how many multi part
+		// messages can there be.
+		// The message itself will be stored in the response.
+
+		String[] tokens = id.split("\\.");
+		if (tokens.length == 2) {
+
+			String[] array = Arrays.copyOfRange(mTokens, 1, mTokens.length - 1);
+			String syslog = Joiner.on(" ").join(array);
+
+			Multimap<String, String> multiMap = formatMain(syslog);
+
+			for (String key : multiMap.keySet()) {
+
+				String value = Joiner.on(",").join(multiMap.get(key));
+				toReturn.put(key, value.trim());
+			}
+
+		}
+
+		toReturn.put("original_string", toParse);
+
+		String ip_src_addr = (String) toReturn.get("dvc");
+		String ip_src_port = (String) toReturn.get("src_port");
+		String ip_dst_addr = (String) toReturn.get("dst_ip");
+		String ip_dst_port = (String) toReturn.get("dst_port");
+
+		if (ip_src_addr != null)
+			toReturn.put("ip_src_addr", ip_src_addr);
+		if (ip_src_port != null)
+			toReturn.put("ip_src_port", ip_src_port);
+		if (ip_dst_addr != null)
+			toReturn.put("ip_dst_addr", ip_dst_addr);
+		if (ip_dst_port != null)
+			toReturn.put("ip_dst_port", ip_dst_port);
+
+		System.out.println(toReturn);
+
+		return toReturn;
+	}
+
+	private Multimap<String, String> formatMain(String in) {
+		Multimap<String, String> multiMap = ArrayListMultimap.create();
+		String input = in.replaceAll("cn3", "dst_port")
+				.replaceAll("cs5", "cncHost").replaceAll("proto", "protocol")
+				.replaceAll("rt=", "timestamp=").replaceAll("cs1", "malware")
+				.replaceAll("dst=", "dst_ip=")
+				.replaceAll("shost", "src_hostname")
+				.replaceAll("dmac", "dst_mac").replaceAll("smac", "src_mac")
+				.replaceAll("spt", "src_port")
+				.replaceAll("\\bsrc\\b", "src_ip");
+		String[] tokens = input.split("\\|");
+
+		if (tokens.length > 0) {
+			String message = tokens[tokens.length - 1];
+
+			String pattern = "([\\w\\d]+)=([^=]*)(?=\\s*\\w+=|\\s*$) ";
+			Pattern p = Pattern.compile(pattern);
+			Matcher m = p.matcher(message);
+
+			while (m.find()) {
+				String[] str = m.group().split("=");
+				multiMap.put(str[0], str[1]);
+
+			}
+
+		}
+		return multiMap;
+	}
+
+	
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicIseParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicIseParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicIseParser.java
index f7a4b71..6990273 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicIseParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicIseParser.java
@@ -79,4 +79,6 @@ public class BasicIseParser extends AbstractParser {
 		}
 		return null;
 	}
+
+	
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLancopeParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLancopeParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLancopeParser.java
index 23f6c62..73682ea 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLancopeParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLancopeParser.java
@@ -69,4 +69,6 @@ public class BasicLancopeParser extends AbstractParser {
 			return null;
 		}
 	}
+
+	
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLogstashParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLogstashParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLogstashParser.java
new file mode 100644
index 0000000..10bfcd2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicLogstashParser.java
@@ -0,0 +1,65 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.UnsupportedEncodingException;
+import java.text.SimpleDateFormat;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+public class BasicLogstashParser extends AbstractParser {
+
+	@Override
+	public JSONObject parse(byte[] raw_message) {
+		
+		try {
+			
+			/*
+			 * We need to create a new JSONParser each time because its 
+			 * not serializable and the parser is created on the storm nimbus
+			 * node, then transfered to the workers.
+			 */
+			JSONParser jsonParser = new JSONParser();
+			String rawString = new String(raw_message, "UTF-8");
+			JSONObject rawJson = (JSONObject) jsonParser.parse(rawString);
+			
+			// remove logstash meta fields
+			rawJson.remove("@version");
+			rawJson.remove("type");
+			rawJson.remove("host");
+			rawJson.remove("tags");
+			
+			// rename other keys
+			rawJson = mutate(rawJson, "message", "original_string");
+			rawJson = mutate(rawJson, "src_ip", "ip_src_addr");
+			rawJson = mutate(rawJson, "dst_ip", "ip_dst_addr");
+			rawJson = mutate(rawJson, "src_port", "ip_src_port");
+			rawJson = mutate(rawJson, "dst_port", "ip_dst_port");
+			rawJson = mutate(rawJson, "src_ip", "ip_src_addr");
+			
+			// convert timestamp to milli since epoch
+			rawJson.put("timestamp", LogstashToEpoch((String) rawJson.remove("@timestamp")));
+
+			return rawJson;
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}	
+	}
+	
+	private JSONObject mutate(JSONObject json, String oldKey, String newKey) {
+		if (json.containsKey(oldKey)) {
+			json.put(newKey, json.remove(oldKey));
+		}	
+		return json;
+	}
+	
+	private long LogstashToEpoch(String timestamp) throws java.text.ParseException {
+		SimpleDateFormat logstashDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+		return logstashDateFormat.parse(timestamp).getTime();
+		
+	}
+
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicPaloAltoFirewallParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicPaloAltoFirewallParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicPaloAltoFirewallParser.java
new file mode 100644
index 0000000..315ca3d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicPaloAltoFirewallParser.java
@@ -0,0 +1,184 @@
+package com.opensoc.parsing.parsers;
+
+
+import org.json.simple.JSONObject;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import com.opensoc.parser.interfaces.MessageParser;
+
+public class BasicPaloAltoFirewallParser extends AbstractParser implements MessageParser{
+
+	private static final long serialVersionUID = 3147090149725343999L;
+	public static final String PaloAltoDomain  = "palo_alto_domain";
+	public static final String ReceiveTime  = "receive_time";
+	public static final String SerialNum  = "serial_num";
+	public static final String Type  = "type";
+	public static final String ThreatContentType  = "threat_content_type";
+	public static final String ConfigVersion  = "config_version";
+	public static final String GenerateTime  = "generate_time";
+	public static final String SourceAddress  = "source_address";
+	public static final String DestinationAddress  = "destination_address";
+	public static final String NATSourceIP  = "nat_source_ip";
+	public static final String NATDestinationIP  = "nat_destination_ip";
+	public static final String Rule  = "rule";
+	public static final String SourceUser  = "source_user";
+	public static final String DestinationUser  = "destination_user";
+	public static final String Application  = "application";
+	public static final String VirtualSystem  = "virtual_system";
+	public static final String SourceZone  = "source_zone";
+	public static final String DestinationZone  = "destination_zone";
+	public static final String InboundInterface  = "inbound_interface";
+	public static final String OutboundInterface  = "outbound_interface";
+	public static final String LogAction  = "log_action";
+	public static final String TimeLogged  = "time_logged";
+	public static final String SessionID  = "session_id";
+	public static final String RepeatCount  = "repeat_count";
+	public static final String SourcePort  = "source_port";
+	public static final String DestinationPort  = "destination_port";
+	public static final String NATSourcePort  = "nats_source_port";
+	public static final String NATDestinationPort  = "nats_destination_port";
+	public static final String Flags  = "flags";
+	public static final String IPProtocol  = "ip_protocol";
+	public static final String Action  = "action";
+	
+	//Threat
+	public static final String URL  = "url";
+	public static final String HOST  = "host";
+	public static final String ThreatContentName  = "threat_content_name";
+	public static final String Category  = "category";
+	public static final String Direction  = "direction";
+	public static final String Seqno  = "seqno";
+	public static final String ActionFlags  = "action_flags";
+	public static final String SourceCountry  = "source_country";
+	public static final String DestinationCountry  = "destination_country";
+	public static final String Cpadding  = "cpadding";
+	public static final String ContentType = "content_type";
+	
+	//Traffic
+	public static final String Bytes = "content_type";
+	public static final String BytesSent = "content_type";
+	public static final String BytesReceived = "content_type";
+	public static final String Packets = "content_type";
+	public static final String StartTime = "content_type";
+	public static final String ElapsedTimeInSec = "content_type";
+	public static final String Padding = "content_type";
+	public static final String PktsSent = "pkts_sent";
+	public static final String PktsReceived = "pkts_received";
+	
+
+	@SuppressWarnings({ "unchecked", "unused" })
+	public JSONObject parse(byte[] msg) {
+
+		JSONObject outputMessage = new JSONObject();
+		String toParse = "";
+
+		try {
+
+			toParse = new String(msg, "UTF-8");
+			_LOG.debug("Received message: " + toParse);
+			
+			
+			parseMessage(toParse,outputMessage);
+			
+				outputMessage.put("timestamp", System.currentTimeMillis());
+				outputMessage.put("ip_src_addr", outputMessage.remove("source_address"));
+				outputMessage.put("ip_src_port", outputMessage.remove("source_port"));
+				outputMessage.put("ip_dst_addr", outputMessage.remove("destination_address"));
+				outputMessage.put("ip_dst_port", outputMessage.remove("destination_port"));
+				outputMessage.put("protocol", outputMessage.remove("ip_protocol"));
+				
+				outputMessage.put("original_string", toParse);
+			return outputMessage;
+		} catch (Exception e) {
+			e.printStackTrace();
+			_LOG.error("Failed to parse: " + toParse);
+			return null;
+		}
+	}
+		
+		@SuppressWarnings("unchecked")
+		private void parseMessage(String message,JSONObject outputMessage) {
+			
+			String[] tokens = message.split(",");
+			
+			String type = tokens[3].trim();
+			
+			//populate common objects
+			outputMessage.put(PaloAltoDomain, tokens[0].trim());
+			outputMessage.put(ReceiveTime, tokens[1].trim());
+			outputMessage.put(SerialNum, tokens[2].trim());
+			outputMessage.put(Type, type);
+			outputMessage.put(ThreatContentType, tokens[4].trim());
+			outputMessage.put(ConfigVersion, tokens[5].trim());
+			outputMessage.put(GenerateTime, tokens[6].trim());
+			outputMessage.put(SourceAddress, tokens[7].trim());
+			outputMessage.put(DestinationAddress, tokens[8].trim());
+			outputMessage.put(NATSourceIP, tokens[9].trim());
+			outputMessage.put(NATDestinationIP, tokens[10].trim());
+			outputMessage.put(Rule, tokens[11].trim());
+			outputMessage.put(SourceUser, tokens[12].trim());
+			outputMessage.put(DestinationUser, tokens[13].trim());
+			outputMessage.put(Application, tokens[14].trim());
+			outputMessage.put(VirtualSystem, tokens[15].trim());
+			outputMessage.put(SourceZone, tokens[16].trim());
+			outputMessage.put(DestinationZone, tokens[17].trim());
+			outputMessage.put(InboundInterface, tokens[18].trim());
+			outputMessage.put(OutboundInterface, tokens[19].trim());
+			outputMessage.put(LogAction, tokens[20].trim());
+			outputMessage.put(TimeLogged, tokens[21].trim());
+			outputMessage.put(SessionID, tokens[22].trim());
+			outputMessage.put(RepeatCount, tokens[23].trim());
+			outputMessage.put(SourcePort, tokens[24].trim());
+			outputMessage.put(DestinationPort, tokens[25].trim());
+			outputMessage.put(NATSourcePort, tokens[26].trim());
+			outputMessage.put(NATDestinationPort, tokens[27].trim());
+			outputMessage.put(Flags, tokens[28].trim());
+			outputMessage.put(IPProtocol, tokens[29].trim());
+			outputMessage.put(Action, tokens[30].trim());
+			
+			
+			if("THREAT".equals(type.toUpperCase())) {
+				outputMessage.put(URL, tokens[31].trim());
+				try {
+					URL url = new URL(tokens[31].trim());
+					outputMessage.put(HOST, url.getHost());
+				} catch (MalformedURLException e) {
+				}
+				outputMessage.put(ThreatContentName, tokens[32].trim());
+				outputMessage.put(Category, tokens[33].trim());
+				outputMessage.put(Direction, tokens[34].trim());
+				outputMessage.put(Seqno, tokens[35].trim());
+				outputMessage.put(ActionFlags, tokens[36].trim());
+				outputMessage.put(SourceCountry, tokens[37].trim());
+				outputMessage.put(DestinationCountry, tokens[38].trim());
+				outputMessage.put(Cpadding, tokens[39].trim());
+				outputMessage.put(ContentType, tokens[40].trim());
+				
+			}
+			else
+			{
+				outputMessage.put(Bytes, tokens[31].trim());
+				outputMessage.put(BytesSent, tokens[32].trim());
+				outputMessage.put(BytesReceived, tokens[33].trim());
+				outputMessage.put(Packets, tokens[34].trim());
+				outputMessage.put(StartTime, tokens[35].trim());
+				outputMessage.put(ElapsedTimeInSec, tokens[36].trim());
+				outputMessage.put(Category, tokens[37].trim());
+				outputMessage.put(Padding, tokens[38].trim());
+				outputMessage.put(Seqno, tokens[39].trim());
+				outputMessage.put(ActionFlags, tokens[40].trim());
+				outputMessage.put(SourceCountry, tokens[41].trim());
+				outputMessage.put(DestinationCountry, tokens[42].trim());
+				outputMessage.put(Cpadding, tokens[43].trim());
+				outputMessage.put(PktsSent, tokens[44].trim());
+				outputMessage.put(PktsReceived, tokens[45].trim());
+			}
+			
+		}
+
+		
+		
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicSourcefireParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicSourcefireParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicSourcefireParser.java
index b8eaf21..be6d8ff 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicSourcefireParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/BasicSourcefireParser.java
@@ -17,15 +17,21 @@
 
 package com.opensoc.parsing.parsers;
 
+import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.json.simple.JSONObject;
 
+import com.opensoc.parser.interfaces.MessageParser;
+
 @SuppressWarnings("serial")
-public class BasicSourcefireParser extends AbstractParser {
+public class BasicSourcefireParser extends AbstractParser implements MessageParser{
 
 	public static final String hostkey = "host";
 	String domain_name_regex = "([^\\.]+)\\.([a-z]{2}|[a-z]{3}|([a-z]{2}\\.[a-z]{2}))$";
+	String sidRegex = "(.*)(\\[[0-9]+:[0-9]+:[0-9]\\])(.*)$";
+	//String sidRegex = "(\\[[0-9]+:[0-9]+:[0-9]\\])(.*)$";
+	Pattern sidPattern = Pattern.compile(sidRegex);	
 	Pattern pattern = Pattern.compile(domain_name_regex);
 
 	@SuppressWarnings({ "unchecked", "unused" })
@@ -76,8 +82,19 @@ public class BasicSourcefireParser extends AbstractParser {
 			}
 
 			payload.put("timestamp", System.currentTimeMillis());
-			payload.put("original_string",
-					toParse.substring(0, toParse.indexOf("{")));
+			
+			Matcher sidMatcher = sidPattern.matcher(toParse);
+			String originalString = null;
+			String signatureId = "";
+			if (sidMatcher.find()) {
+				signatureId = sidMatcher.group(2);
+				originalString = sidMatcher.group(1) +" "+ sidMatcher.group(2) + " " + sidMatcher.group(3);
+			} else {
+				_LOG.warn("Unable to find SID in message: " + toParse);
+				originalString = toParse;
+			}
+			payload.put("original_string", originalString);
+			payload.put("signature_id", signatureId);
 
 			return payload;
 		} catch (Exception e) {
@@ -87,5 +104,7 @@ public class BasicSourcefireParser extends AbstractParser {
 		}
 	}
 
+	
+
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokAsaParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokAsaParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokAsaParser.java
new file mode 100644
index 0000000..ff75313
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokAsaParser.java
@@ -0,0 +1,269 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.TimeZone;
+
+import oi.thekraken.grok.api.Grok;
+import oi.thekraken.grok.api.Match;
+import oi.thekraken.grok.api.exception.GrokException;
+
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+
+public class GrokAsaParser extends AbstractParser implements Serializable {
+
+	private static final long serialVersionUID = 945353287115350798L;
+	private transient  Grok  grok;
+	Map<String, String> patternMap;
+	private transient  Map<String, Grok> grokMap;
+	private transient  InputStream pattern_url;
+
+	public static final String PREFIX = "stream2file";
+	public static final String SUFFIX = ".tmp";
+
+	public static File stream2file(InputStream in) throws IOException {
+		final File tempFile = File.createTempFile(PREFIX, SUFFIX);
+		tempFile.deleteOnExit();
+		try (FileOutputStream out = new FileOutputStream(tempFile)) {
+			IOUtils.copy(in, out);
+		}
+		return tempFile;
+	}
+
+	public GrokAsaParser() throws Exception {
+		// pattern_url = Resources.getResource("patterns/asa");
+
+		pattern_url = getClass().getClassLoader().getResourceAsStream(
+				"patterns/asa");
+
+		File file = stream2file(pattern_url);
+		grok = Grok.create(file.getPath());
+
+		patternMap = getPatternMap();
+		grokMap = getGrokMap();
+
+		grok.compile("%{CISCO_TAGGED_SYSLOG}");
+	}
+
+	public GrokAsaParser(String filepath) throws Exception {
+
+		grok = Grok.create(filepath);
+		// grok.getNamedRegexCollection().put("ciscotag","CISCOFW302013_302014_302015_302016");
+		grok.compile("%{CISCO_TAGGED_SYSLOG}");
+
+	}
+
+	public GrokAsaParser(String filepath, String pattern) throws Exception {
+
+		grok = Grok.create(filepath);
+		grok.compile("%{" + pattern + "}");
+	}
+
+	private Map<String, Object> getMap(String pattern, String text)
+			throws GrokException {
+
+		Grok g = grokMap.get(pattern);
+		if (g != null) {
+			Match gm = g.match(text);
+			gm.captures();
+			return gm.toMap();
+		} else {
+			return new HashMap<String, Object>();
+		}
+
+	}
+
+	private Map<String, Grok> getGrokMap() throws GrokException, IOException {
+		Map<String, Grok> map = new HashMap<String, Grok>();
+
+		for (Map.Entry<String, String> entry : patternMap.entrySet()) {
+			File file = stream2file(pattern_url);
+			Grok grok = Grok.create(file.getPath());
+			grok.compile("%{" + entry.getValue() + "}");
+
+			map.put(entry.getValue(), grok);
+
+		}
+
+		return map;
+	}
+
+	private Map<String, String> getPatternMap() {
+		Map<String, String> map = new HashMap<String, String>();
+
+		map.put("ASA-2-106001", "CISCOFW106001");
+		map.put("ASA-2-106006", "CISCOFW106006_106007_106010");
+		map.put("ASA-2-106007", "CISCOFW106006_106007_106010");
+		map.put("ASA-2-106010", "CISCOFW106006_106007_106010");
+		map.put("ASA-3-106014", "CISCOFW106014");
+		map.put("ASA-6-106015", "CISCOFW106015");
+		map.put("ASA-1-106021", "CISCOFW106021");
+		map.put("ASA-4-106023", "CISCOFW106023");
+		map.put("ASA-5-106100", "CISCOFW106100");
+		map.put("ASA-6-110002", "CISCOFW110002");
+		map.put("ASA-6-302010", "CISCOFW302010");
+		map.put("ASA-6-302013", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302014", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302015", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302016", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302020", "CISCOFW302020_302021");
+		map.put("ASA-6-302021", "CISCOFW302020_302021");
+		map.put("ASA-6-305011", "CISCOFW305011");
+		map.put("ASA-3-313001", "CISCOFW313001_313004_313008");
+		map.put("ASA-3-313004", "CISCOFW313001_313004_313008");
+		map.put("ASA-3-313008", "CISCOFW313001_313004_313008");
+		map.put("ASA-4-313005", "CISCOFW313005");
+		map.put("ASA-4-402117", "CISCOFW402117");
+		map.put("ASA-4-402119", "CISCOFW402119");
+		map.put("ASA-4-419001", "CISCOFW419001");
+		map.put("ASA-4-419002", "CISCOFW419002");
+		map.put("ASA-4-500004", "CISCOFW500004");
+		map.put("ASA-6-602303", "CISCOFW602303_602304");
+		map.put("ASA-6-602304", "CISCOFW602303_602304");
+		map.put("ASA-7-710001", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710002", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710003", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710005", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710006", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-6-713172", "CISCOFW713172");
+		map.put("ASA-4-733100", "CISCOFW733100");
+		map.put("ASA-6-305012", "CISCOFW305012");
+		map.put("ASA-7-609001", "CISCOFW609001");
+		map.put("ASA-7-609002", "CISCOFW609002");
+
+		return map;
+	}
+
+	public static Long convertToEpoch(String m, String d, String ts,
+			boolean adjust_timezone) throws ParseException {
+		d = d.trim();
+
+		if (d.length() <= 2)
+			d = "0" + d;
+
+		Date date = new SimpleDateFormat("MMM", Locale.ENGLISH).parse(m);
+		Calendar cal = Calendar.getInstance();
+		cal.setTime(date);
+		String month = String.valueOf(cal.get(Calendar.MONTH));
+		int year = Calendar.getInstance().get(Calendar.YEAR);
+
+		if (month.length() <= 2)
+			month = "0" + month;
+
+		String coglomerated_ts = year + "-" + month + "-" + d + " " + ts;
+
+		System.out.println(coglomerated_ts);
+
+		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+		if (adjust_timezone)
+			sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+		date = sdf.parse(coglomerated_ts);
+		long timeInMillisSinceEpoch = date.getTime();
+
+		return timeInMillisSinceEpoch;
+	}
+	
+	@Override
+	public void init() {
+		// pattern_url = Resources.getResource("patterns/asa");
+
+				pattern_url = getClass().getClassLoader().getResourceAsStream(
+						"patterns/asa");
+
+				File file = null;
+				try {
+					file = stream2file(pattern_url);
+				} catch (IOException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+				try {
+					grok = Grok.create(file.getPath());
+				} catch (GrokException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+
+				patternMap = getPatternMap();
+				try {
+					grokMap = getGrokMap();
+				} catch (GrokException | IOException e1) {
+					// TODO Auto-generated catch block
+					e1.printStackTrace();
+				}
+
+				try {
+					grok.compile("%{CISCO_TAGGED_SYSLOG}");
+				} catch (GrokException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+	}
+
+	@Override
+	public JSONObject parse(byte[] raw_message) {
+
+		String toParse = "";
+		JSONObject toReturn;
+
+		try {
+
+			toParse = new String(raw_message, "UTF-8");
+
+			System.out.println("Received message: " + toParse);
+
+			Match gm = grok.match(toParse);
+			gm.captures();
+
+			toReturn = new JSONObject();
+
+			toReturn.putAll(gm.toMap());
+
+			String str = toReturn.get("ciscotag").toString();
+			String pattern = patternMap.get(str);
+
+			Map<String, Object> response = getMap(pattern, toParse);
+
+			toReturn.putAll(response);
+
+			//System.out.println("*******I MAPPED: " + toReturn);
+
+			toReturn.put("timestamp", convertToEpoch(toReturn.get("MONTH").toString(), toReturn
+					.get("MONTHDAY").toString(), 
+					toReturn.get("TIME").toString(),
+					true));
+			
+			toReturn.remove("MONTHDAY");
+			toReturn.remove("TIME");
+			toReturn.remove("MINUTE");
+			toReturn.remove("HOUR");
+			toReturn.remove("YEAR");
+			toReturn.remove("SECOND");
+			
+			toReturn.put("ip_src_addr", toReturn.remove("IPORHOST"));
+			toReturn.put("original_string", toParse);
+
+			return toReturn;
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}
+
+	}
+
+	
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokSourcefireParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokSourcefireParser.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokSourcefireParser.java
index c75bfd5..a5eabcd 100644
--- a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokSourcefireParser.java
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokSourcefireParser.java
@@ -71,4 +71,6 @@ public class GrokSourcefireParser extends AbstractParser{
 		
 	}
 
+	
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokUtils.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokUtils.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokUtils.java
new file mode 100644
index 0000000..de2ba54
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/GrokUtils.java
@@ -0,0 +1,26 @@
+package com.opensoc.parsing.parsers;
+import java.io.Serializable;
+
+import com.google.code.regexp.Pattern;
+
public class GrokUtils implements Serializable {

	private static final long serialVersionUID = 7465176887422419286L;
	/**
	   * Extracts a Grok reference like %{FOO} to FOO; also handles a Grok
	   * reference with a semantic (%{FOO:bar}) or an inline definition
	   * (%{FOO=regex}).  Named groups captured:
	   *   name       - the full reference, e.g. "FOO:bar"
	   *   pattern    - the pattern name alone, e.g. "FOO"
	   *   subname    - the optional semantic part, e.g. "bar"
	   *   definition - the optional inline regex definition
	   * NOTE(review): the [A-z] ranges also match the characters between
	   * 'Z' and 'a' (including '_'); pattern names containing underscores
	   * rely on this, so do not narrow it to [A-Za-z] without checking
	   * every pattern file.
	   */
	  public static final Pattern GROK_PATTERN = Pattern.compile(
	      "%\\{" +
	      "(?<name>" +
	        "(?<pattern>[A-z0-9]+)" +
	          "(?::(?<subname>[A-z0-9_:;\\/\\s\\.]+))?" +
	          ")" +
	          "(?:=(?<definition>" +
	            "(?:" +
	            "(?:[^{}]+|\\.+)+" +
	            ")+" +
	            ")" +
	      ")?" +
	      "\\}");

	}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCConverter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCConverter.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCConverter.java
new file mode 100644
index 0000000..5d495a6
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCConverter.java
@@ -0,0 +1,183 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.Serializable;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Map;
+
+public class OpenSOCConverter implements Serializable {
+	
+	private static final long serialVersionUID = 4319897815285922962L;
+	public static Map<String, IConverter<?>> _converters = new HashMap<String, IConverter<?>>();
+
+	static {
+		_converters.put("byte", new ByteConverter());
+		_converters.put("boolean", new BooleanConverter());
+		_converters.put("short", new ShortConverter());
+		_converters.put("int", new IntegerConverter());
+		_converters.put("long", new LongConverter());
+		_converters.put("float", new FloatConverter());
+		_converters.put("double", new DoubleConverter());
+		_converters.put("date", new DateConverter());
+		_converters.put("datetime", new DateConverter());
+		_converters.put("string", new StringConverter());
+
+	}
+	
+	private static IConverter getConverter(String key) throws Exception {
+		IConverter converter = _converters.get(key);
+		if (converter == null) {
+			throw new Exception("Invalid data type :" + key);
+		}
+		return converter;
+	}
+	
+	public static KeyValue convert(String key, Object value) {
+		String[] spec = key.split(";");
+		try {
+			if (spec.length == 1) {
+				return new KeyValue(spec[0], value);
+			} else if (spec.length == 2) {
+				return new KeyValue(spec[0], getConverter(spec[1]).convert(String.valueOf(value)));
+			} else if (spec.length == 3) {
+				return new KeyValue(spec[0], getConverter(spec[1]).convert(String.valueOf(value), spec[2]));
+			} else {
+				return new KeyValue(spec[0], value, "Unsupported spec :" + key);
+			}
+		} catch (Exception e) {
+			return new KeyValue(spec[0], value, e.toString());
+		}
+	}
+}
+
+
+//
+// KeyValue
+//
+
/**
 * One parsed field produced by the Grok converter: a key, its (possibly
 * type-converted) value, and an optional description of a conversion failure.
 */
class KeyValue {

	private String key;
	private Object value;
	private String grokFailure;

	/** Build a successfully converted key/value pair (no failure recorded). */
	public KeyValue(String key, Object value) {
		this(key, value, null);
	}

	/**
	 * Build a key/value pair whose conversion failed.
	 *
	 * @param grokFailure description of the conversion error
	 */
	public KeyValue(String key, Object value, String grokFailure) {
		this.key = key;
		this.value = value;
		this.grokFailure = grokFailure;
	}

	/** @return true when a conversion failure was recorded */
	public boolean hasGrokFailure() {
		return grokFailure != null;
	}

	public String getGrokFailure() {
		return grokFailure;
	}

	public String getKey() {
		return key;
	}

	public void setKey(String key) {
		this.key = key;
	}

	public Object getValue() {
		return value;
	}

	public void setValue(Object value) {
		this.value = value;
	}
}
+
+
+//
+// Converters
+//
/**
 * Base class for the value converters registered in
 * {@code OpenSOCConverter._converters}.
 *
 * @param <T> the target type produced by this converter
 */
abstract class IConverter<T> {
	
	/**
	 * Convert using an explicit input format (e.g. a date pattern).
	 * Default implementation ignores the format and returns null;
	 * subclasses that support formats (e.g. DateConverter) override it.
	 */
	public T convert(String value, String informat) throws Exception {
		return null;
	}
	
	/** Convert the raw string value into the target type. */
	public abstract T convert(String value) throws Exception;
}
+
+class ByteConverter extends IConverter<Byte> {
+	@Override
+	public Byte convert(String value) throws Exception {
+		return Byte.parseByte(value);
+	}
+}
+
+class BooleanConverter extends IConverter<Boolean> {
+	@Override
+	public Boolean convert(String value) throws Exception {
+		return Boolean.parseBoolean(value);
+	}
+}
+
+class ShortConverter extends IConverter<Short> {
+	@Override
+	public Short convert(String value) throws Exception {
+		return Short.parseShort(value);
+	}
+}
+
+class IntegerConverter extends IConverter<Integer> {
+	@Override
+	public Integer convert(String value) throws Exception {
+		return Integer.parseInt(value);
+	}
+}
+
+class LongConverter extends IConverter<Long> {
+	@Override
+	public Long convert(String value) throws Exception {
+		return Long.parseLong(value);
+	}
+}
+
+class FloatConverter extends IConverter<Float> {
+	@Override
+	public Float convert(String value) throws Exception {
+		return Float.parseFloat(value);
+	}
+}
+
+class DoubleConverter extends IConverter<Double> {
+	@Override
+	public Double convert(String value) throws Exception {
+		return Double.parseDouble(value);
+	}
+}
+
/** Identity converter: returns the raw string value unchanged. */
class StringConverter extends IConverter<String> {
	@Override
	public String convert(String value) throws Exception {
		return value;
	}
}
+
/**
 * Parses date strings into {@link Date} objects; used for both the "date"
 * and "datetime" type specs.
 */
class DateConverter extends IConverter<Date> {
	@Override
	public Date convert(String value) throws Exception {
		// DateFormat.getInstance() uses the JVM default locale's SHORT
		// date/time style, so the accepted input format varies with the
		// runtime locale. A fresh instance is created per call, so there is
		// no shared-formatter thread-safety issue here.
		return DateFormat.getInstance().parse(value);
	}
	
	@Override
	public Date convert(String value, String informat) throws Exception {
		// New SimpleDateFormat per call: slower, but SimpleDateFormat is not
		// thread-safe so it must not be cached without synchronization.
		SimpleDateFormat formatter =  new SimpleDateFormat(informat);
		return formatter.parse(value);
	}
	
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGarbage.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGarbage.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGarbage.java
new file mode 100644
index 0000000..1f7f3c8
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGarbage.java
@@ -0,0 +1,130 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
/**
 * Post-processing helper for Grok captures: removes unwanted fields and
 * renames fields in a parsed message map before it is exported.
 */
public class OpenSOCGarbage implements Serializable {

	private static final long serialVersionUID = -7158895945268018603L;

	/** Field names to drop from the output map. */
	private List<String> toRemove;
	/** Mapping of original field name -> replacement field name. */
	private Map<String, Object> toRename;

	/**
	 * Create a new {@code Garbage} object. The field "UNWANTED" is always
	 * scheduled for removal by default.
	 */
	public OpenSOCGarbage() {
		toRemove = new ArrayList<String>();
		toRename = new TreeMap<String, Object>();
		/** this is a default value to remove */
		toRemove.add("UNWANTED");
	}

	/**
	 * Register a rename to apply when exporting the final output.
	 * Null or empty names are silently ignored.
	 *
	 * @param origin original field name
	 * @param value new field name to apply
	 */
	public void addToRename(String origin, Object value) {
		if (origin == null || value == null) {
			return;
		}
		if (!origin.isEmpty() && !value.toString().isEmpty()) {
			toRename.put(origin, value);
		}
	}

	/**
	 * Register a field to remove when exporting the final output.
	 * Null or empty names are silently ignored.
	 *
	 * @param name name of the field to remove
	 */
	public void addToRemove(String name) {
		if (name == null) {
			return;
		}
		if (!name.isEmpty()) {
			toRemove.add(name);
		}
	}

	/**
	 * Register a list of field names to remove when exporting the final output.
	 *
	 * @param lst field names to remove; null or empty lists are ignored
	 */
	public void addToRemove(List<String> lst) {
		if (lst == null) {
			return;
		}
		if (!lst.isEmpty()) {
			toRemove.addAll(lst);
		}
	}

	/**
	 * Remove the unwanted entries from the map.
	 *
	 * @param map map to clean (null and empty maps are tolerated)
	 * @return number of entries actually removed
	 */
	public int remove(Map<String, Object> map) {
		int item = 0;

		if (map == null || map.isEmpty()) {
			return item;
		}

		for (Iterator<Map.Entry<String, Object>> it = map.entrySet().iterator(); it.hasNext();) {
			Map.Entry<String, Object> entry = it.next();
			// BUG FIX: the original scanned toRemove with an inner loop and
			// called it.remove() once per matching element, so a duplicate
			// name in toRemove triggered IllegalStateException on the second
			// remove. A membership test removes (and counts) each map entry
			// at most once.
			if (toRemove.contains(entry.getKey())) {
				it.remove();
				item++;
			}
		}
		return item;
	}

	/**
	 * Rename entries of the map according to the registered renames.
	 * NOTE(review): if the target name already exists in the map, its
	 * previous value is overwritten (same as the original implementation).
	 *
	 * @param map map whose keys should be renamed
	 * @return number of entries renamed
	 */
	public int rename(Map<String, Object> map) {
		int item = 0;

		if (map == null) {
			return item;
		}

		if (map.isEmpty() || toRename.isEmpty()) {
			return item;
		}

		for (Iterator<Map.Entry<String, Object>> it = toRename.entrySet().iterator(); it.hasNext();) {
			Map.Entry<String, Object> entry = it.next();
			if (map.containsKey(entry.getKey())) {
				Object obj = map.remove(entry.getKey());
				map.put(entry.getValue().toString(), obj);
				item++;
			}
		}
		return item;
	}
}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGrok.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGrok.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGrok.java
new file mode 100644
index 0000000..0cf998e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCGrok.java
@@ -0,0 +1,367 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.Reader;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.code.regexp.Matcher;
+import com.google.code.regexp.Pattern;
+
+public class OpenSOCGrok implements Serializable {
+
+	private static final long serialVersionUID = 2002441320075020721L;
+	private static final Logger LOG = LoggerFactory.getLogger(OpenSOCGrok.class);
+	  /**
+	   * Named regex of the originalGrokPattern.
+	   */
+	  private String namedRegex;
+	  /**
+	   * Map of the named regex of the originalGrokPattern
+	   * with id = namedregexid and value = namedregex.
+	   */
+	  private Map<String, String> namedRegexCollection;
+	  /**
+	   * Original {@code Grok} pattern (expl: %{IP}).
+	   */
+	  private String originalGrokPattern;
+	  /**
+	   * Pattern of the namedRegex.
+	   */
+	  private Pattern compiledNamedRegex;
+	  /**
+	   * {@code Grok} discovery.
+	   */
+	  private Map<String, String> grokPatternDefinition;
+
+	  /** only use in grok discovery. */
+	  private String savedPattern;
+
+	  /**
+	   * Create Empty {@code Grok}.
+	   */
+	  public static final OpenSOCGrok EMPTY = new OpenSOCGrok();
+
+	  /**
+	   * Create a new <i>empty</i>{@code Grok} object.
+	   */
+	  public OpenSOCGrok() {
+	    originalGrokPattern = StringUtils.EMPTY;
+	    namedRegex = StringUtils.EMPTY;
+	    compiledNamedRegex = null;
+	    grokPatternDefinition = new TreeMap<String, String>();
+	    namedRegexCollection = new TreeMap<String, String>();
+	    savedPattern = StringUtils.EMPTY;
+	  }
+
+	  public String getSaved_pattern() {
+	    return savedPattern;
+	  }
+
+	  public void setSaved_pattern(String savedpattern) {
+	    this.savedPattern = savedpattern;
+	  }
+
+	  /**
+	   * Create a {@code Grok} instance with the given patterns file and
+	   * a {@code Grok} pattern.
+	   *
+	   * @param grokPatternPath Path to the pattern file
+	   * @param grokExpression  - <b>OPTIONAL</b> - Grok pattern to compile ex: %{APACHELOG}
+	   * @return {@code Grok} instance
+	   * @throws Exception
+	   */
+	  public static OpenSOCGrok create(String grokPatternPath, String grokExpression)
+	      throws Exception {
+	    if (StringUtils.isBlank(grokPatternPath)) {
+	      throw new Exception("{grokPatternPath} should not be empty or null");
+	    }
+	    OpenSOCGrok g = new OpenSOCGrok();
+	    g.addPatternFromFile(grokPatternPath);
+	    if (StringUtils.isNotBlank(grokExpression)) {
+	      g.compile(grokExpression);
+	    }
+	    return g;
+	  }
+
+	  /**
+	   * Create a {@code Grok} instance with the given grok patterns file.
+	   *
+	   * @param  grokPatternPath : Path to the pattern file
+	   * @return Grok
+	   * @throws Exception
+	   */
+	  public static OpenSOCGrok create(String grokPatternPath) throws Exception {
+	    return create(grokPatternPath, null);
+	  }
+
+	  /**
+	   * Add custom pattern to grok in the runtime.
+	   *
+	   * @param name : Pattern Name
+	   * @param pattern : Regular expression Or {@code Grok} pattern
+	   * @throws Exception
+	   **/
+	  public void addPattern(String name, String pattern) throws Exception {
+	    if (StringUtils.isBlank(name)) {
+	      throw new Exception("Invalid Pattern name");
+	    }
+	    if (StringUtils.isBlank(name)) {
+	      throw new Exception("Invalid Pattern");
+	    }
+	    grokPatternDefinition.put(name, pattern);
+	  }
+
+	  /**
+	   * Copy the given Map of patterns (pattern name, regular expression) to {@code Grok},
+	   * duplicate element will be override.
+	   *
+	   * @param cpy : Map to copy
+	   * @throws Exception
+	   **/
+	  public void copyPatterns(Map<String, String> cpy) throws Exception {
+	    if (cpy == null) {
+	      throw new Exception("Invalid Patterns");
+	    }
+
+	    if (cpy.isEmpty()) {
+	      throw new Exception("Invalid Patterns");
+	    }
+	    for (Map.Entry<String, String> entry : cpy.entrySet()) {
+	      grokPatternDefinition.put(entry.getKey().toString(), entry.getValue().toString());
+	    }
+	  }
+
+	  /**
+	   * Get the current map of {@code Grok} pattern.
+	   *
+	   * @return Patterns (name, regular expression)
+	   */
+	  public Map<String, String> getPatterns() {
+	    return grokPatternDefinition;
+	  }
+
+	  /**
+	   * Get the named regex from the {@code Grok} pattern. <p></p>
+	   * See {@link #compile(String)} for more detail.
+	   * @return named regex
+	   */
+	  public String getNamedRegex() {
+	    return namedRegex;
+	  }
+
+	  /**
+	   * Add patterns to {@code Grok} from the given file.
+	   *
+	   * @param file : Path of the grok pattern
+	   * @throws Exception
+	   */
+	  public void addPatternFromFile(String file) throws Exception {
+
+	    File f = new File(file);
+	    if (!f.exists()) {
+	      throw new Exception("Pattern not found");
+	    }
+
+	    if (!f.canRead()) {
+	      throw new Exception("Pattern cannot be read");
+	    }
+
+	    FileReader r = null;
+	    try {
+	      r = new FileReader(f);
+	      addPatternFromReader(r);
+	    } catch (FileNotFoundException e) {
+	      throw new Exception(e.getMessage());
+	    } catch (@SuppressWarnings("hiding") IOException e) {
+	      throw new Exception(e.getMessage());
+	    } finally {
+	      try {
+	        if (r != null) {
+	          r.close();
+	        }
+	      } catch (IOException io) {
+	        // TODO(anthony) : log the error
+	      }
+	    }
+	  }
+
+	  /**
+	   * Add patterns to {@code Grok} from a Reader.
+	   *
+	   * @param r : Reader with {@code Grok} patterns
+	   * @throws Exception
+	   */
+	  public void addPatternFromReader(Reader r) throws Exception {
+	    BufferedReader br = new BufferedReader(r);
+	    String line;
+	    // We dont want \n and commented line
+	    Pattern pattern = Pattern.compile("^([A-z0-9_]+)\\s+(.*)$");
+	    try {
+	      while ((line = br.readLine()) != null) {
+	        Matcher m = pattern.matcher(line);
+	        if (m.matches()) {
+	          this.addPattern(m.group(1), m.group(2));
+	        }
+	      }
+	      br.close();
+	    } catch (IOException e) {
+	      throw new Exception(e.getMessage());
+	    } catch (Exception e) {
+	      throw new Exception(e.getMessage());
+	    }
+
+	  }
+
+	  /**
+	   * Match the given <tt>log</tt> with the named regex.
+	   * And return the json representation of the matched element
+	   *
+	   * @param log : log to match
+	   * @return json representation og the log
+	   */
+	  public String capture(String log){
+		  OpenSOCMatch match = match(log);
+	    match.captures();
+	    return match.toJson();
+	  }
+
+	  /**
+	   * Match the given list of <tt>log</tt> with the named regex
+	   * and return the list of json representation of the matched elements.
+	   *
+	   * @param logs : list of log
+	   * @return list of json representation of the log
+	   */
+	  public List<String> captures(List<String> logs){
+	    List<String> matched = new ArrayList<String>();
+	    for (String log : logs) {
+	    	OpenSOCMatch match = match(log);
+	      match.captures();
+	      matched.add(match.toJson());
+	    }
+	    return matched;
+	  }
+
+	  /**
+	   * Match the given <tt>text</tt> with the named regex
+	   * {@code Grok} will extract data from the string and get an extence of {@link Match}.
+	   *
+	   * @param text : Single line of log
+	   * @return Grok Match
+	   */
+	  public OpenSOCMatch match(String text) {
+	    if (compiledNamedRegex == null || StringUtils.isBlank(text)) {
+	      return OpenSOCMatch.EMPTY;
+	    }
+
+	    Matcher m = compiledNamedRegex.matcher(text);
+	    OpenSOCMatch match = new OpenSOCMatch();
+	    if (m.find()) {
+	      match.setSubject(text);
+	      match.setGrok(this);
+	      match.setMatch(m);
+	      match.setStart(m.start(0));
+	      match.setEnd(m.end(0));
+	    }
+	    return match;
+	  }
+
+	  /**
+	   * Compile the {@code Grok} pattern to named regex pattern.
+	   *
+	   * @param pattern : Grok pattern (ex: %{IP})
+	   * @throws Exception
+	   */
+	  public void compile(String pattern) throws Exception {
+
+	    if (StringUtils.isBlank(pattern)) {
+	      throw new Exception("{pattern} should not be empty or null");
+	    }
+
+	    namedRegex = pattern;
+	    originalGrokPattern = pattern;
+	    int index = 0;
+	    /** flag for infinite recurtion */
+	    int iterationLeft = 1000;
+	    Boolean continueIteration = true;
+
+	    // Replace %{foo} with the regex (mostly groupname regex)
+	    // and then compile the regex
+	    while (continueIteration) {
+	      continueIteration = false;
+	      if (iterationLeft <= 0) {
+	        throw new Exception("Deep recursion pattern compilation of " + originalGrokPattern);
+	      }
+	      iterationLeft--;
+
+	      Matcher m = GrokUtils.GROK_PATTERN.matcher(namedRegex);
+	      // Match %{Foo:bar} -> pattern name and subname
+	      // Match %{Foo=regex} -> add new regex definition
+	      if (m.find()) {
+	        continueIteration = true;
+	        Map<String, String> group = m.namedGroups();
+	        if (group.get("definition") != null) {
+	          try {
+	            addPattern(group.get("pattern"), group.get("definition"));
+	            group.put("name", group.get("name") + "=" + group.get("definition"));
+	          } catch (Exception e) {
+	            // Log the exeception
+	          }
+	        }
+	        namedRegexCollection.put("name" + index,
+	            (group.get("subname") != null ? group.get("subname") : group.get("name")));
+	        namedRegex =
+	            StringUtils.replace(namedRegex, "%{" + group.get("name") + "}", "(?<name" + index + ">"
+	                + grokPatternDefinition.get(group.get("pattern")) + ")");
+	        // System.out.println(_expanded_pattern);
+	        index++;
+	      }
+	    }
+
+	    if (namedRegex.isEmpty()) {
+	      throw new Exception("Pattern not fount");
+	    }
+	    // Compile the regex
+	    compiledNamedRegex = Pattern.compile(namedRegex);
+	  }
+
+	 	  /**
+	   * Original grok pattern used to compile to the named regex.
+	   *
+	   * @return String Original Grok pattern
+	   */
+	  public String getOriginalGrokPattern(){
+	    return originalGrokPattern;
+	  }
+
+	  /**
+	   * Get the named regex from the given id.
+	   *
+	   * @param id : named regex id
+	   * @return String of the named regex
+	   */
+	  public String getNamedRegexCollectionById(String id) {
+	    return namedRegexCollection.get(id);
+	  }
+
+	  /**
+	   * Get the full collection of the named regex.
+	   *
+	   * @return named RegexCollection
+	   */
+	  public Map<String, String> getNamedRegexCollection() {
+	    return namedRegexCollection;
+	  }
+	}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCMatch.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCMatch.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCMatch.java
new file mode 100644
index 0000000..bd4f0ad
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/OpenSOCMatch.java
@@ -0,0 +1,280 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.Serializable;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeMap;
+
+import com.google.code.regexp.Matcher;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+public class OpenSOCMatch implements Serializable {
+
+	private static final long serialVersionUID = -1129245286587945311L;
+	private String subject; // texte
+	  private Map<String, Object> capture;
+	  private OpenSOCGarbage garbage;
+	  private OpenSOCGrok grok;
+	  private Matcher match;
+	  private int start;
+	  private int end;
+
+	  /**
+	   * For thread safety
+	   */
+	  private static ThreadLocal<OpenSOCMatch> matchHolder = new ThreadLocal<OpenSOCMatch>() {
+		  @Override
+		  protected OpenSOCMatch initialValue() {
+			  return new OpenSOCMatch();
+		  }
+	  };
+
+	  /**
+	   *Create a new {@code Match} object.
+	   */
+	  public OpenSOCMatch() {
+	    subject = "Nothing";
+	    grok = null;
+	    match = null;
+	    capture = new TreeMap<String, Object>();
+	    garbage = new OpenSOCGarbage();
+	    start = 0;
+	    end = 0;
+	  }
+
+	  /**
+	   * Create Empty grok matcher
+	   */
+	  public static final OpenSOCMatch EMPTY = new OpenSOCMatch();
+
+	  public void setGrok(OpenSOCGrok grok){
+	    if (grok != null) {
+	      this.grok = grok;
+	    }
+	  }
+
+	  public Matcher getMatch() {
+	    return match;
+	  }
+
+	  public void setMatch(Matcher match) {
+	    this.match = match;
+	  }
+
+	  public int getStart() {
+	    return start;
+	  }
+
+	  public void setStart(int start) {
+	    this.start = start;
+	  }
+
+	  public int getEnd() {
+	    return end;
+	  }
+
+	  public void setEnd(int end) {
+	    this.end = end;
+	  }
+
+	  /**
+	   * Singleton.
+	   *
+	   * @return instance of Match
+	   */
+	  public static OpenSOCMatch getInstance() {
+		 return matchHolder.get();
+	  }
+
+	  /**
+	   *  Set the single line of log to parse.
+	   *
+	   * @param text : single line of log
+	   */
+	  public void setSubject(String text) {
+	    if (text == null) {
+	      return;
+	    }
+	    if (text.isEmpty()) {
+	      return;
+	    }
+	    subject = text;
+	  }
+
+	  /**
+	   * Retrurn the single line of log.
+	   *
+	   * @return the single line of log
+	   */
+	  public String getSubject() {
+	    return subject;
+	  }
+
+	  /**
+	   * Match to the <tt>subject</tt> the <tt>regex</tt> and save the matched element into a map.
+	   *
+	   */
+	  public void captures() {
+	    if (match == null) {
+	      return;
+	    }
+	    capture.clear();
+
+	    // _capture.put("LINE", this.line);
+	    // _capture.put("LENGTH", this.line.length() +"");
+
+	    Map<String, String> mappedw = this.match.namedGroups();
+	    Iterator<Entry<String, String>> it = mappedw.entrySet().iterator();
+	    while (it.hasNext()) {
+
+	      @SuppressWarnings("rawtypes")
+	      Map.Entry pairs = (Map.Entry) it.next();
+	      String key = null;
+	      Object value = null;
+	      if (this.grok.getNamedRegexCollectionById(pairs.getKey().toString()) == null) {
+	        key = pairs.getKey().toString();
+	      } else if (!this.grok.getNamedRegexCollectionById(pairs.getKey().toString()).isEmpty()) {
+	        key = this.grok.getNamedRegexCollectionById(pairs.getKey().toString());
+	      }
+	      if (pairs.getValue() != null) {
+	        value = pairs.getValue().toString();
+	        
+	        KeyValue keyValue = OpenSOCConverter.convert(key, value);
+	        
+	        //get validated key
+	        key = keyValue.getKey();
+	        
+	        //resolve value
+	        if (keyValue.getValue() instanceof String) {
+	        	 value = cleanString((String)keyValue.getValue());
+	        } else {
+	        	value = keyValue.getValue();
+	        }
+	        
+	        //set if grok failure
+	        if (keyValue.hasGrokFailure()) {
+	        	capture.put(key + "_grokfailure", keyValue.getGrokFailure());
+	        }
+	      }
+
+	      capture.put(key, value);
+	      it.remove(); // avoids a ConcurrentModificationException
+	    }
+	  }
+
+
+	  /**
+	   * remove from the string the quote and double quote.
+	   *
+	   * @param string to pure: "my/text"
+	   * @return unquoted string: my/text
+	   */
+	  private String cleanString(String value) {
+	    if (value == null) {
+	      return value;
+	    }
+	    if (value.isEmpty()) {
+	      return value;
+	    }
+	    char[] tmp = value.toCharArray();
+	    if ((tmp[0] == '"' && tmp[value.length() - 1] == '"')
+	        || (tmp[0] == '\'' && tmp[value.length() - 1] == '\'')) {
+	      value = value.substring(1, value.length() - 1);
+	    }
+	    return value;
+	  }
+
+
+	  /**
+	   * Get the json representation of the matched element.
+	   * <p>
+	   * example:
+	   * map [ {IP: 127.0.0.1}, {status:200}]
+	   * will return
+	   * {"IP":"127.0.0.1", "status":200}
+	   * </p>
+	   * If pretty is set to true, json will return prettyprint json string.
+	   *
+	   * @return Json of the matched element in the text
+	   */
+	  public String toJson(Boolean pretty) {
+	    if (capture == null) {
+	      return "{}";
+	    }
+	    if (capture.isEmpty()) {
+	      return "{}";
+	    }
+
+	    this.cleanMap();
+	    Gson gs;
+	    if (pretty) {
+	     gs = new GsonBuilder().setPrettyPrinting().create();
+	    } else {
+	      gs = new Gson();
+	    }
+	    return gs.toJson(/* cleanMap( */capture/* ) */);
+	  }
+
+	  /**
+	   * Get the json representation of the matched element.
+	   * <p>
+	   * example:
+	   * map [ {IP: 127.0.0.1}, {status:200}]
+	   * will return
+	   * {"IP":"127.0.0.1", "status":200}
+	   * </p>
+	   *
+	   * @return Json of the matched element in the text
+	   */
+	  public String toJson() {
+	    return toJson(false);
+	  }
+
+	  /**
+	   * Get the map representation of the matched element in the text.
+	   *
+	   * @return map object from the matched element in the text
+	   */
+	  public Map<String, Object> toMap() {
+	    this.cleanMap();
+	    return capture;
+	  }
+
+	  /**
+	   * Remove and rename the unwanted elelents in the matched map.
+	   */
+	  private void cleanMap() {
+	    garbage.rename(capture);
+	    garbage.remove(capture);
+	  }
+
+	  /**
+	   * Util fct.
+	   *
+	   * @return boolean
+	   */
+	  public Boolean isNull() {
+	    if (this.match == null) {
+	      return true;
+	    }
+	    return false;
+	  }
+
+	  /**
+	   * Util fct.
+	   *
+	   * @param s
+	   * @return boolean
+	   */
+	  private boolean isInteger(String s) {
+	    try {
+	      Integer.parseInt(s);
+	    } catch (NumberFormatException e) {
+	      return false;
+	    }
+	    return true;
+	  }
+	  
+	}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/ParserUtils.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/ParserUtils.java b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/ParserUtils.java
new file mode 100644
index 0000000..b986cae
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/java/com/opensoc/parsing/parsers/ParserUtils.java
@@ -0,0 +1,23 @@
+package com.opensoc.parsing.parsers;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.commons.io.IOUtils;
+
+public class ParserUtils {
+	
+	public static final String PREFIX = "stream2file";
+	public static final String SUFFIX = ".tmp";
+
+	public static File stream2file(InputStream in) throws IOException {
+		final File tempFile = File.createTempFile(PREFIX, SUFFIX);
+		tempFile.deleteOnExit();
+		try (FileOutputStream out = new FileOutputStream(tempFile)) {
+			IOUtils.copy(in, out);
+		}
+		return tempFile;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/asa
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/asa b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/asa
new file mode 100644
index 0000000..8c2da93
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/asa
@@ -0,0 +1,176 @@
+# Forked from https://github.com/elasticsearch/logstash/tree/v1.4.0/patterns
+
+USERNAME [a-zA-Z0-9._-]+
+USER %{USERNAME:UNWANTED}
+INT (?:[+-]?(?:[0-9]+))
+BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
+NUMBER (?:%{BASE10NUM:UNWANTED})
+BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
+BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
+
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+#QUOTEDSTRING (?:(?<!\\)(?:"(?:\\.|[^\\"])*"|(?:'(?:\\.|[^\\'])*')|(?:`(?:\\.|[^\\`])*`)))
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC:UNWANTED}|%{WINDOWSMAC:UNWANTED}|%{COMMONMAC:UNWANTED})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5
 ]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+IP (?:%{IPV6:UNWANTED}|%{IPV4:UNWANTED})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOST %{HOSTNAME:UNWANTED}
+IPORHOST (?:%{HOSTNAME:UNWANTED}|%{IP:UNWANTED})
+HOSTPORT (?:%{IPORHOST}:%{POSINT:PORT})
+
+# paths
+PATH (?:%{UNIXPATH}|%{WINPATH})
+UNIXPATH (?>/(?>[\w_%!$@:.,~-]+|\\.)*)+
+#UNIXPATH (?<![\w\/])(?:/[^\/\s?*]*)+
+TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
+WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
+URIPROTO [A-Za-z]+(\+[A-Za-z+]+)?
+URIHOST %{IPORHOST}(?::%{POSINT:port})?
+# uripath comes loosely from RFC1738, but mostly from what Firefox
+# doesn't turn into %XX
+URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+
+#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
+URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]]*
+URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
+
+# Months: January, Feb, 3, 03, 12, December
+MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b
+MONTHNUM (?:0?[1-9]|1[0-2])
+MONTHNUM2 (?:0[1-9]|1[0-2])
+MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
+
+# Days: Monday, Tue, Thu, etc...
+DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
+
+# Years?
+YEAR (?>\d\d){1,2}
+# Time: HH:MM:SS
+#TIME \d{2}:\d{2}(?::\d{2}(?:\.\d+)?)?
+# I'm still on the fence about using grok to perform the time match,
+# since it's probably slower.
+# TIME %{POSINT<24}:%{POSINT<60}(?::%{POSINT<60}(?:\.%{POSINT})?)?
+HOUR (?:2[0123]|[01]?[0-9])
+MINUTE (?:[0-5][0-9])
+# '60' is a leap second in most time standards and thus is valid.
+SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
+TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
+# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
+DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
+DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
+ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
+ISO8601_SECOND (?:%{SECOND}|60)
+TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
+DATE %{DATE_US}|%{DATE_EU}
+DATESTAMP %{DATE}[- ]%{TIME}
+TZ (?:[PMCE][SD]T|UTC)
+DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
+DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
+DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
+DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
+GREEDYDATA .*
+
+# Syslog Dates: Month Day HH:MM:SS
+SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
+PROG (?:[\w._/%-]+)
+SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
+SYSLOGHOST %{IPORHOST}
+SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
+HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
+
+# Shortcuts
+QS %{QUOTEDSTRING:UNWANTED}
+
+# Log formats
+SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}:
+
+MESSAGESLOG %{SYSLOGBASE} %{DATA}
+
+COMMONAPACHELOG %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-)
+COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent}
+
+# Log Levels
+LOGLEVEL ([A|a]lert|ALERT|[T|t]race|TRACE|[D|d]ebug|DEBUG|[N|n]otice|NOTICE|[I|i]nfo|INFO|[W|w]arn?(?:ing)?|WARN?(?:ING)?|[E|e]rr?(?:or)?|ERR?(?:OR)?|[C|c]rit?(?:ical)?|CRIT?(?:ICAL)?|[F|f]atal|FATAL|[S|s]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
+
+#== Cisco ASA ==
+CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?:? %%{CISCOTAG:ciscotag}:
+CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME}
+CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+)
+
+# Common Particles
+CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted
+CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)*
+CISCO_DIRECTION Inbound|inbound|Outbound|outbound
+CISCO_INTERVAL first hit|%{INT}-second interval
+CISCO_XLATE_TYPE static|dynamic
+# ASA-2-106001
+CISCOFW106001 : %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface}
+# ASA-2-106006, ASA-2-106007, ASA-2-106010
+CISCOFW106006_106007_106010 : %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? (?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason})
+# ASA-3-106014
+CISCOFW106014 : %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\)
+# ASA-6-106015
+CISCOFW106015 : %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags}  on interface %{GREEDYDATA:interface}
+# ASA-1-106021
+CISCOFW106021 : %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface}
+# ASA-4-106023
+CISCOFW106023 : %{CISCO_ACTION:action} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group %{DATA:policy_id} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
+# ASA-5-106100
+CISCOFW106100 : access-list %{WORD:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
+# ASA-6-110002
+CISCOFW110002 : %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-302010
+CISCOFW302010 : %{INT:connection_count} in use, %{INT:connection_count_max} most used
+# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016
+CISCOFW302013_302014_302015_302016 : %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))?
+# ASA-6-302020, ASA-6-302021
+CISCOFW302020_302021 : %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))?
+# ASA-6-305011
+CISCOFW305011 : %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port}
+# ASA-3-313001, ASA-3-313004, ASA-3-313008
+CISCOFW313001_313004_313008 : %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})?
+# ASA-4-313005
+CISCOFW313005 : %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\.  Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))?
+# ASA-4-402117
+CISCOFW402117 : %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip}
+# ASA-4-402119
+CISCOFW402119 : %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking
+# ASA-4-419001
+CISCOFW419001 : %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason}
+# ASA-4-419002
+CISCOFW419002 : %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number
+# ASA-4-500004
+CISCOFW500004 : %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-602303, ASA-6-602304
+CISCOFW602303_602304 : %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action}
+# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006
+CISCOFW710001_710002_710003_710005_710006 : %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-713172
+CISCOFW713172 : Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device
+# ASA-4-733100
+CISCOFW733100 : \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count}
+
+
+# ASA-6-305012
+CISCOFW305012 : %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} duration %{TIME:duration}
+# ASA-7-609001
+CISCOFW609001 : %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))?
+# ASA-7-609002
+CISCOFW609002 : %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? duration %{TIME:duration}
+
+
+#== End Cisco ASA ==
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/fireeye
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/fireeye b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/fireeye
new file mode 100644
index 0000000..5dc99bf
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/fireeye
@@ -0,0 +1,9 @@
+GREEDYDATA .*
+POSINT \b(?:[1-9][0-9]*)\b
+UID [0-9.]+
+DATA .*?
+
+FIREEYE_BASE ^<%{POSINT:syslog_pri}>fenotify-%{UID:uid}.alert: %{GREEDYDATA:syslog}
+FIREEYE_MAIN <%{POSINT:syslog_pri}>fenotify-%{DATA:uid}.alert: %{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{GREEDYDATA:fedata}
+#\|(.?)\|(.?)\|(.?)\|(.?)\|%{DATA:type}\|(.?)\|%{GREEDYDATA:fedata}
+FIREEYE_SUB ^<%{POSINT:syslog_pri}>fenotify-%{UID:uid}.alert: .?*\|.?*\|.?*\|.?*\|.?*\|%{DATA:type}\|.?*\|%{GREEDYDATA:fedata}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/sourcefire
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/sourcefire b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/sourcefire
new file mode 100644
index 0000000..672f684
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-MessageParsers/src/main/resources/patterns/sourcefire
@@ -0,0 +1,30 @@
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5
 ]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+IP (?:%{IPV6}|%{IPV4})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOST %{HOSTNAME}
+IPORHOST (?:%{HOSTNAME}|%{IP})
+HOSTPORT %{IPORHOST}:%{POSINT}
+
+#Sourcefire Logs
+protocol \{[a-zA-Z0-9]+\}
+ip_src_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+ip_dst_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+ip_src_port [0-9]+
+ip_dst_port [0-9]+
+SOURCEFIRE %{GREEDYDATA}%{protocol}\s%{ip_src_addr}\:%{ip_src_port}\s->\s%{ip_dst_addr}\:%{ip_dst_port}
\ No newline at end of file


[24/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-Alerts/src/main/resources/hbase-site.xml
index dc7cba5..8d812a9 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/resources/hbase-site.xml
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/resources/hbase-site.xml
@@ -1,90 +1,131 @@
-<!--Tue Feb 11 02:34:08 2014 -->
-<configuration>
-
-	<property>
-		<name>hbase.regionserver.global.memstore.lowerLimit</name>
-		<value>0.38</value>
-	</property>
-	<property>
-		<name>zookeeper.session.timeout</name>
-		<value>20</value>
-	</property>
-
-	<property>
-		<name>hbase.security.authorization</name>
-		<value>false</value>
-	</property>
-	<property>
-		<name>hbase.cluster.distributed</name>
-		<value>true</value>
-	</property>
-	
-	<property>
-		<name>hbase.hstore.flush.retries.number</name>
-		<value>120</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.block.multiplier</name>
-		<value>4</value>
-	</property>
-	<property>
-		<name>hbase.hstore.blockingStoreFiles</name>
-		<value>200</value>
-	</property>
-	<property>
-		<name>hbase.defaults.for.version.skip</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.regionserver.global.memstore.upperLimit</name>
-		<value>0.4</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.mslab.enabled</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.client.keyvalue.maxsize</name>
-		<value>10485760</value>
-	</property>
-	<property>
-		<name>hbase.superuser</name>
-		<value>hbase</value>
-	</property>
-	<property>
-		<name>hfile.block.cache.size</name>
-		<value>0.40</value>
-	</property>
-	<property>
-		<name>zookeeper.znode.parent</name>
-		<value>/hbase-unsecure</value>
-	</property>
-	<property>
-		<name>hbase.hregion.max.filesize</name>
-		<value>10737418240</value>
-	</property>
-	<property>
-		<name>hbase.zookeeper.property.clientPort</name>
-		<value>2181</value>
-	</property>
-	<property>
-		<name>hbase.security.authentication</name>
-		<value>simple</value>
-	</property>
-	<property>
-		<name>hbase.client.scanner.caching</name>
-		<value>100</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.flush.size</name>
-		<value>134217728</value>
-	</property>
-	<property>
-		<name>hbase.hregion.majorcompaction</name>
-		<value>86400000</value>
-	</property>
-	<property>
-		<name>hbase.client.write.buffer</name>
-		<value>500000000</value>
-	</property>
-</configuration>
\ No newline at end of file
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>dfs.domain.socket.path</name>
+    <value>/var/run/hdfs/dn_socket</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/test/java/com/opensoc/alerts/adapters/AllAlertAdapterTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/test/java/com/opensoc/alerts/adapters/AllAlertAdapterTest.java b/opensoc-streaming/OpenSOC-Alerts/src/test/java/com/opensoc/alerts/adapters/AllAlertAdapterTest.java
new file mode 100644
index 0000000..65c74c0
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/test/java/com/opensoc/alerts/adapters/AllAlertAdapterTest.java
@@ -0,0 +1,166 @@
+ /*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.opensoc.alerts.adapters;
+
+import java.lang.reflect.Constructor;
+import java.util.Map;
+import java.util.Properties;
+
+import com.opensoc.test.AbstractConfigTest;
+import com.opensoc.alerts.adapters.AllAlertAdapter;
+
+ /**
+ * <ul>
+ * <li>Title: AllAlertAdapterTest</li>
+ * <li>Description: Tests for AllAlertAdapter</li>
+ * <li>Created: Oct 8, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AllAlertAdapterTest extends AbstractConfigTest {
+
+     /**
+     * The allAlertAdapter.
+     */
+    private static AllAlertAdapter allAlertAdapter=null;
+    
+     /**
+     * The connected.
+     */
+    private static boolean connected=false;
+
+    /**
+     * Constructs a new <code>AllAlertAdapterTest</code> instance.
+     * @param name
+     */
+    public AllAlertAdapterTest(String name) {
+        super(name);
+    }
+
+    /**
+     * @throws java.lang.Exception
+     */
+    protected static void setUpBeforeClass() throws Exception {
+    }
+
+    /**
+     * @throws java.lang.Exception
+     */
+    protected static void tearDownAfterClass() throws Exception {
+    }
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#setUp()
+     */
+
+    @SuppressWarnings("unchecked")
+    protected void setUp() throws Exception {
+          super.setUp("com.opensoc.alerts.adapters.AllAlertAdapter");
+          Properties prop = super.getTestProperties();
+          assertNotNull(prop);   
+       // this.setMode("global");
+        if(skipTests(this.getMode())){
+            System.out.println(getClass().getName()+" Skipping Tests !!Local Mode");
+            return;//skip tests
+       }else{      
+           Map<String, String> settings = super.getSettings();
+           @SuppressWarnings("rawtypes")
+        Class loaded_class = Class.forName("com.opensoc.alerts.adapters.AllAlertAdapter");
+           @SuppressWarnings("rawtypes")
+        Constructor constructor = loaded_class.getConstructor(new Class[] { Map.class});
+           
+           AllAlertAdapterTest.allAlertAdapter = (AllAlertAdapter) constructor.newInstance(settings);
+            // AllAlertAdapterTest.allAlertAdapter = new AllAlertAdapter(settings)
+      }
+    }
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#tearDown()
+     */
+
+    protected void tearDown() throws Exception {
+        super.tearDown();
+    }
+
+
+    /**
+     * Test method for {@link com.opensoc.alerts.adapters.AlllterAdapter#initialize()}.
+     */
+    public void testInitializeAdapter() {
+        if(skipTests(this.getMode())){
+            return;//skip tests
+       }else{        
+           
+        boolean initialized =AllAlertAdapterTest.getAllAlertAdapter().initialize();
+        assertTrue(initialized);
+       }
+    }
+    
+    /**
+     * Test method for containsAlertId(@link  com.opensoc.alerts.adapters.AlllterAdapter#containsAlertId()}.
+     */
+    public void testContainsAlertId(){
+        if(skipTests(this.getMode())){
+            return;//skip tests
+       }else{          
+            boolean containsAlert =AllAlertAdapterTest.getAllAlertAdapter().containsAlertId("test");
+            assertFalse(containsAlert);
+       }
+    }
+ 
+   
+
+    /**
+     * Returns the allAlertAdapter.
+     * @return the allAlertAdapter.
+     */
+    
+    public static AllAlertAdapter getAllAlertAdapter() {
+        return allAlertAdapter;
+    }
+
+    /**
+     * Sets the allAlertAdapter.
+     * @param allAlertAdapter the allAlertAdapter.
+     */
+    
+    public static void setAllAlertAdapter(AllAlertAdapter allAlertAdapter) {
+    
+        AllAlertAdapterTest.allAlertAdapter = allAlertAdapter;
+    }
+    /**
+     * Returns the connected.
+     * @return the connected.
+     */
+    
+    public static boolean isConnected() {
+        return connected;
+    }
+
+    /**
+     * Sets the connected.
+     * @param connected the connected.
+     */
+    
+    public static void setConnected(boolean connected) {
+    
+        AllAlertAdapterTest.connected = connected;
+    }    
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/test/resources/AllAlertAdapterTest.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/test/resources/AllAlertAdapterTest.properties b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/AllAlertAdapterTest.properties
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/AllAlertAdapterTest.properties
@@ -0,0 +1 @@
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
new file mode 100644
index 0000000..c4f2a82
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
@@ -0,0 +1,42 @@
+{
+"title": "GeoMySql Schema",
+"type": "object",
+"properties": {
+
+         "city"    : {
+					   "type": "string"
+				  },
+		 "country" : {
+						"type": "string"
+					},
+		 "dmaCode" :
+		 			 {
+						"type": "string"
+					},
+	     "geoHash" : 
+	     			{
+						"type": "string"
+					},
+		 "latitude" : 
+		 			{
+						"type": "string"
+				   },
+		 "locID" : 
+		 			{
+					   "type": "string"
+				   },
+		 "location_point" : 
+		 			{
+					   "type": "string"
+				    },
+		 "longitude" : 
+		 			{
+						"type": "string"
+					},
+		 "postalCode" : 
+		 			{
+						"type": "string"
+					}
+   },
+   "required": ["city", "country", "dmaCode","latitude","locID","location_point","postalCode"]
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/test/resources/config/AllAlertAdapterTest.config
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/test/resources/config/AllAlertAdapterTest.config b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/config/AllAlertAdapterTest.config
new file mode 100644
index 0000000..f6e5dd1
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/test/resources/config/AllAlertAdapterTest.config
@@ -0,0 +1,8 @@
+#Alerts Bolt
+bolt.alerts.adapter=com.opensoc.alerts.adapters.AllAlertAdapter
+com.opensoc.alerts.adapters.AllAlertAdapter.whitelist_table_name = ip_whitelist
+com.opensoc.alerts.adapters.AllAlertAdapter.blacklist_table_name = ip_blacklist
+com.opensoc.alerts.adapters.AllAlertAdapter.quorum=zkpr1,zkpr2,zkpr3
+com.opensoc.alerts.adapters.AllAlertAdapter.port=2181
+com.opensoc.alerts.adapters.AllAlertAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
+com.opensoc.alerts.adapters.AllAlertAdapter._MAX_TIME_RETAIN_MINUTES=1000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/.gitignore
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/.gitignore b/opensoc-streaming/OpenSOC-Common/.gitignore
new file mode 100644
index 0000000..b83d222
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/.gitignore
@@ -0,0 +1 @@
+/target/

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/pom.xml b/opensoc-streaming/OpenSOC-Common/pom.xml
index 582093d..ad1382f 100644
--- a/opensoc-streaming/OpenSOC-Common/pom.xml
+++ b/opensoc-streaming/OpenSOC-Common/pom.xml
@@ -15,21 +15,23 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-Common</artifactId>
 	<name>OpenSOC-Common</name>
 	<description>Components common to all enrichments</description>
 	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
 		<kafka.version>0.8.0</kafka.version>
 		<commons.config.version>1.10</commons.config.version>
 		<hbase.version>0.98.5-hadoop2</hbase.version>
 	</properties>
 	<repositories>
 		<repository>
-			<id>Kraken-Repo</id>
-			<name>Kraken Repository</name>
-			<url>http://download.krakenapps.org</url>
+			<id>OpenSOC-Kraken-Repo</id>
+			<name>OpenSOC Kraken Repository</name>
+			<url>https://raw.github.com/opensoc/kraken/mvn-repo</url>
 		</repository>
 	</repositories>
 	<dependencies>
@@ -43,6 +45,15 @@
 			<artifactId>storm-core</artifactId>
 			<version>${global_storm_version}</version>
 			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+				
+				   <artifactId>servlet-api</artifactId>
+				
+				   <groupId>javax.servlet</groupId>
+				
+				  </exclusion>
+			</exclusions>			
 		</dependency>
 		<dependency>
 			<groupId>org.apache.kafka</groupId>
@@ -82,7 +93,7 @@
 		<dependency>
 			<groupId>org.krakenapps</groupId>
 			<artifactId>kraken-pcap</artifactId>
-			<version>1.5.0</version>
+			<version>1.7.1</version>
 		</dependency>
 		<dependency>
 			<groupId>junit</groupId>
@@ -93,6 +104,21 @@
 			<groupId>org.apache.hbase</groupId>
 			<artifactId>hbase-client</artifactId>
 			<version>${hbase.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>log4j</groupId>
+					<artifactId>log4j</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>com.github.fge</groupId>
+			<artifactId>json-schema-validator</artifactId>
+			<version>${global_json_schema_validator_version}</version>
 		</dependency>
 	</dependencies>
 
@@ -123,6 +149,18 @@
 		</plugins>
 	</reporting>
 	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.1</version>
+				<configuration>
+					<source>1.7</source>
+					<compilerArgument>-Xlint:unchecked</compilerArgument>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+		</plugins>
 		<resources>
 			<resource>
 				<directory>src/main/resources</directory>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/configuration/ConfigurationManager.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/configuration/ConfigurationManager.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/configuration/ConfigurationManager.java
new file mode 100644
index 0000000..74f19a5
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/configuration/ConfigurationManager.java
@@ -0,0 +1,119 @@
+package com.opensoc.configuration;
+
+
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.configuration.CombinedConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.DefaultConfigurationBuilder;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Configuration manager class which loads all 'config-definition.xml' files and
+ * creates a Configuration object which holds all properties from the underlying
+ * configuration resource
+ */
+public class ConfigurationManager {
+
+  /** configuration definition file name. */
+  private static String DEFAULT_CONFIG_DEFINITION_FILE_NAME = "config-definition.xml";
+
+  /** Stores a map with the configuration for each path specified. */
+  private static Map<String, Configuration> configurationsCache = new HashMap<String, Configuration>();
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(ConfigurationManager.class);
+
+  /**
+   * Common method to load content of all configuration resources defined in
+   * 'config-definition.xml'.
+   * 
+   * @param configDefFilePath
+   *          the config def file path
+   * @return Configuration
+   */
+  public static Configuration getConfiguration(String configDefFilePath) {
+    if (configurationsCache.containsKey(configDefFilePath)) {
+      return configurationsCache.get(configDefFilePath);
+    }
+    CombinedConfiguration configuration = null;
+    synchronized (configurationsCache) {
+      if (configurationsCache.containsKey(configDefFilePath)) {
+        return configurationsCache.get(configDefFilePath);
+      }
+      DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
+      String fielPath = getConfigDefFilePath(configDefFilePath);
+      LOGGER.info("loading from 'configDefFilePath' :" + fielPath);
+      builder.setFile(new File(fielPath));
+      try {
+        configuration = builder.getConfiguration(true);
+        configurationsCache.put(fielPath, configuration);
+      } catch (ConfigurationException e) {
+        LOGGER.info("Exception in loading property files.", e);
+      }
+    }
+    return configuration;
+  }
+
+  /**
+   * Removes the configuration created from a config definition file located at
+   * 'configDefFilePath'.
+   * 
+   * @param configDefFilePath
+   *          path to the config definition file
+   */
+  public static void clearConfiguration(String configDefFilePath) {
+    configurationsCache.remove(configDefFilePath);
+  }
+
+  /**
+   * Gets the configuration.
+   * 
+   * @return the configuration
+   */
+  public static Configuration getConfiguration() {
+    return getConfiguration(null);
+  }
+
+  /**
+   * Returns the 'config-definition.xml' file path. 1. If the param
+   * 'configDefFilePath' has a valid value, returns configDefFilePath 2. If the
+   * system property key 'configDefFilePath' has a valid value, returns the
+   * value 3. By default, it returns the file name 'config-definition.xml'
+   * 
+   * @param configDefFilePath
+   *          given input path to the config definition file
+   * @return the config def file path
+   */
+  private static String getConfigDefFilePath(String configDefFilePath) {
+    if (StringUtils.isNotEmpty(configDefFilePath)) {
+      return configDefFilePath;
+    }
+    return DEFAULT_CONFIG_DEFINITION_FILE_NAME;
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the args
+   * @throws InterruptedException
+   *           the interrupted exception
+   */
+  public static void main(String[] args) throws InterruptedException {
+    Configuration config = ConfigurationManager
+        .getConfiguration("/Users/Sayi/Documents/config/config-definition-dpi.xml");
+    System.out.println("elastic.search.cluster ="
+        + config.getString("elastic.search.cluster"));
+    Thread.sleep(10000);
+    System.out.println("storm.topology.dpi.bolt.es-index.index.name ="
+        + config.getString("storm.topology.dpi.bolt.es-index.index.name"));
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/dataloads/interfaces/ThreatIntelSource.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/dataloads/interfaces/ThreatIntelSource.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/dataloads/interfaces/ThreatIntelSource.java
new file mode 100644
index 0000000..e19646a
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/dataloads/interfaces/ThreatIntelSource.java
@@ -0,0 +1,11 @@
+package com.opensoc.dataloads.interfaces;
+
+import java.util.Iterator;
+import org.apache.commons.configuration.Configuration;
+import org.json.simple.JSONObject;
+
+public interface ThreatIntelSource extends Iterator<JSONObject> {
+
+	void initializeSource(Configuration config);
+	void cleanupSource();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/hbase/HBaseBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/hbase/HBaseBolt.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/hbase/HBaseBolt.java
index 9c8f604..ef155f1 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/hbase/HBaseBolt.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/hbase/HBaseBolt.java
@@ -5,16 +5,9 @@ package com.opensoc.hbase;
 import java.io.IOException;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.log4j.Logger;
 import org.json.simple.JSONObject;
 
-import com.opensoc.topologyhelpers.ErrorGenerator;
-
 import backtype.storm.task.OutputCollector;
 import backtype.storm.task.TopologyContext;
 import backtype.storm.topology.IRichBolt;
@@ -23,6 +16,8 @@ import backtype.storm.tuple.Fields;
 import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
 
+import com.opensoc.helpers.topology.ErrorGenerator;
+
 /**
  * A Storm bolt for putting data into HBase.
  * <p>
@@ -76,12 +71,9 @@ public class HBaseBolt implements IRichBolt {
     try {
       this.connector.getTable().put(conf.getPutFromTuple(input));
     } catch (IOException ex) {
-    	
-        String error_as_string = org.apache.commons.lang.exception.ExceptionUtils
-  				.getStackTrace(ex);
 
   		JSONObject error = ErrorGenerator.generateErrorMessage(
-  				"Alerts problem: " + input.getBinary(0), error_as_string);
+  				"Alerts problem: " + input.getBinary(0), ex);
   		collector.emit("error", new Values(error));
   		
       throw new RuntimeException(ex);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/services/PcapServiceCli.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/services/PcapServiceCli.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/services/PcapServiceCli.java
new file mode 100644
index 0000000..70f8683
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/services/PcapServiceCli.java
@@ -0,0 +1,110 @@
+package com.opensoc.helpers.services;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+public class PcapServiceCli {
+
+	private String[] args = null;
+	private Options options = new Options();
+
+	int port = 8081;
+	String uri = "/pcapGetter";
+
+	public int getPort() {
+		return port;
+	}
+
+	public void setPort(int port) {
+		this.port = port;
+	}
+
+	public String getUri() {
+		return uri;
+	}
+
+	public void setUri(String uri) {
+		this.uri = uri;
+	}
+
+	public PcapServiceCli(String[] args) {
+
+		this.args = args;
+
+		Option help = new Option("h", "Display help menue");
+		options.addOption(help);
+		options.addOption(
+				"port",
+				true,
+				"OPTIONAL ARGUMENT [portnumber] If this argument sets the port for starting the service.  If this argument is not set the port will start on defaut port 8081");
+		options.addOption(
+				"endpoint_uri",
+				true,
+				"OPTIONAL ARGUMENT [/uri/to/service] This sets the URI for the service to be hosted.  The default URI is /pcapGetter");
+	}
+
+	public void parse() {
+		CommandLineParser parser = new BasicParser();
+
+		CommandLine cmd = null;
+
+		try {
+			cmd = parser.parse(options, args);
+		} catch (ParseException e1) {
+
+			e1.printStackTrace();
+		}
+
+		if (cmd.hasOption("h"))
+			help();
+
+		if (cmd.hasOption("port")) {
+
+			try {
+				port = Integer.parseInt(cmd.getOptionValue("port").trim());
+			} catch (Exception e) {
+
+				System.out.println("[OpenSOC] Invalid value for port entered");
+				help();
+			}
+		}
+		if (cmd.hasOption("endpoint_uri")) {
+
+			try {
+
+				if (uri == null || uri.equals(""))
+					throw new Exception("invalid uri");
+
+				uri = cmd.getOptionValue("uri").trim();
+
+				if (uri.charAt(0) != '/')
+					uri = "/" + uri;
+
+				if (uri.charAt(uri.length()) == '/')
+					uri = uri.substring(0, uri.length() - 1);
+
+			} catch (Exception e) {
+				System.out.println("[OpenSOC] Invalid URI entered");
+				help();
+			}
+		}
+
+	}
+
+	private void help() {
+		// This prints out some help
+		HelpFormatter formater = new HelpFormatter();
+
+		formater.printHelp("Topology Options:", options);
+
+		// System.out
+		// .println("[OpenSOC] Example usage: \n storm jar OpenSOC-Topologies-0.3BETA-SNAPSHOT.jar com.opensoc.topology.Bro -local_mode true -config_path OpenSOC_Configs/ -generator_spout true");
+
+		System.exit(0);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/Cli.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/Cli.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/Cli.java
new file mode 100644
index 0000000..0d9486e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/Cli.java
@@ -0,0 +1,186 @@
+package com.opensoc.helpers.topology;
+
+import java.io.File;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+public class Cli {
+
+	private String[] args = null;
+	private Options options = new Options();
+
+	private String path = null;
+	private boolean debug = true;
+	private boolean local_mode = true;
+	private boolean generator_spout = false;
+
+	public boolean isGenerator_spout() {
+		return generator_spout;
+	}
+
+	public void setGenerator_spout(boolean generator_spout) {
+		this.generator_spout = generator_spout;
+	}
+
+	public String getPath() {
+		return path;
+	}
+
+	public void setPath(String path) {
+		this.path = path;
+	}
+
+	public boolean isDebug() {
+		return debug;
+	}
+
+	public void setDebug(boolean debug) {
+		this.debug = debug;
+	}
+
+	public boolean isLocal_mode() {
+		return local_mode;
+	}
+
+	public void setLocal_mode(boolean local_mode) {
+		this.local_mode = local_mode;
+	}
+
+	public Cli(String[] args) {
+
+		this.args = args;
+
+		Option help = new Option("h", "Display help menue");
+		options.addOption(help);
+		options.addOption(
+				"config_path",
+				true,
+				"OPTIONAL ARGUMENT [/path/to/configs] Path to configuration folder. If not provided topology will initialize with default configs");
+		options.addOption(
+				"local_mode",
+				true,
+				"REQUIRED ARGUMENT [true|false] Local mode or cluster mode.  If set to true the topology will run in local mode.  If set to false the topology will be deployed to Storm nimbus");
+		options.addOption(
+				"debug",
+				true,
+				"OPTIONAL ARGUMENT [true|false] Storm debugging enabled.  Default value is true");
+		options.addOption(
+				"generator_spout",
+				true,
+				"REQUIRED ARGUMENT [true|false] Turn on test generator spout.  Default is set to false.  If test generator spout is turned on then kafka spout is turned off.  Instead the generator spout will read telemetry from file and ingest it into a topology");
+	}
+
+	public void parse() {
+		CommandLineParser parser = new BasicParser();
+
+		CommandLine cmd = null;
+		try {
+			cmd = parser.parse(options, args);
+
+			if (cmd.hasOption("h"))
+				help();
+
+			if (cmd.hasOption("local_mode")) {
+
+				String local_value = cmd.getOptionValue("local_mode").trim()
+						.toLowerCase();
+
+				if (local_value.equals("true"))
+					local_mode = true;
+
+				else if (local_value.equals("false"))
+					local_mode = false;
+				else {
+					System.out
+							.println("[OpenSOC] ERROR: Invalid value for local mode");
+					System.out
+							.println("[OpenSOC] ERROR: Using cli argument -local_mode="
+									+ cmd.getOptionValue("local_mode"));
+					help();
+				}
+			} else {
+				System.out
+						.println("[OpenSOC] ERROR: Invalid value for local mode");
+				help();
+			}
+			if (cmd.hasOption("generator_spout")) {
+
+				String local_value = cmd.getOptionValue("generator_spout").trim()
+						.toLowerCase();
+
+				if (local_value.equals("true"))
+					generator_spout = true;
+
+				else if (local_value.equals("false"))
+					generator_spout = false;
+				else {
+					System.out
+							.println("[OpenSOC] ERROR: Invalid value for local generator_spout");
+					System.out
+							.println("[OpenSOC] ERROR: Using cli argument -generator_spout="
+									+ cmd.getOptionValue("generator_spout"));
+					help();
+				}
+			} else {
+				System.out
+						.println("[OpenSOC] ERROR: Invalid value for generator_spout");
+				help();
+			}
+			if (cmd.hasOption("config_path")) {
+
+				path = cmd.getOptionValue("config_path").trim();
+
+				File file = new File(path);
+
+				if (!file.isDirectory() || !file.exists()) {
+					System.out
+							.println("[OpenSOC] ERROR: Invalid settings directory name given");
+					System.out
+							.println("[OpenSOC] ERROR: Using cli argument -config_path="
+									+ cmd.getOptionValue("config_path"));
+					help();
+				}
+			}
+
+			if (cmd.hasOption("debug")) {
+				String debug_value = cmd.getOptionValue("debug");
+
+				if (debug_value.equals("true"))
+					debug = true;
+				else if (debug_value.equals("false"))
+					debug = false;
+				else {
+					System.out
+							.println("[OpenSOC] ERROR: Invalid value for debug_value");
+					System.out
+							.println("[OpenSOC] ERROR: Using cli argument -debug_value="
+									+ cmd.getOptionValue("debug_value"));
+					help();
+				}
+			}
+
+		} catch (ParseException e) {
+			System.out
+					.println("[OpenSOC] ERROR: Failed to parse command line arguments");
+			help();
+		}
+	}
+
+	private void help() {
+		// This prints out some help
+		HelpFormatter formater = new HelpFormatter();
+
+		formater.printHelp("Topology Options:", options);
+
+		System.out
+				.println("[OpenSOC] Example usage: \n storm jar OpenSOC-Topologies-0.3BETA-SNAPSHOT.jar com.opensoc.topology.Bro -local_mode true -config_path OpenSOC_Configs/ -generator_spout true");
+
+		System.exit(0);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/ErrorGenerator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/ErrorGenerator.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/ErrorGenerator.java
new file mode 100644
index 0000000..97f0ba7
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/ErrorGenerator.java
@@ -0,0 +1,37 @@
+package com.opensoc.helpers.topology;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.json.simple.JSONObject;
+
+public class ErrorGenerator {
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject generateErrorMessage(String message, Exception e)
+	{
+		JSONObject error_message = new JSONObject();
+		
+		/*
+		 * Save full stack trace in object.
+		 */
+		String stackTrace = ExceptionUtils.getStackTrace(e);
+		
+		String exception = e.toString();
+		
+		error_message.put("time", System.currentTimeMillis());
+		try {
+			error_message.put("hostname", InetAddress.getLocalHost().getHostName());
+		} catch (UnknownHostException ex) {
+			// TODO Auto-generated catch block
+			ex.printStackTrace();
+		}
+		
+		error_message.put("message", message);
+		error_message.put("exception", exception);
+		error_message.put("stack", stackTrace);
+		
+		return error_message;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/SettingsLoader.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/SettingsLoader.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/SettingsLoader.java
new file mode 100644
index 0000000..261d481
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/helpers/topology/SettingsLoader.java
@@ -0,0 +1,149 @@
+package com.opensoc.helpers.topology;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.XMLConfiguration;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+public class SettingsLoader {
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject loadEnvironmentIdnetifier(String config_path)
+			throws ConfigurationException {
+		Configuration config = new PropertiesConfiguration(config_path);
+
+		String customer = config.getString("customer.id", "unknown");
+		String datacenter = config.getString("datacenter.id", "unknown");
+		String instance = config.getString("instance.id", "unknown");
+
+		JSONObject identifier = new JSONObject();
+		identifier.put("customer", customer);
+		identifier.put("datacenter", datacenter);
+		identifier.put("instance", instance);
+
+		return identifier;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject loadTopologyIdnetifier(String config_path)
+			throws ConfigurationException {
+		Configuration config = new PropertiesConfiguration(config_path);
+
+		String topology = config.getString("topology.id", "unknown");
+		String instance = config.getString("instance.id", "unknown");
+
+		JSONObject identifier = new JSONObject();
+		identifier.put("topology", topology);
+		identifier.put("topology_instance", instance);
+
+		return identifier;
+	}
+	
+
+	public static String generateTopologyName(JSONObject env, JSONObject topo) {
+
+		return (env.get("customer") + "_" + env.get("datacenter") + "_"
+				+ env.get("instance") + "_" + topo.get("topology") + "_" + topo.get("topology_instance"));
+	}
+	
+	@SuppressWarnings("unchecked")
+	public static JSONObject generateAlertsIdentifier(JSONObject env, JSONObject topo)
+	{
+		JSONObject identifier = new JSONObject();
+		identifier.put("environment", env);
+		identifier.put("topology", topo);
+		
+		return identifier;
+	}
+
+	public static Map<String, JSONObject> loadRegexAlerts(String config_path)
+			throws ConfigurationException, ParseException {
+		XMLConfiguration alert_rules = new XMLConfiguration();
+		alert_rules.setDelimiterParsingDisabled(true);
+		alert_rules.load(config_path);
+
+		//int number_of_rules = alert_rules.getList("rule.pattern").size();
+
+		String[] patterns = alert_rules.getStringArray("rule.pattern");
+		String[] alerts = alert_rules.getStringArray("rule.alert");
+
+		JSONParser pr = new JSONParser();
+		Map<String, JSONObject> rules = new HashMap<String, JSONObject>();
+
+		for (int i = 0; i < patterns.length; i++)
+			rules.put(patterns[i], (JSONObject) pr.parse(alerts[i]));
+
+		return rules;
+	}
+
+	public static Map<String, JSONObject> loadKnownHosts(String config_path)
+			throws ConfigurationException, ParseException {
+		Configuration hosts = new PropertiesConfiguration(config_path);
+
+		Iterator<String> keys = hosts.getKeys();
+		Map<String, JSONObject> known_hosts = new HashMap<String, JSONObject>();
+		JSONParser parser = new JSONParser();
+
+		while (keys.hasNext()) {
+			String key = keys.next().trim();
+			JSONArray value = (JSONArray) parser.parse(hosts.getProperty(key)
+					.toString());
+			known_hosts.put(key, (JSONObject) value.get(0));
+		}
+
+		return known_hosts;
+	}
+
+	public static void printConfigOptions(PropertiesConfiguration config, String path_fragment)
+	{
+		Iterator<String> itr = config.getKeys();
+		
+		while(itr.hasNext())
+		{
+			String key = itr.next();
+			
+			if(key.contains(path_fragment))
+			{
+				
+				System.out.println("[OpenSOC] Key: " + key + " -> " + config.getString(key));
+			}
+		}
+
+	}
+	
+	public static void printOptionalSettings(Map<String, String> settings)
+	{
+		for(String setting: settings.keySet())
+		{
+			System.out.println("[OpenSOC] Optional Setting: " + setting + " -> " +settings.get(setting));
+		}
+
+	}
+	
+	public static Map<String, String> getConfigOptions(PropertiesConfiguration config, String path_fragment)
+	{
+		Iterator<String> itr = config.getKeys();
+		Map<String, String> settings = new HashMap<String, String>();
+		
+		while(itr.hasNext())
+		{
+			String key = itr.next();
+			
+			if(key.contains(path_fragment))
+			{
+				String tmp_key = key.replace(path_fragment, "");
+				settings.put(tmp_key, config.getString(key));
+			}
+		}
+
+		return settings;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/index/interfaces/IndexAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/index/interfaces/IndexAdapter.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/index/interfaces/IndexAdapter.java
index 1f88342..dfdfc8e 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/index/interfaces/IndexAdapter.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/index/interfaces/IndexAdapter.java
@@ -1,11 +1,15 @@
 package com.opensoc.index.interfaces;
 
+import java.util.Map;
+
 import org.json.simple.JSONObject;
 
 public interface IndexAdapter {
 
 	boolean initializeConnection(String ip, int port, String cluster_name,
-			String index_name, String document_name, int bulk) throws Exception;
+			String index_name, String document_name, int bulk, String date_format) throws Exception;
 
 	int bulkIndex(JSONObject raw_message);
+
+	void setOptionalSettings(Map<String, String> settings);
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParser.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParser.java
index 7c88ae3..a54f1ce 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParser.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParser.java
@@ -2,17 +2,20 @@
 package com.opensoc.ise.parser;
 import java.io.*;
 import java.util.*;
+
 import org.json.simple.*;
 
 /**
 * Basic ISE data parser generated by JavaCC. 
 */
 public class ISEParser implements Serializable, ISEParserConstants {
-  private boolean nativeNumbers = false;
+ // private boolean nativeNumbers = false;
 
-  public ISEParser()
-  { //do nothing
-  }
+	private static final long serialVersionUID = -2531656825360044979L;
+
+	public ISEParser()
+	  { //do nothing
+	  }
 
   public ISEParser(String input)
   {
@@ -29,7 +32,8 @@ public class ISEParser implements Serializable, ISEParserConstants {
     return toReturn;
   }
 
-  final public boolean ensureEOF() throws ParseException {
+  @SuppressWarnings("unused")
+final public boolean ensureEOF() throws ParseException {
     switch (jj_nt.kind) {
     case COMMA:
       jj_consume_token(COMMA);
@@ -43,7 +47,8 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public JSONObject innerMap() throws ParseException {
+  @SuppressWarnings({ "unchecked", "unused" })
+final public JSONObject innerMap() throws ParseException {
   final JSONObject json = new JSONObject();
   String key;
   Object value;
@@ -76,7 +81,8 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public JSONObject object() throws ParseException {
+  @SuppressWarnings({ "unused", "unchecked" })
+final public JSONObject object() throws ParseException {
   final JSONObject json = new JSONObject();
   String key;
   Object value;
@@ -105,7 +111,8 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public String objectKey() throws ParseException {
+  @SuppressWarnings("unused")
+final public String objectKey() throws ParseException {
   String k;
     k = string();
     //  System.out.println("key == " + k);
@@ -113,7 +120,8 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public Object value() throws ParseException {
+  @SuppressWarnings({ "unused", "rawtypes" })
+final public Object value() throws ParseException {
   Object x;
   String eof = "EOF";
   Map m = null;
@@ -147,12 +155,14 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public String nullValue() throws ParseException {
+  @SuppressWarnings("unused")
+final public String nullValue() throws ParseException {
     {if (true) return null;}
     throw new Error("Missing return statement in function");
   }
 
-  final public String tagString() throws ParseException {
+  @SuppressWarnings("unused")
+final public String tagString() throws ParseException {
   String output = "(tag=0)";
     jj_consume_token(TAG);
     jj_consume_token(STRING_BODY);
@@ -160,19 +170,22 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw new Error("Missing return statement in function");
   }
 
-  final public String blankValue() throws ParseException {
+  @SuppressWarnings("unused")
+final public String blankValue() throws ParseException {
     {if (true) return null;}
     throw new Error("Missing return statement in function");
   }
 
-  final public String string() throws ParseException {
+  @SuppressWarnings("unused")
+final public String string() throws ParseException {
   String s;
     jj_consume_token(STRING_BODY);
     {if (true) return token.image.trim();}
     throw new Error("Missing return statement in function");
   }
 
-  final public String braced_string() throws ParseException {
+  @SuppressWarnings("unused")
+final public String braced_string() throws ParseException {
   String s;
     jj_consume_token(BRACED_STRING);
     //  System.out.println("braced == " + token.image);
@@ -471,7 +484,9 @@ public class ISEParser implements Serializable, ISEParserConstants {
     throw generateParseException();
   }
 
-  static private final class LookaheadSuccess extends java.lang.Error { }
+  static private final class LookaheadSuccess extends java.lang.Error {
+
+	private static final long serialVersionUID = -5724812746511794505L; }
   final private LookaheadSuccess jj_ls = new LookaheadSuccess();
   private boolean jj_scan_token(int kind) {
     if (jj_scanpos == jj_lastpos) {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParserTokenManager.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParserTokenManager.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParserTokenManager.java
index adf9401..9999452 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParserTokenManager.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/ise/parser/ISEParserTokenManager.java
@@ -1,8 +1,5 @@
 /* Generated By:JavaCC: Do not edit this line. ISEParserTokenManager.java */
 package com.opensoc.ise.parser;
-import java.io.*;
-import java.util.*;
-import org.json.simple.*;
 
 /** Token Manager. */
 class ISEParserTokenManager implements ISEParserConstants

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONEncoderHelper.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONEncoderHelper.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONEncoderHelper.java
index 38ad375..b388397 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONEncoderHelper.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONEncoderHelper.java
@@ -19,6 +19,7 @@ package com.opensoc.json.serialization;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Iterator;
+
 import org.apache.commons.configuration.Configuration;
 import org.json.simple.JSONObject;
 
@@ -68,6 +69,7 @@ public class JSONEncoderHelper {
 
 	}
 
+	@SuppressWarnings({ "rawtypes", "unchecked" })
 	public static JSONObject getJSON(Configuration config) {
 
 		JSONObject output = new JSONObject();

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONKafkaSerializer.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONKafkaSerializer.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONKafkaSerializer.java
index 08f3b44..c08444f 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONKafkaSerializer.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/json/serialization/JSONKafkaSerializer.java
@@ -17,30 +17,32 @@
 
 package com.opensoc.json.serialization;
 
+import static com.opensoc.json.serialization.JSONDecoderHelper.getObject;
+import static com.opensoc.json.serialization.JSONEncoderHelper.putBoolean;
+import static com.opensoc.json.serialization.JSONEncoderHelper.putNull;
+import static com.opensoc.json.serialization.JSONEncoderHelper.putNumber;
+import static com.opensoc.json.serialization.JSONEncoderHelper.putString;
+
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.io.FileNotFoundException;
 import java.io.FileReader;
 import java.io.IOException;
-import java.io.Reader;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import kafka.serializer.Decoder;
+import kafka.serializer.Encoder;
+import kafka.utils.VerifiableProperties;
+
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.json.simple.parser.JSONParser;
 import org.json.simple.parser.ParseException;
 
-import kafka.serializer.Decoder;
-import kafka.serializer.Encoder;
-import kafka.utils.VerifiableProperties;
-import static com.opensoc.json.serialization.JSONEncoderHelper.*;
-import static com.opensoc.json.serialization.JSONDecoderHelper.*;
-
 /**
  * JSON Serailization class for kafka. Implements kafka Encoder and Decoder
  * String, JSONObject, Number, Boolean,JSONObject.NULL JSONArray

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/parser/interfaces/MessageParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/parser/interfaces/MessageParser.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/parser/interfaces/MessageParser.java
index 700d3ab..b71e4f9 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/parser/interfaces/MessageParser.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/parser/interfaces/MessageParser.java
@@ -5,6 +5,7 @@ import org.json.simple.JSONObject;
 public interface MessageParser {
 	
 	void initializeParser();
+	void init();
 	JSONObject parse(byte[] raw_message);
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PacketInfo.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PacketInfo.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PacketInfo.java
index 151e3d3..804387d 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PacketInfo.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PacketInfo.java
@@ -1,6 +1,7 @@
 package com.opensoc.pcap;
 
 import java.text.MessageFormat;
+import org.apache.log4j.Logger;
 
 import org.krakenapps.pcap.decoder.ip.Ipv4Packet;
 import org.krakenapps.pcap.decoder.tcp.TcpPacket;
@@ -9,6 +10,9 @@ import org.krakenapps.pcap.file.GlobalHeader;
 import org.krakenapps.pcap.packet.PacketHeader;
 import org.krakenapps.pcap.packet.PcapPacket;
 
+import com.opensoc.pcap.Constants;
+import com.opensoc.pcap.PcapUtils;
+
 /**
  * The Class PacketInfo.
  * 
@@ -47,6 +51,9 @@ public class PacketInfo {
   /** The Constant udpHeaderJsonTemplateSB. */
   private static final StringBuffer udpHeaderJsonTemplateSB = new StringBuffer();
 
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger.getLogger(PacketInfo.class);
+  
   static {
     globalHeaderJsonTemplateSB.append("<\"global_header\":<\"pcap_id\":\"").append("{0}").append('"');
     globalHeaderJsonTemplateSB.append(",\"inc_len\":").append("{1}");
@@ -232,6 +239,28 @@ public class PacketInfo {
   }
 
   /**
+   * Gets the short key
+   * 
+   * 
+   * @return the short key
+   */
+  public String getShortKey() {
+	int sourcePort = 0;
+	int destinationPort = 0;
+	if(Constants.PROTOCOL_UDP == ipv4Packet.getProtocol()) {
+		sourcePort = udpPacket.getSourcePort();
+		destinationPort = udpPacket.getDestinationPort();
+	} else if (Constants.PROTOCOL_TCP == ipv4Packet.getProtocol()) {
+		sourcePort = tcpPacket.getSourcePort();
+		destinationPort = tcpPacket.getDestinationPort();
+	}
+	  
+	return PcapUtils.getShortSessionKey(ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet.getDestinationAddress().getHostAddress(),
+	    ipv4Packet.getProtocol(), sourcePort, destinationPort);
+			 
+  }
+  
+  /**
    * Gets the json doc.
    * 
    * 
@@ -260,6 +289,7 @@ public class PacketInfo {
    */
   private String getJsonDocUsingSBAppend() {
 
+	
     StringBuffer jsonSb = new StringBuffer(1024);
 
     // global header
@@ -373,29 +403,52 @@ public class PacketInfo {
    */
   private String getJsonIndexDocUsingSBAppend() {
 
-    StringBuffer jsonSb = new StringBuffer(175);
+	Long ts_micro = getPacketTimeInNanos() / 1000L;
+	StringBuffer jsonSb = new StringBuffer(175);
 
-    jsonSb.append("{\"pcap_id\":\"").append(getKey());
+	jsonSb.append("{\"pcap_id\":\"").append(getShortKey());
     jsonSb.append("\",\"ip_protocol\":").append(ipv4Packet.getProtocol());
+    jsonSb.append(",\"ip_id\":").append(ipv4Packet.getId());
+    jsonSb.append(",\"frag_offset\":").append(ipv4Packet.getFragmentOffset());
+    jsonSb.append(",\"ts_micro\":").append(ts_micro);
+
 
     // tcp header
     if (tcpPacket != null) {
-      jsonSb.append(",\"src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
-      jsonSb.append("\",\"src_port\":").append(tcpPacket.getSourcePort());
-      jsonSb.append(",\"dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
-      jsonSb.append("\",\"dst_port\":").append(tcpPacket.getDestinationPort());
+      jsonSb.append(",\"ip_src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
+      jsonSb.append("\",\"ip_src_port\":").append(tcpPacket.getSourcePort());
+      jsonSb.append(",\"ip_dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
+      jsonSb.append("\",\"ip_dst_port\":").append(tcpPacket.getDestinationPort());
     }
 
     // udp headers
     if (udpPacket != null) {
-      jsonSb.append(",\"src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
-      jsonSb.append("\",\"src_port\":").append(udpPacket.getSourcePort());
-      jsonSb.append(",\"dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
-      jsonSb.append("\",\"dst_port\":").append(udpPacket.getDestinationPort());
+      jsonSb.append(",\"ip_src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
+      jsonSb.append("\",\"ip_src_port\":").append(udpPacket.getSourcePort());
+      jsonSb.append(",\"ip_dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
+      jsonSb.append("\",\"ip_dst_port\":").append(udpPacket.getDestinationPort());
     }
 
     jsonSb.append('}');
 
     return jsonSb.toString();
   }
+  
+  public long getPacketTimeInNanos()
+  {
+	  if ( getGlobalHeader().getMagicNumber() == 0xa1b2c3d4 || getGlobalHeader().getMagicNumber() == 0xd4c3b2a1 )
+	  {
+		  //Time is in micro assemble as nano
+		  LOG.info("Times are in micro according to the magic number");
+		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ; 
+	  }
+	  else if ( getGlobalHeader().getMagicNumber() == 0xa1b23c4d || getGlobalHeader().getMagicNumber() == 0x4d3cb2a1 ) {
+		//Time is in nano assemble as nano
+		  LOG.info("Times are in nano according to the magic number");
+		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() ; 
+	  }
+	  //Default assume time is in micro assemble as nano
+	  LOG.warn("Unknown magic number. Defaulting to micro");
+	  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ;  
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapByteOutputStream.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapByteOutputStream.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapByteOutputStream.java
new file mode 100644
index 0000000..8a5ad18
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapByteOutputStream.java
@@ -0,0 +1,288 @@
+// $codepro.audit.disable explicitThisUsage, lossOfPrecisionInCast
+package com.opensoc.pcap;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.BufferUnderflowException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.krakenapps.pcap.PcapOutputStream;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapByteOutputStream.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapByteOutputStream implements PcapOutputStream {
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger
+      .getLogger(PcapByteOutputStream.class);
+
+  /** The Constant MAX_CACHED_PACKET_NUMBER. */
+  private static final int MAX_CACHED_PACKET_NUMBER = 1000;
+
+  /** The cached packet num. */
+  private int cachedPacketNum = 0; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /** The baos. */
+  private ByteArrayOutputStream baos; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /** The list. */
+  private List<Byte> list; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /**
+   * Instantiates a new pcap byte output stream.
+   * 
+   * @param baos
+   *          the baos
+   */
+  public PcapByteOutputStream(ByteArrayOutputStream baos) {
+    this.baos = baos;
+    list = new ArrayList<Byte>();
+    createGlobalHeader();
+  }
+
+  /**
+   * Instantiates a new pcap byte output stream.
+   * 
+   * @param baos
+   *          the baos
+   * @param header
+   *          the header
+   */
+  public PcapByteOutputStream(ByteArrayOutputStream baos, GlobalHeader header) {
+    this.baos = baos;
+    list = new ArrayList<Byte>();
+    copyGlobalHeader(header);
+  }
+
+  /**
+   * Creates the global header.
+   */
+  private void createGlobalHeader() {
+    /* magic number(swapped) */
+    list.add((byte) 0xd4);
+    list.add((byte) 0xc3);
+    list.add((byte) 0xb2);
+    list.add((byte) 0xa1);
+
+    /* major version number */
+    list.add((byte) 0x02);
+    list.add((byte) 0x00);
+
+    /* minor version number */
+    list.add((byte) 0x04);
+    list.add((byte) 0x00);
+
+    /* GMT to local correction */
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* accuracy of timestamps */
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* max length of captured packets, in octets */
+    list.add((byte) 0xff);
+    list.add((byte) 0xff);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* data link type(ethernet) */
+    list.add((byte) 0x01);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+  }
+
+  /**
+   * Copy global header.
+   * 
+   * @param header
+   *          the header
+   */
+  private void copyGlobalHeader(GlobalHeader header) {
+    final byte[] magicNumber = intToByteArray(header.getMagicNumber());
+    final byte[] majorVersion = shortToByteArray(header.getMajorVersion());
+    final byte[] minorVersion = shortToByteArray(header.getMinorVersion());
+    final byte[] zone = intToByteArray(header.getThiszone());
+    final byte[] sigFigs = intToByteArray(header.getSigfigs());
+    final byte[] snapLen = intToByteArray(header.getSnaplen());
+    final byte[] network = intToByteArray(header.getNetwork());
+
+    list.add(magicNumber[0]);
+    list.add(magicNumber[1]);
+    list.add(magicNumber[2]);
+    list.add(magicNumber[3]);
+
+    list.add(majorVersion[1]);
+    list.add(majorVersion[0]);
+
+    list.add(minorVersion[1]);
+    list.add(minorVersion[0]);
+
+    list.add(zone[3]);
+    list.add(zone[2]);
+    list.add(zone[1]);
+    list.add(zone[0]);
+
+    list.add(sigFigs[3]);
+    list.add(sigFigs[2]);
+    list.add(sigFigs[1]);
+    list.add(sigFigs[0]);
+
+    list.add(snapLen[3]);
+    list.add(snapLen[2]);
+    list.add(snapLen[1]);
+    list.add(snapLen[0]);
+
+    list.add(network[3]);
+    list.add(network[2]);
+    list.add(network[1]);
+    list.add(network[0]);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.krakenapps.pcap.PcapOutputStream#write(org.krakenapps.pcap.packet
+   * .PcapPacket)
+   */
+  /**
+   * Method write.
+   * 
+   * @param packet
+   *          PcapPacket
+   * 
+   * 
+   * @throws IOException
+   *           * @see org.krakenapps.pcap.PcapOutputStream#write(PcapPacket) * @see
+   *           org.krakenapps.pcap.PcapOutputStream#write(PcapPacket)
+   */
+ 
+  public void write(PcapPacket packet) throws IOException {
+    PacketHeader packetHeader = packet.getPacketHeader();
+
+    int tsSec = packetHeader.getTsSec();
+    int tsUsec = packetHeader.getTsUsec();
+    int inclLen = packetHeader.getInclLen();
+    int origLen = packetHeader.getOrigLen();
+
+    addInt(tsSec);
+    addInt(tsUsec);
+    addInt(inclLen);
+    addInt(origLen);
+
+    Buffer payload = packet.getPacketData();
+
+    try {
+      payload.mark();
+      while (true) {
+        list.add(payload.get());
+      }
+    } catch (BufferUnderflowException e) {
+      //LOG.debug("Ignorable exception while writing packet", e);
+      payload.reset();
+    }
+
+    cachedPacketNum++;
+    if (cachedPacketNum == MAX_CACHED_PACKET_NUMBER) {
+      flush();
+    }
+  }
+
+  /**
+   * Adds the int.
+   * 
+   * @param number
+   *          the number
+   */
+  private void addInt(int number) {
+    list.add((byte) (number & 0xff));
+    list.add((byte) ((number & 0xff00) >> 8));
+    list.add((byte) ((number & 0xff0000) >> 16));
+    list.add((byte) ((number & 0xff000000) >> 24));
+  }
+
+  /**
+   * Int to byte array.
+   * 
+   * @param number
+   *          the number
+   * 
+   * @return the byte[]
+   */
+  private byte[] intToByteArray(int number) {
+    return new byte[] { (byte) (number >>> 24), (byte) (number >>> 16),
+        (byte) (number >>> 8), (byte) number };
+  }
+
+  /**
+   * Short to byte array.
+   * 
+   * @param number
+   *          the number
+   * 
+   * @return the byte[]
+   */
+  private byte[] shortToByteArray(short number) {
+    return new byte[] { (byte) (number >>> 8), (byte) number };
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.krakenapps.pcap.PcapOutputStream#flush()
+   */
+  /**
+   * Method flush.
+   * 
+   * 
+   * @throws IOException
+   *           * @see org.krakenapps.pcap.PcapOutputStream#flush() * @see
+   *           org.krakenapps.pcap.PcapOutputStream#flush()
+   */
+ 
+  public void flush() throws IOException {
+    byte[] fileBinary = new byte[list.size()];
+    for (int i = 0; i < fileBinary.length; i++) {
+      fileBinary[i] = list.get(i);
+    }
+
+    list.clear();
+    baos.write(fileBinary);
+    cachedPacketNum = 0;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see org.krakenapps.pcap.PcapOutputStream#close()
+   */
+  /**
+   * Method close.
+   * 
+   * 
+   * @throws IOException
+   *           * @see org.krakenapps.pcap.PcapOutputStream#close() * @see
+   *           org.krakenapps.pcap.PcapOutputStream#close()
+   */
+ 
+  public void close() throws IOException {
+    flush();
+    baos.close(); // $codepro.audit.disable closeInFinally
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapMerger.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapMerger.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapMerger.java
new file mode 100644
index 0000000..392523b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapMerger.java
@@ -0,0 +1,245 @@
+ package com.opensoc.pcap;
+
+import java.io.ByteArrayOutputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.file.GlobalHeader;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapMerger.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public final class PcapMerger {
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger.getLogger(PcapMerger.class);
+  
+  /** The comparator for PcapPackets */
+  private static PcapPacketComparator PCAP_PACKET_COMPARATOR = new PcapPacketComparator();
+
+  /**
+   * Instantiates a new pcap merger.
+   */
+  private PcapMerger() { // $codepro.audit.disable emptyMethod
+  }
+
+  /**
+   * Merge two pcap byte arrays.
+   * 
+   * @param baos
+   *          the baos
+   * @param pcaps
+   *          the pcaps
+   * 
+   * @throws IOException
+   *           if there is no byte array, no access permission, or other io
+   *           related problems.
+   */
+  // public static void merge(byte[] to, byte[] from) throws IOException {
+  // PcapByteInputStream is = null;
+  // PcapByteOutputStream os = null;
+  // ByteArrayOutputStream baos = null;
+  // try {
+  // is = new PcapByteInputStream(from);
+  // baos = new ByteArrayOutputStream();
+  // os = new PcapByteOutputStream(baos, is.getGlobalHeader());
+  //
+  // writePacket(is, os);
+  // } finally {
+  // closeInput(is);
+  // if (baos != null) {
+  // baos.close();
+  // }
+  // closeOutput(os);
+  // }
+  // }
+
+  public static void merge(ByteArrayOutputStream baos, List<byte[]> pcaps)
+      throws IOException {
+    PcapByteInputStream is = null;
+    PcapByteOutputStream os = null;
+    ByteArrayOutputStream unsortedBaos = new ByteArrayOutputStream();
+    
+    try {
+      int i = 1;
+      for (byte[] pcap : pcaps) {
+        is = new PcapByteInputStream(pcap);
+        if (i == 1) {
+          os = new PcapByteOutputStream(unsortedBaos, is.getGlobalHeader());
+        }
+
+        writePacket(is, os);
+        i++;
+        closeInput(is);
+      }
+    } finally {
+      if (unsortedBaos != null) {
+        unsortedBaos.close();
+      }
+      closeOutput(os);
+      sort(baos, unsortedBaos.toByteArray());
+    }
+  }
+
+  /**
+   * Merge byte array1 with byte array2, and write to output byte array. It
+   * doesn't hurt original pcap dump byte arrays.
+   * 
+   * @param baos
+   *          the baos
+   * @param pcaps
+   *          the pcaps
+   * 
+   * @throws IOException
+   *           if there are no source byte arrays, have no read and/or write
+   *           permissions, or anything else.
+   */
+  public static void merge(ByteArrayOutputStream baos, byte[]... pcaps) // $codepro.audit.disable
+                                                                        // overloadedMethods
+      throws IOException {
+    merge(baos, Arrays.asList(pcaps));
+
+  }
+  
+  /**
+   * Sort the potentially unsorted byte array according to the timestamp
+   * in the packet header
+   * 
+   * @param unsortedBytes
+   * 	a byte array of a pcap file
+   * 
+   * @return byte array of a pcap file with packets in cronological order
+   * 
+   * @throws IOException
+   * 	if there are no source byte arrays, have no read and or write 
+   * 	permission, or anything else.
+   */
+  private static void sort(ByteArrayOutputStream baos, byte[] unsortedBytes) throws IOException {
+	  PcapByteInputStream pcapIs = new PcapByteInputStream(unsortedBytes);
+	  PcapByteOutputStream pcapOs = new PcapByteOutputStream(baos, pcapIs.getGlobalHeader());
+	  PcapPacket packet;
+	  ArrayList<PcapPacket> packetList = new ArrayList<PcapPacket>();
+	  
+	  try {
+		  while (true) {
+			  packet = pcapIs.getPacket();
+			  if (packet == null)
+				  break;
+			  packetList.add(packet);
+			  LOG.debug("Presort packet: " + packet.getPacketHeader().toString());
+		  }
+	  } catch (EOFException e) {
+		  //LOG.debug("Ignoreable exception in sort", e);
+	  }
+	  
+	  Collections.sort(packetList, PCAP_PACKET_COMPARATOR);
+	  for (PcapPacket p : packetList) {
+		  pcapOs.write(p);
+		  LOG.debug("Postsort packet: " + p.getPacketHeader().toString());
+	  }
+	  pcapOs.close();  
+  }
+  
+  /**
+   * Write packet.
+   * 
+   * @param is
+   *          the is
+   * @param os
+   *          the os
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private static void writePacket(PcapByteInputStream is,
+      PcapByteOutputStream os) throws IOException {
+    PcapPacket packet = null;
+    try {
+      while (true) {
+        packet = is.getPacket();
+        if (packet == null) {
+          break;
+        }
+        os.write(packet);
+      }
+    } catch (EOFException e) {
+      //LOG.debug("Ignorable exception in writePacket", e);
+    }
+
+  }
+
+  /**
+   * Close input.
+   * 
+   * @param is
+   *          the is
+   */
+  private static void closeInput(PcapByteInputStream is) {
+    if (is == null) {
+      return;
+    }
+    try {
+      is.close(); // $codepro.audit.disable closeInFinally
+    } catch (IOException e) {
+      LOG.error("Failed to close input stream", e);
+    }
+  }
+
+  /**
+   * Close output.
+   * 
+   * @param os
+   *          the os
+   */
+  private static void closeOutput(PcapByteOutputStream os) {
+    if (os == null) {
+      return;
+    }
+    try {
+      os.close();
+    } catch (IOException e) {
+      LOG.error("Failed to close output stream", e);
+
+    }
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static void main(String[] args) throws IOException {
+    byte[] b1 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.1.pcap"));
+    byte[] b2 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.2.pcap"));
+    byte[] b3 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.3.pcap"));
+
+    ByteArrayOutputStream boas = new ByteArrayOutputStream(); // $codepro.audit.disable
+                                                              // closeWhereCreated
+    PcapMerger.merge(boas, b1, b2, b3);
+
+    FileUtils.writeByteArrayToFile(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.automerged.1.2.pcap"),
+        boas.toByteArray(), false);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapPacketComparator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapPacketComparator.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapPacketComparator.java
new file mode 100644
index 0000000..29a2414
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapPacketComparator.java
@@ -0,0 +1,22 @@
+package com.opensoc.pcap;
+
+import java.util.Comparator;
+
+import org.apache.log4j.Logger;
+
+import org.krakenapps.pcap.packet.PcapPacket;
+
+public class PcapPacketComparator implements Comparator<PcapPacket> {
+
+	/** The Constant LOG. */
+	private static final Logger LOG = Logger.getLogger(PcapMerger.class);
+	
+	public int compare(PcapPacket p1, PcapPacket p2) {
+
+		Long p1time = new Long(p1.getPacketHeader().getTsSec()) * 1000000L + new Long(p1.getPacketHeader().getTsUsec());
+		Long p2time = new Long(p2.getPacketHeader().getTsSec()) * 1000000L + new Long(p2.getPacketHeader().getTsUsec());
+		Long delta = p1time - p2time;
+		LOG.debug("p1time: " + p1time.toString() + " p2time: " + p2time.toString() + " delta: " + delta.toString());
+		return delta.intValue();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapParser.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapParser.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapParser.java
new file mode 100644
index 0000000..abc0873
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapParser.java
@@ -0,0 +1,183 @@
+package com.opensoc.pcap;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.krakenapps.pcap.decoder.ethernet.EthernetDecoder;
+import org.krakenapps.pcap.decoder.ethernet.EthernetType;
+import org.krakenapps.pcap.decoder.ip.IpDecoder;
+import org.krakenapps.pcap.decoder.ip.Ipv4Packet;
+import org.krakenapps.pcap.decoder.tcp.TcpPacket;
+import org.krakenapps.pcap.decoder.udp.UdpPacket;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+
+/**
+ * Static utility that decodes a raw pcap dump (byte array) into a list of
+ * {@link PacketInfo} objects, extracting the IPv4 layer and, where present,
+ * the TCP or UDP layer of each packet. Not instantiable; use
+ * {@link #parse(byte[])}.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public final class PcapParser {
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger.getLogger(PcapParser.class);
+
+  /** Ethernet-layer decoder shared by all parse() calls. */
+  private static final EthernetDecoder ETHERNET_DECODER = new EthernetDecoder();
+
+  /** IPv4-layer decoder, registered on the Ethernet decoder in the static block. */
+  private static final IpDecoder IP_DECODER = new IpDecoder();
+
+  // /** The tcp decoder. */
+  // private static final TcpDecoder TCP_DECODER = new TcpDecoder(new
+  // TcpPortProtocolMapper());
+  //
+  // /** The udp decoder. */
+  // private static final UdpDecoder UDP_DECODER = new UdpDecoder(new
+  // UdpPortProtocolMapper());
+
+  static {
+    // TCP/UDP decoders are intentionally not registered here; those layers
+    // are parsed directly in parse() below.
+    // IP_DECODER.register(InternetProtocol.TCP, TCP_DECODER);
+    // IP_DECODER.register(InternetProtocol.UDP, UDP_DECODER);
+    ETHERNET_DECODER.register(EthernetType.IPV4, IP_DECODER);
+  }
+
+  /**
+   * Instantiates a new pcap parser. Private: this is a static utility class.
+   */
+  private PcapParser() { // $codepro.audit.disable emptyMethod
+
+  }
+
+  /**
+   * Parses a raw pcap capture into one {@link PacketInfo} per packet.
+   * Packets that raise NegativeArraySizeException while decoding (presumably
+   * truncated/malformed records — TODO confirm against the Kraken decoder)
+   * are skipped; reading stops cleanly at end of stream.
+   * 
+   * @param tcpdump
+   *          the raw bytes of a pcap file (global header plus packet records)
+   * @return the parsed packets; empty if none could be read
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static List<PacketInfo> parse(byte[] tcpdump) throws IOException {
+    List<PacketInfo> packetInfoList = new ArrayList<PacketInfo>();
+
+    // NOTE(review): pcapByteInputStream is never closed; harmless if it only
+    // wraps the byte array, but confirm against PcapByteInputStream.
+    PcapByteInputStream pcapByteInputStream = new PcapByteInputStream(tcpdump);
+
+    GlobalHeader globalHeader = pcapByteInputStream.getGlobalHeader();
+    while (true) {
+      try
+
+      {
+        PcapPacket packet = pcapByteInputStream.getPacket();
+        // int packetCounter = 0;
+        // PacketHeader packetHeader = null;
+        // Ipv4Packet ipv4Packet = null;
+        TcpPacket tcpPacket = null;
+        UdpPacket udpPacket = null;
+        // Buffer packetDataBuffer = null;
+        int sourcePort = 0;
+        int destinationPort = 0;
+
+        // LOG.trace("Got packet # " + ++packetCounter);
+
+        // LOG.trace(packet.getPacketData());
+        ETHERNET_DECODER.decode(packet);
+
+        PacketHeader packetHeader = packet.getPacketHeader();
+        Ipv4Packet ipv4Packet = Ipv4Packet.parse(packet.getPacketData());
+
+        if (ipv4Packet.getProtocol() == Constants.PROTOCOL_TCP) {
+          tcpPacket = TcpPacket.parse(ipv4Packet);
+
+        }
+
+        if (ipv4Packet.getProtocol() == Constants.PROTOCOL_UDP) {
+
+          // Manually decode the UDP header from the IPv4 payload: source
+          // port, destination port, length, checksum (each an unsigned short,
+          // read in that order), then the remaining bytes are the UDP data.
+          Buffer packetDataBuffer = ipv4Packet.getData();
+          sourcePort = packetDataBuffer.getUnsignedShort();
+          destinationPort = packetDataBuffer.getUnsignedShort();
+
+          udpPacket = new UdpPacket(ipv4Packet, sourcePort, destinationPort);
+
+          udpPacket.setLength(packetDataBuffer.getUnsignedShort());
+          udpPacket.setChecksum(packetDataBuffer.getUnsignedShort());
+          packetDataBuffer.discardReadBytes();
+          udpPacket.setData(packetDataBuffer);
+        }
+
+        packetInfoList.add(new PacketInfo(globalHeader, packetHeader, packet,
+            ipv4Packet, tcpPacket, udpPacket));
+      } catch (NegativeArraySizeException ignored) {
+        // Skip this packet but keep reading the rest of the dump.
+        LOG.debug("Ignorable exception while parsing packet.", ignored);
+      } catch (EOFException eof) { // $codepro.audit.disable logExceptions
+        // End of stream: normal termination of the read loop.
+        // Ignore exception and break
+        break;
+      }
+    }
+    return packetInfoList;
+  }
+
+  /**
+   * Micro-benchmark entry point: repeatedly parses a capture file and prints
+   * throughput/parallelism estimates.
+   * NOTE(review): relies on a hard-coded developer path, so it is only
+   * runnable on the original author's machine.
+   * 
+   * @param args
+   *          the arguments (unused)
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   * @throws InterruptedException
+   *           the interrupted exception
+   */
+  public static void main(String[] args) throws IOException,
+      InterruptedException {
+
+    double totalIterations = 1000000;
+    double parallelism = 64;
+    double targetEvents = 1000000;
+
+    File fin = new File("/Users/sheetal/Downloads/udp.pcap");
+    // fout is only referenced by the commented-out writes below.
+    File fout = new File(fin.getAbsolutePath() + ".parsed");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    long startTime = System.currentTimeMillis();
+    for (int i = 0; i < totalIterations; i++) {
+      List<PacketInfo> list = parse(pcapBytes);
+
+      // Intentionally a no-op loop: per-packet output is commented out so the
+      // benchmark measures parsing cost only.
+      for (PacketInfo packetInfo : list) {
+        // FileUtils.writeStringToFile(fout, packetInfo.getJsonDoc(), true);
+        // FileUtils.writeStringToFile(fout, "\n", true);
+        // System.out.println(packetInfo.getJsonDoc());
+      }
+    }
+    long endTime = System.currentTimeMillis();
+
+    System.out.println("Time taken to process " + totalIterations + " events :"
+        + (endTime - startTime) + " milliseconds");
+
+    System.out
+        .println("With parallelism of "
+            + parallelism
+            + " estimated time to process "
+            + targetEvents
+            + " events: "
+            + (((((endTime - startTime) / totalIterations) * targetEvents) / parallelism) / 1000)
+            + " seconds");
+    System.out.println("With parallelism of " + parallelism
+        + " estimated # of events per second: "
+        + ((parallelism * 1000 * totalIterations) / (endTime - startTime))
+        + " events");
+    System.out.println("Expected Parallelism to process " + targetEvents
+        + " events in a second: "
+        + (targetEvents / ((1000 * totalIterations) / (endTime - startTime))));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapUtils.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapUtils.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapUtils.java
index 8d06caa..8f9520f 100644
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapUtils.java
+++ b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/PcapUtils.java
@@ -280,6 +280,33 @@ public class PcapUtils {
     return sb.toString();
   }
 
+  /**
+   * Gets the short session key (5-tuple only): hex-encoded source IP,
+   * hex-encoded destination IP, protocol, source port and destination port,
+   * joined with "-".
+   * 
+   * @param srcIp
+   *          the src ip
+   * @param dstIp
+   *          the dst ip
+   * @param protocol
+   *          the protocol
+   * @param srcPort
+   *          the src port
+   * @param dstPort
+   *          the dst port
+   * @return the session key
+   */
+  public static String getShortSessionKey(String srcIp, String dstIp, int protocol,
+      int srcPort, int dstPort) {
+    final String keySeparator = "-";
+    // StringBuilder: no synchronization needed for a method-local buffer
+    // (the original StringBuffer paid for locking with no benefit).
+    StringBuilder sb = new StringBuilder(40);
+    sb.append(convertIpv4IpToHex(srcIp)).append(keySeparator)
+        .append(convertIpv4IpToHex(dstIp)).append(keySeparator)
+        .append(protocol).append(keySeparator).append(srcPort)
+        .append(keySeparator).append(dstPort);
+
+    return sb.toString();
+  }
+  
   // public static String convertPortToHex(String portNumber) {
   // return convertPortToHex(Integer.valueOf(portNumber));
   //

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/asdf.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/asdf.java b/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/asdf.java
deleted file mode 100644
index db2c2b2..0000000
--- a/opensoc-streaming/OpenSOC-Common/src/main/java/com/opensoc/pcap/asdf.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.opensoc.pcap;
-
-public class asdf {
-
-}


[21/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/README.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/README.md b/opensoc-streaming/OpenSOC-DataLoads/README.md
new file mode 100644
index 0000000..773d6db
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataLoads/README.md
@@ -0,0 +1,50 @@
+# OpenSOC-DataLoads
+
+This project is a collection of classes to assist with loading of various enrichment sources into OpenSOC.
+
+## Threat Intel Enrichment
+
+Threat Intel enrichment data sources can be loaded into OpenSOC using the ThreatIntelLoader class and an implementation of a ThreatIntelSource interface. Both are described below.
+
+### ThreatIntelSource Interface
+
+This interface extends the Iterator interface; implementations must provide the following methods:
+
+`void initializeSource(Configuration config);`
+
+Put any setup that needs to be done here. This will be called by ThreatIntelLoader before attempting to fetch any data from the source. The parameter config is a Configuration object created from the configuration file passed to ThreatIntelLoader. See the ThreatIntelLoader section below for more details.
+
+`void cleanupSource();`
+
+This is called after all data is retrieved, just before ThreatIntelLoader exits. Perform any clean up here if needed.
+
+`JSONObject next()`
+
+This method should return the next piece of intel to be stored in OpenSOC. The returned JSONObject must have the following fields:
+
+* indicator - The indicator that will be checked against during enrichment. For example, an IP address or a hostname.
+* source - The source of the data, which can be any unique string identifying the origin of the intel. This will be the column qualifier in HBase and will be used to group matches in Storm.
+* data - A JSONArray of JSONObjects that detail the intel for the indicator. The JSONObjects have no required format
+
+
+`boolean hasNext()`
+
+Returns true if there are more sources to read. Otherwise, false.
+
+
+### ThreatIntelLoader
+
+This class is intended to be invoked from the command line on the OpenSOC cluster and is responsible for taking intel from a ThreatIntelSource implementation and putting it into HBase.
+
+#### Usage
+
+````
+usage: ThreatIntelLoader [--configFile <c>] --source <s> --table <t>
+    --configFile <c>   Configuration file for source class
+    --source <s>       Source class to use
+    --table <t>        HBase table to load into
+````
+
+* configFile - the file passed in by this class is used to provide configuration options to the ThreatIntelSource implementation being used.
+* source - the implementation of ThreatIntelSource to use
+* table - the hbase table to store the threat intel in for enrichment later. This should match what the corresponding enrichment bolt is using in Storm

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/dependency-reduced-pom.xml b/opensoc-streaming/OpenSOC-DataLoads/dependency-reduced-pom.xml
deleted file mode 100644
index 679e46a..0000000
--- a/opensoc-streaming/OpenSOC-DataLoads/dependency-reduced-pom.xml
+++ /dev/null
@@ -1,145 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <parent>
-    <artifactId>OpenSOC-Streaming</artifactId>
-    <groupId>com.opensoc</groupId>
-    <version>0.3BETA-SNAPSHOT</version>
-  </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>OpenSOC-DataLoads</artifactId>
-  <build>
-    <sourceDirectory>src</sourceDirectory>
-    <resources>
-      <resource>
-        <directory>src</directory>
-        <excludes>
-          <exclude>**/*.java</exclude>
-        </excludes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.1</version>
-        <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>2.3</version>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-            <configuration>
-              <artifactSet>
-                <excludes>
-                  <exclude>classworlds:classworlds</exclude>
-                  <exclude>junit:junit</exclude>
-                  <exclude>jmock:*</exclude>
-                  <exclude>*:xml-apis</exclude>
-                  <exclude>org.apache.maven:lib:tests</exclude>
-                  <exclude>log4j:log4j:jar:</exclude>
-                  <exclude>*:hbase:*</exclude>
-                </excludes>
-              </artifactSet>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.storm</groupId>
-      <artifactId>storm-core</artifactId>
-      <version>0.9.2-incubating</version>
-      <scope>provided</scope>
-      <exclusions>
-        <exclusion>
-          <artifactId>clojure</artifactId>
-          <groupId>org.clojure</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>clj-time</artifactId>
-          <groupId>clj-time</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>compojure</artifactId>
-          <groupId>compojure</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>hiccup</artifactId>
-          <groupId>hiccup</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>ring-devel</artifactId>
-          <groupId>ring</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>ring-jetty-adapter</artifactId>
-          <groupId>ring</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>tools.logging</artifactId>
-          <groupId>org.clojure</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>math.numeric-tower</artifactId>
-          <groupId>org.clojure</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>tools.cli</artifactId>
-          <groupId>org.clojure</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>commons-exec</artifactId>
-          <groupId>org.apache.commons</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>curator-framework</artifactId>
-          <groupId>org.apache.curator</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>carbonite</artifactId>
-          <groupId>com.twitter</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>snakeyaml</artifactId>
-          <groupId>org.yaml</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>httpclient</artifactId>
-          <groupId>org.apache.httpcomponents</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>disruptor</artifactId>
-          <groupId>com.googlecode.disruptor</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>jgrapht-core</artifactId>
-          <groupId>org.jgrapht</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>logback-classic</artifactId>
-          <groupId>ch.qos.logback</groupId>
-        </exclusion>
-        <exclusion>
-          <artifactId>log4j-over-slf4j</artifactId>
-          <groupId>org.slf4j</groupId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>3.8.2</version>
-      <scope>compile</scope>
-    </dependency>
-  </dependencies>
-</project>
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/pom.xml b/opensoc-streaming/OpenSOC-DataLoads/pom.xml
index 44df767..c51c045 100644
--- a/opensoc-streaming/OpenSOC-DataLoads/pom.xml
+++ b/opensoc-streaming/OpenSOC-DataLoads/pom.xml
@@ -15,28 +15,41 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-DataLoads</artifactId>
 	<properties>
-
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>	
 	</properties>
 	<dependencies>
 		<dependency>
 			<groupId>com.opensoc</groupId>
 			<artifactId>OpenSOC-Common</artifactId>
-			<version>${parent.version}</version>
+			<version>${project.parent.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.storm</groupId>
 			<artifactId>storm-core</artifactId>
 			<version>${global_storm_version}</version>
 			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+			  	 <artifactId>servlet-api</artifactId>
+			   	 <groupId>javax.servlet</groupId>
+			  </exclusion>
+			</exclusions>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.hbase</groupId>
 			<artifactId>hbase-client</artifactId>
 			<version>${global_hbase_version}</version>
+			<exclusions>
+				<exclusion>
+					<artifactId>log4j</artifactId>
+					<groupId>log4j</groupId>
+				</exclusion>
+			</exclusions>
 		</dependency>
 	</dependencies>
 	<build>
@@ -69,15 +82,18 @@
 							<goal>shade</goal>
 						</goals>
 						<configuration>
+						<minimizeJar>true</minimizeJar>
 							<artifactSet>
 								<excludes>
 									<exclude>classworlds:classworlds</exclude>
 									<exclude>junit:junit</exclude>
 									<exclude>jmock:*</exclude>
 									<exclude>*:xml-apis</exclude>
+									<exclude>*slf4j*</exclude>
 									<exclude>org.apache.maven:lib:tests</exclude>
 									<exclude>log4j:log4j:jar:</exclude>
 									<exclude>*:hbase:*</exclude>
+									<exclude>org.apache.hadoop.yarn.util.package-info*</exclude>
 								</excludes>
 							</artifactSet>
 						</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/src/com/opensoc/dataloads/cif/HBaseTableLoad.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/src/com/opensoc/dataloads/cif/HBaseTableLoad.java b/opensoc-streaming/OpenSOC-DataLoads/src/com/opensoc/dataloads/cif/HBaseTableLoad.java
deleted file mode 100644
index cdf0541..0000000
--- a/opensoc-streaming/OpenSOC-DataLoads/src/com/opensoc/dataloads/cif/HBaseTableLoad.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package com.opensoc.dataloads.cif;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.zip.GZIPInputStream;
-
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.conf.Configuration;
-
-import java.io.BufferedInputStream;
-
-public class HBaseTableLoad {
-
-	private static Configuration conf = null;
-	private final static String hbaseTable = "cif_table";
-	/**
-	 * Initialization
-	 */
-	static {
-		conf = HBaseConfiguration.create();
-	}
-
-	public static void main(String[] args) {
-
-		LoadDirHBase(args[0]);
-
-	}
-
-	public static void LoadDirHBase(String dirName) {
-		System.out.println("Working on:" + dirName);
-		File folder = new File(dirName);
-		File[] listOfFiles = folder.listFiles();
-
-		for (int i = 0; i < listOfFiles.length; i++) {
-			File file = listOfFiles[i];
-
-			if (file.isFile() && file.getName().endsWith(".gz")) {
-
-				// e.g. folder name is infrastructure_botnet. Col Qualifier is
-				// botnet and col_family is infrastructure
-
-				String col_family = folder.getName().split("_")[0];
-				String col_qualifier = folder.getName().split("_")[1];
-
-				// Open gz file
-				try {
-					InputStream input = new BufferedInputStream(
-							new GZIPInputStream(new FileInputStream(file)));
-
-					HBaseBulkPut(input, col_family, col_qualifier);
-
-				} catch (IOException e) {
-					// TODO Auto-generated catch block
-					e.printStackTrace();
-				} catch (ParseException e) {
-					// TODO Auto-generated catch block
-					e.printStackTrace();
-				}
-			} else if (file.isDirectory()) // if sub-directory then call the
-											// function recursively
-				LoadDirHBase(file.getAbsolutePath());
-		}
-	}
-
-	/**
-	 * @param input
-	 * @param hbaseTable
-	 * @param col_family
-	 * @throws IOException
-	 * @throws ParseException
-	 * 
-	 * 
-	 *             Inserts all json records picked up from the inputStream
-	 */
-	public static void HBaseBulkPut(InputStream input, String col_family,
-			String col_qualifier) throws IOException, ParseException {
-
-		HTable table = new HTable(conf, hbaseTable);
-		JSONParser parser = new JSONParser();
-
-		BufferedReader br = new BufferedReader(new InputStreamReader(input));
-		String jsonString;
-		List<Put> allputs = new ArrayList<Put>();
-		Map json;
-
-		while ((jsonString = br.readLine()) != null) {
-
-			try {
-
-				json = (Map) parser.parse(jsonString);
-			} catch (ParseException e) {
-				//System.out.println("Unable to Parse: " +jsonString);
-				continue;
-			}
-			// Iterator iter = json.entrySet().iterator();
-
-			// Get Address - either IP/domain or email and make that the Key
-			Put put = new Put(Bytes.toBytes((String) json.get("address")));
-
-			// We are just adding a "Y" flag to mark this address
-			put.add(Bytes.toBytes(col_family), Bytes.toBytes(col_qualifier),
-					Bytes.toBytes("Y"));
-
-			allputs.add(put);
-		}
-		table.put(allputs);
-		System.out.println("---------------Values------------------"
-				+ hbaseTable);
-		table.close();
-	}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/src/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/src/hbase-site.xml b/opensoc-streaming/OpenSOC-DataLoads/src/hbase-site.xml
deleted file mode 100644
index a73469d..0000000
--- a/opensoc-streaming/OpenSOC-DataLoads/src/hbase-site.xml
+++ /dev/null
@@ -1,100 +0,0 @@
-<!--Tue Feb 11 02:34:08 2014 -->
-<configuration>
-
-	<property>
-		<name>hbase.regionserver.global.memstore.lowerLimit</name>
-		<value>0.38</value>
-	</property>
-	<property>
-		<name>zookeeper.session.timeout</name>
-		<value>30000</value>
-	</property>
-
-	<property>
-		<name>hbase.security.authorization</name>
-		<value>false</value>
-	</property>
-	<property>
-		<name>hbase.cluster.distributed</name>
-		<value>true</value>
-	</property>
-	
-	<property>
-		<name>hbase.hstore.flush.retries.number</name>
-		<value>120</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.block.multiplier</name>
-		<value>4</value>
-	</property>
-	<property>
-		<name>hbase.hstore.blockingStoreFiles</name>
-		<value>200</value>
-	</property>
-	<property>
-		<name>hbase.defaults.for.version.skip</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.regionserver.global.memstore.upperLimit</name>
-		<value>0.4</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.mslab.enabled</name>
-		<value>true</value>
-	</property>
-	<property>
-		<name>hbase.client.keyvalue.maxsize</name>
-		<value>10485760</value>
-	</property>
-	<property>
-		<name>hbase.superuser</name>
-		<value>hbase</value>
-	</property>
-	<property>
-		<name>hfile.block.cache.size</name>
-		<value>0.40</value>
-	</property>
-	<property>
-		<name>zookeeper.znode.parent</name>
-		<value>/hbase-unsecure</value>
-	</property>
-	<property>
-		<name>hbase.hregion.max.filesize</name>
-		<value>10737418240</value>
-	</property>
-	<property>
-		<name>hbase.zookeeper.property.clientPort</name>
-		<value>2181</value>
-	</property>
-	<property>
-		<name>hbase.security.authentication</name>
-		<value>simple</value>
-	</property>
-	<property>
-		<name>hbase.client.scanner.caching</name>
-		<value>100</value>
-	</property>
-	<property>
-		<name>hbase.hregion.memstore.flush.size</name>
-		<value>134217728</value>
-	</property>
-	<property>
-		<name>hbase.hregion.majorcompaction</name>
-		<value>86400000</value>
-	</property>
-	 <property>
-      <name>hbase.zookeeper.property.clientPort</name>
-      <value>2181</value>
-    </property>
-
-    <property>
-      <name>hbase.zookeeper.quorum</name>
-      <value>zkpr1</value>
-    </property>
-
-	<property>
-		<name>hbase.client.write.buffer</name>
-		<value>500000000</value>
-	</property>
-</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/ThreatIntelLoader.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/ThreatIntelLoader.java b/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/ThreatIntelLoader.java
new file mode 100644
index 0000000..c602d0c
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/ThreatIntelLoader.java
@@ -0,0 +1,174 @@
+package com.opensoc.dataloads;
+
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.util.ArrayList;
+
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONArray;
+
+import com.opensoc.dataloads.interfaces.ThreatIntelSource;
+
+/**
+ * Command-line loader that pulls threat intel records from a
+ * {@link ThreatIntelSource} implementation and bulk-writes them into an
+ * HBase table for later enrichment lookups.
+ */
+public class ThreatIntelLoader {
+
+	/** The Constant LOG. */
+	private static final Logger LOG = Logger.getLogger(ThreatIntelLoader.class);
+
+	/** Number of Put requests buffered before each bulk write to HBase. */
+	private static final int BULK_SIZE = 50;
+
+	/**
+	 * Entry point. Parses the CLI options, instantiates the configured
+	 * source class, and streams its records into the given HBase table
+	 * in batches of {@link #BULK_SIZE}.
+	 *
+	 * @param args command line arguments; see parseCommandLine()
+	 */
+	public static void main(String[] args) {
+
+		CommandLine commandLine = parseCommandLine(args);
+		File configFile = new File(commandLine.getOptionValue("configFile"));
+
+		PropertiesConfiguration sourceConfig = null;
+		try {
+			sourceConfig = new PropertiesConfiguration(configFile);
+		} catch (org.apache.commons.configuration.ConfigurationException e) {
+			LOG.error("Error in configuration file " + configFile);
+			LOG.error(e);
+			System.exit(-1);
+		}
+
+		ThreatIntelSource threatIntelSource = null;
+		try {
+			threatIntelSource = (ThreatIntelSource) Class.forName(commandLine.getOptionValue("source")).newInstance();
+			threatIntelSource.initializeSource(sourceConfig);
+		} catch (ClassNotFoundException|InstantiationException|IllegalAccessException e) {
+			LOG.error("Error while trying to load class " + commandLine.getOptionValue("source"));
+			LOG.error(e);
+			System.exit(-1);
+		}
+
+		Configuration hConf = HBaseConfiguration.create();
+		HTable table = null;
+		try {
+			table = new HTable(hConf, commandLine.getOptionValue("table"));
+		} catch (IOException e) {
+			LOG.error("Exception when processing HBase config");
+			LOG.error(e);
+			System.exit(-1);
+		}
+
+		ArrayList<Put> putList = new ArrayList<Put>();
+
+		try {
+			while (threatIntelSource.hasNext()) {
+
+				JSONObject intel = threatIntelSource.next();
+
+				/*
+				 * If any of the required fields from threatIntelSource are
+				 * missing, or contain invalid data, don't put it in HBase.
+				 */
+				try {
+
+					putList.add(putRequestFromIntel(intel));
+
+					if (putList.size() >= BULK_SIZE) {
+						table.put(putList);
+						putList.clear();
+					}
+
+				} catch (NullPointerException|ClassCastException e) {
+					LOG.error("Exception while processing intel object");
+					LOG.error(intel.toString());
+					LOG.error(e);
+				} catch (InterruptedIOException|org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException e) {
+					LOG.error("Problem communicating with HBase");
+					LOG.error(e);
+					System.exit(-1);
+				}
+			}
+
+			// BUG FIX: flush the final partial batch. Previously, up to
+			// BULK_SIZE - 1 trailing records were silently dropped.
+			if (!putList.isEmpty()) {
+				try {
+					table.put(putList);
+				} catch (InterruptedIOException|org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException e) {
+					LOG.error("Problem communicating with HBase");
+					LOG.error(e);
+					System.exit(-1);
+				}
+			}
+		} finally {
+			// Per the ThreatIntelSource contract, let the source clean up
+			// once all data has been retrieved (was never called before).
+			threatIntelSource.cleanupSource();
+			try {
+				table.close();
+			} catch (IOException e) {
+				LOG.error("Error while closing HBase table");
+				LOG.error(e);
+			}
+		}
+
+	}
+	/**
+	 * Takes a JSONObject from a ThreatIntelSource implementation, ensures
+	 * that the format of the returned JSONObject is correct, and returns
+	 * a Put request for HBase: row key = "indicator", column family
+	 * "source", qualifier = "source" value, cell = "data" array as JSON.
+	 * 
+	 * @param	intel	The JSONObject from a ThreatIntelSource
+	 * @return			A put request for the intel data 
+	 * @throws	NullPointerException If a required field is missing
+	 * @throws	ClassCastException If a field has an invalid type
+	 */
+	private static Put putRequestFromIntel(JSONObject intel) {
+		
+		Put tempPut = new Put(Bytes.toBytes((String) intel.get("indicator")));
+		
+		JSONArray intelArray = (JSONArray) intel.get("data");
+		
+		tempPut.add(Bytes.toBytes("source"),
+					Bytes.toBytes((String) intel.get("source")),
+					Bytes.toBytes(intelArray.toString()));
+		
+		return tempPut;
+	}
+	/**
+	 * Handles parsing of command line options and validates the options are
+	 * used correctly. This will not validate the value of the options, it
+	 * will just ensure that the required options are present. If the options
+	 * are used incorrectly, the help is printed, and the program exits.
+	 * 
+	 * @param  args The arguments from the CLI
+	 * @return 		A CommandLine with the CLI arguments
+	 */
+	private static CommandLine parseCommandLine(String[] args) {
+		
+		CommandLineParser parser = new BasicParser();
+		CommandLine cli = null;
+		
+		Options options = new Options();
+		
+		options.addOption(OptionBuilder.withArgName("s").
+				withLongOpt("source").
+				isRequired(true).
+				hasArg(true).
+				withDescription("Source class to use").
+				create()
+				);
+		options.addOption(OptionBuilder.withArgName("t").
+				withLongOpt("table").
+				isRequired(true).
+				hasArg(true).
+				withDescription("HBase table to load into").
+				create()
+				);
+		options.addOption(OptionBuilder.withArgName("c").
+				withLongOpt("configFile").
+				hasArg(true).
+				withDescription("Configuration file for source class").
+				create()
+				);
+		
+		try {
+			cli = parser.parse(options, args);
+		} catch(org.apache.commons.cli.ParseException e) {
+			HelpFormatter formatter = new HelpFormatter();
+			formatter.printHelp("ThreatIntelLoader", options, true);
+			System.exit(-1);
+		}
+		
+		return cli;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/cif/HBaseTableLoad.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/cif/HBaseTableLoad.java b/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/cif/HBaseTableLoad.java
new file mode 100644
index 0000000..5e456e4
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataLoads/src/main/java/com/opensoc/dataloads/cif/HBaseTableLoad.java
@@ -0,0 +1,238 @@
+package com.opensoc.dataloads.cif;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipInputStream;
+
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Options;
+
+import java.io.BufferedInputStream;
+
+public class HBaseTableLoad {
+
+	private static final Logger LOG = Logger.getLogger(HBaseTableLoad.class);
+	private static Configuration conf = null;
+	// Target HBase table name; overridden by the -t command-line option.
+	private String hbaseTable = "cif_table";
+	// Root directory scanned for .gz/.zip/.json feed files; overridden by -d.
+	private String dirName = "./";
+	// When true, only files whose absolute paths appear in 'files' are loaded (-f).
+	private boolean usefileList = false;
+	private Set<String> files;
+
+	/**
+	 * Initialization: build the HBase configuration once per JVM
+	 * (reads hbase-site.xml from the classpath).
+	 */
+	static {
+		conf = HBaseConfiguration.create();
+	}
+
+	public static void main(String[] args) {
+
+		HBaseTableLoad ht = new HBaseTableLoad();
+
+		ht.parse(args);
+		// NOTE(review): the actual load call is commented out, so running main
+		// only parses arguments and exits -- confirm whether this is intentional.
+		//ht.LoadDirHBase();
+
+	}
+
+	/**
+	 * Recursively walks dirName and bulk-puts every .gz/.zip/.json file found
+	 * into HBase. Column family and qualifier are derived from the containing
+	 * directory name, expected to be "<family>_<qualifier>"
+	 * (e.g. "infrastructure_botnet").
+	 */
+	private void LoadDirHBase() {
+		LOG.info("Working on:" + dirName);
+		File folder = new File(dirName);
+		// NOTE(review): listFiles() returns null when dirName does not exist
+		// or is not a directory, which would NPE in the loop below -- verify
+		// callers always pass a real directory.
+		File[] listOfFiles = folder.listFiles();
+		InputStream input;
+
+		for (int i = 0; i < listOfFiles.length; i++) {
+			File file = listOfFiles[i];
+
+			if (file.isFile()) {
+
+				// Check if filename is present in FileList
+				if (usefileList)
+					if (!files.contains(file.getAbsolutePath()))
+						continue;
+
+				// e.g. folder name is infrastructure_botnet. Col Qualifier is
+				// botnet and col_family is infrastructure
+				// NOTE(review): split("_")[1] throws
+				// ArrayIndexOutOfBoundsException for a directory name with no
+				// underscore -- confirm the feed layout guarantees one.
+
+				String col_family = folder.getName().split("_")[0];
+				String col_qualifier = folder.getName().split("_")[1];
+
+				// Open file
+				try {
+					if (file.getName().endsWith(".gz"))
+						input = new BufferedInputStream(new GZIPInputStream(
+								new FileInputStream(file)));
+					else if (file.getName().endsWith(".zip"))
+						// NOTE(review): ZipInputStream requires getNextEntry()
+						// before bytes can be read; as consumed downstream a
+						// .zip likely yields zero records -- verify.
+						input = new BufferedInputStream(new ZipInputStream(
+								new FileInputStream(file)));
+					else if (file.getName().endsWith(".json"))
+						input = new BufferedInputStream((new FileInputStream(
+								file)));
+					else
+						continue; // unsupported extension: skip silently
+
+					LOG.info("Begin Loading File:" + file.getAbsolutePath());
+
+					HBaseBulkPut(input, col_family, col_qualifier);
+					LOG.info("Completed Loading File:" + file.getAbsolutePath());
+
+				} catch (IOException e) {
+					// Log-and-continue: one bad file should not stop the run.
+					e.printStackTrace();
+				} catch (ParseException e) {
+					// Declared by HBaseBulkPut but effectively unreachable:
+					// per-line parse errors are swallowed there.
+					e.printStackTrace();
+				}
+			} else if (file.isDirectory()) // if sub-directory then call the
+											// function recursively
+				// NOTE(review): recursion reassigns the dirName field, so after
+				// returning, dirName no longer names the directory being walked.
+				this.LoadDirHBase(file.getAbsolutePath());
+		}
+	}
+
+	// Convenience overload: repoint the loader at a directory and walk it.
+	private void LoadDirHBase(String dirname) {
+
+		this.dirName = dirname;
+		this.LoadDirHBase();
+
+	}
+
+	/**
+	 * Reads newline-delimited JSON records from the stream and writes one
+	 * HBase row per record: row key = the record's "address" field, with a
+	 * single "Y" marker cell at col_family:col_qualifier. All puts are
+	 * buffered and submitted in one batch at the end.
+	 *
+	 * @param input         stream containing one JSON object per line
+	 * @param col_family    HBase column family for the marker cell
+	 * @param col_qualifier HBase column qualifier for the marker cell
+	 * @throws IOException    on read failure or HBase put failure
+	 * @throws ParseException declared but never propagated: malformed lines
+	 *                        are skipped inside the loop
+	 */
+	private void HBaseBulkPut(InputStream input, String col_family,
+			String col_qualifier) throws IOException, ParseException {
+
+		// NOTE(review): table is not closed if an exception escapes before the
+		// final table.close() -- consider try/finally to avoid the leak.
+		HTable table = new HTable(conf, hbaseTable);
+		JSONParser parser = new JSONParser();
+
+		BufferedReader br = new BufferedReader(new InputStreamReader(input));
+		String jsonString;
+		List<Put> allputs = new ArrayList<Put>();
+		Map json;
+
+		while ((jsonString = br.readLine()) != null) {
+
+			try {
+
+				json = (Map) parser.parse(jsonString);
+			} catch (ParseException e) {
+				// Malformed line: skip it and keep loading the rest.
+				continue;
+			}
+
+			// Get Address - either IP/domain or email and make that the Key
+			// NOTE(review): a record without an "address" field produces a
+			// null row key and Put's constructor will throw -- confirm the
+			// feed always supplies it.
+			Put put = new Put(Bytes.toBytes((String) json.get("address")));
+
+			// We are just adding a "Y" flag to mark this address
+			put.add(Bytes.toBytes(col_family), Bytes.toBytes(col_qualifier),
+					Bytes.toBytes("Y"));
+
+			allputs.add(put);
+		}
+		table.put(allputs);
+		table.close();
+	}
+
+	// Prints command-line usage for this loader.
+	private void printUsage() {
+		System.out
+				.println("Usage: java -cp JarFile com.opensoc.dataloads.cif.HBaseTableLoad -d <directory> -t <tablename> -f <optional file-list>");
+	}
+
+	/**
+	 * Parses -d (directory, required), -t (table, required) and -f (optional
+	 * file list) and stores them on this instance. Exits the JVM on a missing
+	 * required option or on a parse failure.
+	 */
+	private void parse(String[] args) {
+		CommandLineParser parser = new BasicParser();
+		Options options = new Options();
+
+		options.addOption("d", true, "description");
+		options.addOption("t", true, "description");
+		// NOTE(review): "f" is registered with hasArg=false, yet
+		// getOptionValue("f") is called below -- it will always return null,
+		// so LoadFileList(null) will fail. Should likely be
+		// addOption("f", true, ...).
+		options.addOption("f", false, "description");
+
+		CommandLine cmd = null;
+		try {
+			cmd = parser.parse(options, args);
+
+			if (cmd.hasOption("d"))
+			{
+				this.dirName = cmd.getOptionValue("d");
+				LOG.info("Directory Name:" + cmd.getOptionValue("d"));
+			}
+			else {
+				LOG.info("Missing Directory Name");
+				printUsage();
+				System.exit(-1);
+			}
+
+			if (cmd.hasOption("t"))
+			{
+				this.hbaseTable = cmd.getOptionValue("t");
+				LOG.info("HBase Table Name:" + cmd.getOptionValue("t"));
+			}
+			else {
+				LOG.info("Missing Table Name");
+				printUsage();
+				System.exit(-1);
+			}
+
+			if (cmd.hasOption("f")) {
+				this.usefileList = true;
+				files = LoadFileList(cmd.getOptionValue("f"));
+				LOG.info("FileList:" + cmd.getOptionValue("f"));
+			}
+
+		} catch (org.apache.commons.cli.ParseException e) {
+			LOG.error("Failed to parse comand line properties", e);
+			e.printStackTrace();
+			System.exit(-1);
+		}
+	}
+
+	/**
+	 * Reads one file path per line from the given list file.
+	 *
+	 * @param filename path of the list file to read
+	 * @return the set of paths read, or null if the file could not be opened
+	 *         or read. NOTE(review): callers never check for null; a null
+	 *         return leads to an NPE at files.contains() in LoadDirHBase.
+	 */
+	private Set<String> LoadFileList(String filename) {
+
+		Set<String> output = null;
+		BufferedReader reader;
+
+		try {
+			reader = new BufferedReader(new InputStreamReader(
+					new FileInputStream(filename)));
+			output = new HashSet<String>();
+			String in = "";
+
+			while ((in = reader.readLine()) != null)
+				output.add(in);
+
+			reader.close();
+
+		} catch (IOException e) {
+			// Best effort: fall through and return null on read failure.
+			e.printStackTrace();
+		}
+
+		return output;
+	}
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataLoads/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataLoads/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-DataLoads/src/main/resources/hbase-site.xml
new file mode 100644
index 0000000..a73469d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataLoads/src/main/resources/hbase-site.xml
@@ -0,0 +1,100 @@
+<!--Tue Feb 11 02:34:08 2014 -->
+<configuration>
+
+	<property>
+		<name>hbase.regionserver.global.memstore.lowerLimit</name>
+		<value>0.38</value>
+	</property>
+	<property>
+		<name>zookeeper.session.timeout</name>
+		<value>30000</value>
+	</property>
+
+	<property>
+		<name>hbase.security.authorization</name>
+		<value>false</value>
+	</property>
+	<property>
+		<name>hbase.cluster.distributed</name>
+		<value>true</value>
+	</property>
+	
+	<property>
+		<name>hbase.hstore.flush.retries.number</name>
+		<value>120</value>
+	</property>
+	<property>
+		<name>hbase.hregion.memstore.block.multiplier</name>
+		<value>4</value>
+	</property>
+	<property>
+		<name>hbase.hstore.blockingStoreFiles</name>
+		<value>200</value>
+	</property>
+	<property>
+		<name>hbase.defaults.for.version.skip</name>
+		<value>true</value>
+	</property>
+	<property>
+		<name>hbase.regionserver.global.memstore.upperLimit</name>
+		<value>0.4</value>
+	</property>
+	<property>
+		<name>hbase.hregion.memstore.mslab.enabled</name>
+		<value>true</value>
+	</property>
+	<property>
+		<name>hbase.client.keyvalue.maxsize</name>
+		<value>10485760</value>
+	</property>
+	<property>
+		<name>hbase.superuser</name>
+		<value>hbase</value>
+	</property>
+	<property>
+		<name>hfile.block.cache.size</name>
+		<value>0.40</value>
+	</property>
+	<property>
+		<name>zookeeper.znode.parent</name>
+		<value>/hbase-unsecure</value>
+	</property>
+	<property>
+		<name>hbase.hregion.max.filesize</name>
+		<value>10737418240</value>
+	</property>
+	<property>
+		<name>hbase.zookeeper.property.clientPort</name>
+		<value>2181</value>
+	</property>
+	<property>
+		<name>hbase.security.authentication</name>
+		<value>simple</value>
+	</property>
+	<property>
+		<name>hbase.client.scanner.caching</name>
+		<value>100</value>
+	</property>
+	<property>
+		<name>hbase.hregion.memstore.flush.size</name>
+		<value>134217728</value>
+	</property>
+	<property>
+		<name>hbase.hregion.majorcompaction</name>
+		<value>86400000</value>
+	</property>
+	 <property>
+      <name>hbase.zookeeper.property.clientPort</name>
+      <value>2181</value>
+    </property>
+
+    <property>
+      <name>hbase.zookeeper.quorum</name>
+      <value>zkpr1</value>
+    </property>
+
+	<property>
+		<name>hbase.client.write.buffer</name>
+		<value>500000000</value>
+	</property>
+</configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/README.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/README.md b/opensoc-streaming/OpenSOC-DataServices/README.md
new file mode 100644
index 0000000..e845566
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/README.md
@@ -0,0 +1 @@
+README

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/pom.xml b/opensoc-streaming/OpenSOC-DataServices/pom.xml
index 56b7372..14c51eb 100644
--- a/opensoc-streaming/OpenSOC-DataServices/pom.xml
+++ b/opensoc-streaming/OpenSOC-DataServices/pom.xml
@@ -1,22 +1,239 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
 
-	<groupId>com.opensoc</groupId>
+	<parent>
+		<groupId>com.opensoc</groupId>
+		<artifactId>OpenSOC-Streaming</artifactId>
+		<version>0.6BETA</version>
+	</parent>
+
+	<modelVersion>4.0.0</modelVersion>
 	<artifactId>OpenSOC-DataServices</artifactId>
 	<version>1.0-SNAPSHOT</version>
 
 	<properties>
 		<slf4j.version>1.6.4</slf4j.version>
+		<commons-configuration.version>1.10</commons-configuration.version>
+		<commons-beanutils.version>1.8.3</commons-beanutils.version>
+		<commons-jexl.version>2.1.1</commons-jexl.version>
+		<spring.integration.version>3.0.0.RELEASE</spring.integration.version>
+		<spring.version>3.2.6.RELEASE</spring.version>
 	</properties>
+	<repositories>
+		<repository>
+			<id>Kraken-Repo</id>
+			<name>Kraken Repository</name>
+			<url>http://download.krakenapps.org</url>
+		</repository>
+	</repositories>
 
 	<dependencies>
 		<dependency>
+			<groupId>org.krakenapps</groupId>
+			<artifactId>kraken-pcap</artifactId>
+			<version>1.5.0</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<artifactId>slf4j-simple</artifactId>
+					<groupId>org.slf4j</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+		<dependency>
+			<groupId>com.opensoc</groupId>
+			<artifactId>OpenSOC-Common</artifactId>
+			<version>${parent.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>${junit.version}</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-api-mockito</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-core</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-module-junit4</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>joda-time</groupId>
+			<artifactId>joda-time</artifactId>
+			<version>2.3</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-http</artifactId>
+			<version>${spring.integration.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-webmvc</artifactId>
+			<version>${spring.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-client</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-testing-util</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jsp-api</artifactId>
+					<groupId>javax.servlet.jsp</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>servlet-api</artifactId>
+					<groupId>javax.servlet</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>servlet-api-2.5</artifactId>
+					<groupId>org.mortbay.jetty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jackson-jaxrs</artifactId>
+					<groupId>org.codehaus.jackson</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jersey-core</artifactId>
+					<groupId>com.sun.jersey</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jasper-compiler</artifactId>
+					<groupId>tomcat</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jasper-runtime</artifactId>
+					<groupId>tomcat</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jsp-2.1</artifactId>
+					<groupId>org.mortbay.jetty</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jsp-api-2.1</artifactId>
+					<groupId>org.mortbay.jetty</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-common</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<artifactId>jsp-api</artifactId>
+					<groupId>javax.servlet.jsp</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>servlet-api</artifactId>
+					<groupId>javax.servlet</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jackson-jaxrs</artifactId>
+					<groupId>org.codehaus.jackson</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jersey-core</artifactId>
+					<groupId>com.sun.jersey</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jasper-compiler</artifactId>
+					<groupId>tomcat</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jasper-runtime</artifactId>
+					<groupId>tomcat</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>${global_hadoop_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<artifactId>jsp-api</artifactId>
+					<groupId>javax.servlet.jsp</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>servlet-api</artifactId>
+					<groupId>javax.servlet</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jersey-core</artifactId>
+					<groupId>com.sun.jersey</groupId>
+				</exclusion>
+				<exclusion>
+					<artifactId>jasper-runtime</artifactId>
+					<groupId>tomcat</groupId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+
+
+		<dependency>
 			<groupId>org.elasticsearch</groupId>
 			<artifactId>elasticsearch</artifactId>
 			<version>1.3.1</version>
 		</dependency>
 		<dependency>
+			<groupId>commons-beanutils</groupId>
+			<artifactId>commons-beanutils</artifactId>
+			<version>${commons-beanutils.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-jexl</artifactId>
+			<version>${commons-jexl.version}</version>
+		</dependency>
+
+		<dependency>
+			<artifactId>commons-configuration</artifactId>
+			<groupId>commons-configuration</groupId>
+			<version>${commons-configuration.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-api</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+
+		<dependency>
 			<groupId>commons-codec</groupId>
 			<artifactId>commons-codec</artifactId>
 			<version>1.6</version>
@@ -46,6 +263,12 @@
 			<groupId>org.eclipse.jetty</groupId>
 			<artifactId>apache-jsp</artifactId>
 			<version>9.2.1.v20140609</version>
+			<exclusions>
+				<exclusion>
+					<artifactId>javax.servlet.jsp-api</artifactId>
+					<groupId>javax.servlet.jsp</groupId>
+				</exclusion>
+			</exclusions>
 		</dependency>
 		<dependency>
 			<groupId>org.eclipse.jetty</groupId>
@@ -137,7 +360,7 @@
 		<dependency>
 			<groupId>org.slf4j</groupId>
 			<artifactId>slf4j-log4j12</artifactId>
-			<version>1.6.4</version>
+			<version>${slf4j.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.shiro</groupId>
@@ -160,6 +383,7 @@
 			<artifactId>slf4j-api</artifactId>
 			<version>${slf4j.version}</version>
 		</dependency>
+
 		<dependency>
 			<!-- Required in the sample apps only for 3rd-party libraries that expect 
 				to call the commons logging APIs -->
@@ -275,4 +499,4 @@
 			</plugin>
 		</plugins>
 	</build>
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsProcessingServer.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsProcessingServer.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsProcessingServer.java
index 3cf6246..cec50b9 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsProcessingServer.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsProcessingServer.java
@@ -11,7 +11,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
-import com.opensoc.dataservices.Main;
 
 public class AlertsProcessingServer {
 	

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsSearcher.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsSearcher.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsSearcher.java
index 15db704..4a23395 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsSearcher.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/alerts/server/AlertsSearcher.java
@@ -7,9 +7,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaClient.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaClient.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaClient.java
index 7874f19..7b61ba0 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaClient.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaClient.java
@@ -7,16 +7,14 @@ import java.util.Properties;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
-import org.eclipse.jetty.websocket.api.RemoteEndpoint;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.opensoc.dataservices.Main;
-
 import kafka.consumer.ConsumerConfig;
 import kafka.consumer.KafkaStream;
 import kafka.javaapi.consumer.ConsumerConnector;
 
+import org.eclipse.jetty.websocket.api.RemoteEndpoint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 public class KafkaClient 
 {
 	private static final Logger logger = LoggerFactory.getLogger( KafkaClient.class );

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaConsumer.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaConsumer.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaConsumer.java
index 0e01f1d..ca2f113 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaConsumer.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/kafkaclient/KafkaConsumer.java
@@ -2,15 +2,13 @@ package com.opensoc.dataservices.kafkaclient;
 
 import java.io.IOException;
 
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+
 import org.eclipse.jetty.websocket.api.RemoteEndpoint;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.opensoc.dataservices.Main;
-
-import kafka.consumer.ConsumerIterator;
-import kafka.consumer.KafkaStream;
-
 public class KafkaConsumer implements Runnable 
 {
 	private static final Logger logger = LoggerFactory.getLogger( KafkaConsumer.class );

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/DefaultServletModule.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/DefaultServletModule.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/DefaultServletModule.java
index 3e6d3b5..68ce111 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/DefaultServletModule.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/DefaultServletModule.java
@@ -9,7 +9,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.inject.Singleton;
 import com.google.inject.servlet.ServletModule;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 import com.opensoc.dataservices.servlet.LoginServlet;
 import com.opensoc.dataservices.servlet.LogoutServlet;
 import com.opensoc.dataservices.websocket.KafkaMessageSenderServlet;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/RestEasyModule.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/RestEasyModule.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/RestEasyModule.java
index 14dfdb8..a9efce8 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/RestEasyModule.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/RestEasyModule.java
@@ -4,11 +4,9 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.inject.AbstractModule;
-import com.google.inject.Binder;
-import com.google.inject.Module;
 import com.opensoc.dataservices.auth.RestSecurityInterceptor;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 import com.opensoc.dataservices.rest.Index;
+import com.opensoc.pcapservice.PcapReceiverImplRestEasy;
 
 public class RestEasyModule extends AbstractModule {
 	
@@ -18,6 +16,7 @@ public class RestEasyModule extends AbstractModule {
 	protected void configure() {
 		
 		bind( Index.class );
+		bind( PcapReceiverImplRestEasy.class );
 		bind( RestSecurityInterceptor.class );
 	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/ServiceModule.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/ServiceModule.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/ServiceModule.java
index 5271674..8c7e01d 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/ServiceModule.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/modules/guice/ServiceModule.java
@@ -8,7 +8,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.inject.Provides;
 import com.opensoc.dataservices.common.OpenSOCService;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 import com.opensoc.services.alerts.ElasticSearch_KafkaAlertsService;
 import com.opensoc.services.alerts.Solr_KafkaAlertsService;
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/rest/RestServices.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/rest/RestServices.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/rest/RestServices.java
index 650b6d4..4029214 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/rest/RestServices.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/rest/RestServices.java
@@ -8,8 +8,6 @@ import javax.ws.rs.core.Application;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
-
 public class RestServices extends Application 
 {
 	private static final Logger logger = LoggerFactory.getLogger( RestServices.class );

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/servlet/LogoutServlet.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/servlet/LogoutServlet.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/servlet/LogoutServlet.java
index 0754f3a..dbc13c7 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/servlet/LogoutServlet.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/servlet/LogoutServlet.java
@@ -9,18 +9,10 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.shiro.SecurityUtils;
-import org.apache.shiro.authc.AuthenticationException;
-import org.apache.shiro.authc.ExcessiveAttemptsException;
-import org.apache.shiro.authc.IncorrectCredentialsException;
-import org.apache.shiro.authc.LockedAccountException;
-import org.apache.shiro.authc.UnknownAccountException;
-import org.apache.shiro.authc.UsernamePasswordToken;
 import org.apache.shiro.subject.Subject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
-
 public class LogoutServlet extends HttpServlet 
 {
 	private static final Logger logger = LoggerFactory.getLogger( LogoutServlet.class );

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderServlet.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderServlet.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderServlet.java
index 90820d2..5823e18 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderServlet.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderServlet.java
@@ -8,7 +8,6 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 
 @WebServlet(name = "Message Sender Servlet", urlPatterns = { "/messages" })
 public class KafkaMessageSenderServlet extends WebSocketServlet

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderSocket.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderSocket.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderSocket.java
index 7a055ef..97a61f6 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderSocket.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaMessageSenderSocket.java
@@ -13,9 +13,7 @@ import org.eclipse.jetty.websocket.api.annotations.WebSocket;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.inject.Inject;
 import com.opensoc.dataservices.kafkaclient.KafkaClient;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 
 @WebSocket(maxTextMessageSize = 64 * 1024)
 public class KafkaMessageSenderSocket 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaWebSocketCreator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaWebSocketCreator.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaWebSocketCreator.java
index 2239afc..575fbfe 100644
--- a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaWebSocketCreator.java
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/dataservices/websocket/KafkaWebSocketCreator.java
@@ -12,7 +12,6 @@ import org.slf4j.LoggerFactory;
 
 import com.google.inject.Inject;
 import com.opensoc.dataservices.auth.AuthToken;
-import com.opensoc.dataservices.kafkaclient.KafkaConsumer;
 
 public class KafkaWebSocketCreator implements WebSocketCreator
 {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
new file mode 100644
index 0000000..e45d849
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/CellTimestampComparator.java
@@ -0,0 +1,23 @@
package com.opensoc.pcapservice;

import java.util.Comparator;

import org.apache.hadoop.hbase.Cell;

/**
 * Comparator that orders HBase {@link Cell}s by their timestamp, ascending.
 * Used to sort pcap cells before merging them into a single response.
 *
 * @author Sayi
 */
public class CellTimestampComparator implements Comparator<Cell> {

  /**
   * Compares two cells by timestamp (ascending).
   *
   * @param o1 the first cell
   * @param o2 the second cell
   * @return a negative integer, zero, or a positive integer as o1's timestamp
   *         is less than, equal to, or greater than o2's
   */
  @Override
  public int compare(Cell o1, Cell o2) {
    // Long.compare avoids the needless boxing done by
    // Long.valueOf(o1.getTimestamp()).compareTo(o2.getTimestamp()).
    return Long.compare(o1.getTimestamp(), o2.getTimestamp());
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
new file mode 100644
index 0000000..be1a1bf
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/ConfigurationUtil.java
@@ -0,0 +1,269 @@
+package com.opensoc.pcapservice;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.springframework.util.Assert;
+
+import com.opensoc.configuration.ConfigurationManager;
+
+
+
+/**
+ * utility class for this module which loads commons configuration to fetch
+ * properties from underlying resources to communicate with hbase.
+ * 
+ * @author Sayi
+ */
+public class ConfigurationUtil {
+
+	/** Configuration definition file name for fetching pcaps from hbase */
+	private static final String configDefFileName = "config-definition-hbase.xml";
+	
+	/** property configuration. */
+	private static Configuration propConfiguration = null;
+
+
+	/**
+	 * The Enum SizeUnit.
+	 */
+	public enum SizeUnit {
+
+		/** The kb. */
+		KB,
+		/** The mb. */
+		MB
+	};
+
+	/** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+	private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+	/**
+	 * Loads configuration resources 
+	 * @return Configuration
+	 */
+	public static Configuration getConfiguration() {
+		if(propConfiguration == null){
+			propConfiguration =  ConfigurationManager.getConfiguration(configDefFileName);
+		}
+		return propConfiguration;
+	}
+
+	/**
+	 * Returns the configured default result size in bytes, if the user input is
+	 * null; otherwise, returns the user input after validating with the
+	 * configured max value. Throws IllegalArgumentException if : 1. input is
+	 * less than or equals to 0 OR 2. input is greater than configured
+	 * {hbase.scan.max.result.size} value
+	 * 
+	 * @param input
+	 *            the input
+	 * @return long
+	 */
+	public static long validateMaxResultSize(String input) {
+		if (input == null) {
+			return getDefaultResultSize();
+		}
+		// validate the user input
+		long value = convertToBytes(Long.parseLong(input), getResultSizeUnit());
+		Assert.isTrue(
+				isAllowableResultSize(value),
+				"'maxResponseSize' param value must be positive and less than {hbase.scan.max.result.size} value");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Checks if is allowable result size.
+	 * 
+	 * @param input
+	 *            the input
+	 * @return true, if is allowable result size
+	 */
+	public static boolean isAllowableResultSize(long input) {
+		if (input <= 0 || input > getMaxResultSize()) {
+			return false;
+		}
+		return true;
+	}
+
+	/**
+	 * Returns the configured default result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getDefaultResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.default.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.max.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max row size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxRowSize() {
+		float maxRowSize = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.table.max.row.size");
+		return convertToBytes(maxRowSize, getRowSizeUnit());
+	}
+
+	/**
+	 * Gets the result size unit.
+	 * 
+	 * @return the result size unit
+	 */
+	public static SizeUnit getResultSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.scan.result.size.unit"));
+	}
+
+	/**
+	 * Gets the row size unit.
+	 * 
+	 * @return the row size unit
+	 */
+	public static SizeUnit getRowSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.table.row.size.unit"));
+	}
+
+	/**
+	 * Gets the connection retry limit.
+	 * 
+	 * @return the connection retry limit
+	 */
+	public static int getConnectionRetryLimit() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.hconnection.retries.number",
+				DEFAULT_HCONNECTION_RETRY_LIMIT);
+	}
+
+	/**
+	 * Checks if is default include reverse traffic.
+	 * 
+	 * @return true, if is default include reverse traffic
+	 */
+	public static boolean isDefaultIncludeReverseTraffic() {
+		return ConfigurationUtil.getConfiguration().getBoolean(
+				"pcaps.include.reverse.traffic");
+	}
+
+	/**
+	 * Gets the table name.
+	 * 
+	 * @return the table name
+	 */
+	public static byte[] getTableName() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.name"));
+	}
+
+	/**
+	 * Gets the column family.
+	 * 
+	 * @return the column family
+	 */
+	public static byte[] getColumnFamily() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.family"));
+	}
+
+	/**
+	 * Gets the column qualifier.
+	 * 
+	 * @return the column qualifier
+	 */
+	public static byte[] getColumnQualifier() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.qualifier"));
+	}
+
+	/**
+	 * Gets the max versions.
+	 * 
+	 * @return the max versions
+	 */
+	public static int getMaxVersions() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.column.maxVersions");
+	}
+
+	/**
+	 * Gets the configured tokens in rowkey.
+	 * 
+	 * @return the configured tokens in rowkey
+	 */
+	public static int getConfiguredTokensInRowkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.tokens");
+	}
+
+	/**
+	 * Gets the minimum tokens in inputkey.
+	 * 
+	 * @return the minimum tokens in inputkey
+	 */
+	public static int getMinimumTokensInInputkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"rest.api.input.key.min.tokens");
+	}
+
+	/**
+	 * Gets the appending token digits.
+	 * 
+	 * @return the appending token digits
+	 */
+	public static int getAppendingTokenDigits() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.token.appending.digits");
+	}
+
+	/**
+	 * Convert to bytes.
+	 * 
+	 * @param value
+	 *            the value
+	 * @param unit
+	 *            the unit
+	 * @return the long
+	 */
+	public static long convertToBytes(float value, SizeUnit unit) {
+		if (SizeUnit.KB == unit) {
+			return (long) (value * 1024);
+		}
+		if (SizeUnit.MB == unit) {
+			return (long) (value * 1024 * 1024);
+		}
+		return (long) value;
+	}
+
+	/**
+	 * The main method.
+	 * 
+	 * @param args
+	 *            the arguments
+	 */
+	public static void main(String[] args) {
+		long r1 = getMaxRowSize();
+		System.out.println("getMaxRowSizeInBytes = " + r1);
+		long r2 = getMaxResultSize();
+		System.out.println("getMaxAllowableResultSizeInBytes = " + r2);
+
+		SizeUnit u1 = getRowSizeUnit();
+		System.out.println("getMaxRowSizeUnit = " + u1.toString());
+		SizeUnit u2 = getResultSizeUnit();
+		System.out.println("getMaxAllowableResultsSizeUnit = " + u2.toString());
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
new file mode 100644
index 0000000..a7e7e3b
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigConstants.java
@@ -0,0 +1,40 @@
package com.opensoc.pcapservice;

/**
 * Property keys and literal constants used when building the HBase/ZooKeeper
 * client configuration for the pcap service.
 * 
 * @author Sayi
 */
public class HBaseConfigConstants {

  /** Configuration key for the comma-separated list of ZooKeeper quorum hosts. */
  public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";

  /** Configuration key for the ZooKeeper client port. */
  public static final String HBASE_ZOOKEEPER_CLIENT_PORT = "hbase.zookeeper.clientPort";

  /** Configuration key for the ZooKeeper session timeout. */
  public static final String HBASE_ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout";

  /** Configuration key for the ZooKeeper recovery retry count. */
  public static final String HBASE_ZOOKEEPER_RECOVERY_RETRY = "zookeeper.recovery.retry";

  /** Configuration key for the HBase client retry count. */
  public static final String HBASE_CLIENT_RETRIES_NUMBER = "hbase.client.retries.number";

  // NOTE(review): "delimeter" is a misspelling of "delimiter"; the name is
  // kept as-is since it is package-visible. Also an instance field, not a
  // constant — consider making it static final.
  /** Token delimiter. */
  String delimeter = "-";

  /** Regex-escaped form of the delimiter, for use with String.split. */
  String regex = "\\-";

  /** Delimiter placed between tokens of a pcap row key. */
  public static final String PCAP_KEY_DELIMETER = "-";

  /** Parameter name "startKey" — presumably a scan/request parameter; confirm at call sites. */
  public static final String START_KEY = "startKey";

  /** Parameter name "endKey" — presumably a scan/request parameter; confirm at call sites. */
  public static final String END_KEY = "endKey";

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
new file mode 100644
index 0000000..8a5c022
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/HBaseConfigurationUtil.java
@@ -0,0 +1,165 @@
/**
 * 
 */
package com.opensoc.pcapservice;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.log4j.Logger;

/**
 * Utility class which creates HConnection instance when the first request is
 * received and registers a shut down hook which closes the connection when the
 * JVM exits. Creates new connection to the cluster only if the existing
 * connection is closed for unknown reasons. Also creates Configuration with
 * HBase resources using configuration properties.
 * 
 * @author Sayi
 * 
 */
public class HBaseConfigurationUtil {

  /** The Constant LOGGER. */
  private static final Logger LOGGER = Logger
      .getLogger(HBaseConfigurationUtil.class);

  /**
   * Configuration which holds all HBase properties. Declared volatile so the
   * double-checked locking in read() is safe.
   */
  private static volatile Configuration config;

  /**
   * A cluster connection which knows how to find master node and locate regions
   * on the cluster. Declared volatile so the unsynchronized check in
   * getConnection() observes writes made under the class lock.
   */
  private static volatile HConnection clusterConnection = null;

  /**
   * Creates HConnection instance when the first request is received and returns
   * the same instance for all subsequent requests if the connection is still
   * open.
   * 
   * @return HConnection instance
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public static HConnection getConnection() throws IOException {
    if (!connectionAvailable()) {
      synchronized (HBaseConfigurationUtil.class) {
        createClusterConnection();
      }
    }
    return clusterConnection;
  }

  /**
   * Creates the cluster connection and registers the shutdown hook.
   * (Fixes the original method-name typo "createClusterConncetion" —
   * private, so no caller outside this class is affected.)
   * 
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  private static void createClusterConnection() throws IOException {
    try {
      if (connectionAvailable()) {
        return; // another thread created the connection while we waited
      }
      clusterConnection = HConnectionManager.createConnection(read());
      addShutdownHook();
      // Was System.out.println debug output; routed through the class logger.
      LOGGER.info("Created HConnection and added shutDownHook");
    } catch (IOException e) {
      LOGGER
          .error(
              "Exception occurred while creating HConnection using HConnectionManager",
              e);
      throw e;
    }
  }

  /**
   * Reports whether the cached connection exists and is still open.
   * 
   * @return true, if successful
   */
  private static boolean connectionAvailable() {
    // The original printed state to stdout and re-checked null redundantly;
    // the condition below is equivalent.
    return clusterConnection != null && !clusterConnection.isClosed();
  }

  /**
   * Adds the shutdown hook that closes the connection at JVM exit.
   */
  private static void addShutdownHook() {
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
      public void run() {
        LOGGER
            .info("Executing ShutdownHook HBaseConfigurationUtil : Closing HConnection");
        closeConnection();
      }
    }, "HBaseConfigurationUtilShutDown"));
  }

  /**
   * Closes the underlying connection to cluster; ignores if any exception is
   * thrown.
   */
  public static void closeConnection() {
    if (clusterConnection != null) {
      try {
        clusterConnection.close();
      } catch (IOException e) {
        // Best effort: previously logged via org.mortbay.log.Log; use the
        // class logger for consistency.
        LOGGER.debug("Caught ignorable exception ", e);
      }
    }
  }

  /**
   * This method creates Configuration with HBase resources using configuration
   * properties. The same Configuration object will be used to communicate with
   * all HBase tables;
   * 
   * @return Configuration object
   */
  public static Configuration read() {
    if (config == null) {
      synchronized (HBaseConfigurationUtil.class) {
        if (config == null) {
          config = HBaseConfiguration.create();

          config.set(
              HBaseConfigConstants.HBASE_ZOOKEEPER_QUORUM,
              ConfigurationUtil.getConfiguration().getString(
                  "hbase.zookeeper.quorum"));
          config.set(
              HBaseConfigConstants.HBASE_ZOOKEEPER_CLIENT_PORT,
              ConfigurationUtil.getConfiguration().getString(
                  "hbase.zookeeper.clientPort"));
          config.set(
              HBaseConfigConstants.HBASE_CLIENT_RETRIES_NUMBER,
              ConfigurationUtil.getConfiguration().getString(
                  "hbase.client.retries.number"));
          config.set(
              HBaseConfigConstants.HBASE_ZOOKEEPER_SESSION_TIMEOUT,
              ConfigurationUtil.getConfiguration().getString(
                  "zookeeper.session.timeout"));
          config.set(
              HBaseConfigConstants.HBASE_ZOOKEEPER_RECOVERY_RETRY,
              ConfigurationUtil.getConfiguration().getString(
                  "zookeeper.recovery.retry"));
        }
      }
    }
    return config;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapGetter.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
new file mode 100644
index 0000000..dbff59c
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapGetter.java
@@ -0,0 +1,88 @@
/**
 * 
 */
package com.opensoc.pcapservice;

import java.io.IOException;
import java.util.List;

/**
 * Interface to all 'keys' based pcaps fetching methods: callers supply one or
 * more exact row keys and receive the matching pcaps merged into a single
 * {@link PcapsResponse}. Presumably backed by HBase (see the pcapservice
 * package's HBase utilities) — confirm with the implementation.
 * 
 * @author Sayi
 */
public interface IPcapGetter {

  /**
   * Gets the pcaps for the input list of keys and lastRowKey. This is the
   * paginated variant: 'lastRowKey' is the point to resume from after a
   * previous partial response.
   * 
   * @param keys
   *          the list of keys for which pcaps are to be retrieved
   * @param lastRowKey
   *          last row key from the previous partial response
   * @param startTime
   *          the start time in system milliseconds to be used to filter the
   *          pcaps. The value is set to '0' if the caller sends negative value
   * @param endTime
   *          the end time in system milliseconds to be used to filter the
   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends
   *          negative value. 'endTime' must be greater than the 'startTime'.
   * @param includeReverseTraffic
   *          indicates whether or not to include pcaps from the reverse traffic
   * @param includeDuplicateLastRow
   *          indicates whether or not to include the last row from the previous
   *          partial response
   * @param maxResultSize
   *          the max result size
   * @return PcapsResponse with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
      long startTime, long endTime, boolean includeReverseTraffic,
      boolean includeDuplicateLastRow, long maxResultSize) throws IOException;

  /**
   * Gets the pcaps for the input key, filtered by the given time window.
   * 
   * @param key
   *          the key for which pcaps is to be retrieved.
   * @param startTime
   *          the start time in system milliseconds to be used to filter the
   *          pcaps. The value is set to '0' if the caller sends negative value
   * @param endTime
   *          the end time in system milliseconds to be used to filter the
   *          pcaps.The value is set to Long.MAX_VALUE if the caller sends
   *          negative value. 'endTime' must be greater than the 'startTime'.
   * @param includeReverseTraffic
   *          indicates whether or not to include pcaps from the reverse traffic
   * @return PcapsResponse with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public PcapsResponse getPcaps(String key, long startTime, long endTime,
      boolean includeReverseTraffic) throws IOException;

  /**
   * Gets the pcaps for the input list of keys, with no time filtering or
   * pagination (convenience overload).
   * 
   * @param keys
   *          the list of keys for which pcaps are to be retrieved.
   * @return PcapsResponse with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public PcapsResponse getPcaps(List<String> keys) throws IOException;

  /**
   * Gets the pcaps for the single input key, with no time filtering
   * (convenience overload).
   * 
   * @param key
   *          the key for which pcaps is to be retrieved.
   * @return PcapsResponse with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public PcapsResponse getPcaps(String key) throws IOException;

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapScanner.java b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
new file mode 100644
index 0000000..64408e9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-DataServices/src/main/java/com/opensoc/pcapservice/IPcapScanner.java
@@ -0,0 +1,49 @@
package com.opensoc.pcapservice;

import java.io.IOException;

/**
 * The Interface for all pcaps fetching methods based on a key range: callers
 * supply a startKey/endKey pair and receive the matching pcaps merged into a
 * single byte array.
 */
public interface IPcapScanner {

  /**
   * Gets the pcaps between startKey (inclusive) and endKey (exclusive),
   * filtered by the given time window and capped by response size.
   * 
   * @param startKey
   *          the start key of a key range for which pcaps is to be retrieved.
   * @param endKey
   *          the end key of a key range for which pcaps is to be retrieved.
   * @param maxResponseSize
   *          indicates the maximum response size in MegaBytes(MB). User needs
   *          to pass positive value and must be less than 60 (MB)
   *          (NOTE(review): the "less than 60" limit is stated here only —
   *          confirm against the implementation/configuration.)
   * @param startTime
   *          the start time in system milliseconds to be used to filter the
   *          pcaps. The value is set to '0' if the caller sends negative value
   * @param endTime
   *          the end time in system milliseconds to be used to filter the
   *          pcaps. The value is set Long.MAX_VALUE if the caller sends
   *          negative value
   * @return byte array with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public byte[] getPcaps(String startKey, String endKey, long maxResponseSize,
      long startTime, long endTime) throws IOException;

  /**
   * Gets the pcaps between startKey (inclusive) and endKey (exclusive), with
   * no time filtering (convenience overload).
   * 
   * @param startKey
   *          the start key (inclusive) of a key range for which pcaps is to be
   *          retrieved.
   * @param endKey
   *          the end key (exclusive) of a key range for which pcaps is to be
   *          retrieved.
   * @return byte array with all matching pcaps merged together
   * @throws IOException
   *           Signals that an I/O exception has occurred.
   */
  public byte[] getPcaps(String startKey, String endKey) throws IOException;

}


[26/26] incubator-metron git commit: Merge branch 'master' of github.com:charlesporter/incubator-metron

Posted by ce...@apache.org.
Merge branch 'master' of github.com:charlesporter/incubator-metron

METRON-3 - replace opensoc-streaming version 0.4BETA with 0.6BETA


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/70e84c03
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/70e84c03
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/70e84c03

Branch: refs/heads/master
Commit: 70e84c0318f76190d85a84ec2bdc1f5f1f78fbd0
Parents: 34faa30 a919cc1
Author: cstella <ce...@gmail.com>
Authored: Thu Dec 17 15:43:49 2015 -0500
Committer: cstella <ce...@gmail.com>
Committed: Thu Dec 17 15:43:49 2015 -0500

----------------------------------------------------------------------
 opensoc-streaming/.gitignore                    |   15 -
 opensoc-streaming/.travis.yml                   |   14 -
 opensoc-streaming/OpenSOC-Alerts/pom.xml        |   61 +-
 opensoc-streaming/OpenSOC-Alerts/readme.md      |  104 +
 .../com/opensoc/alerts/AbstractAlertBolt.java   |   13 +-
 .../com/opensoc/alerts/TelemetryAlertsBolt.java |   31 +-
 .../alerts/adapters/AbstractAlertAdapter.java   |    8 +-
 .../alerts/adapters/AllAlertAdapter.java        |  249 +-
 .../alerts/adapters/CIFAlertsAdapter.java       |  311 +
 .../adapters/HbaseWhiteAndBlacklistAdapter.java |  130 +-
 .../alerts/adapters/KeywordsAlertAdapter.java   |  274 +
 .../opensoc/alerts/adapters/RangeChecker.java   |   23 +
 .../alerts/adapters/ThreatAlertsAdapter.java    |  311 +
 .../src/main/resources/hbase-site.xml           |  221 +-
 .../alerts/adapters/AllAlertAdapterTest.java    |  166 +
 .../resources/AllAlertAdapterTest.properties    |    1 +
 .../TestSchemas/AllAlertAdapterSchema.json      |   42 +
 .../resources/config/AllAlertAdapterTest.config |    8 +
 opensoc-streaming/OpenSOC-Common/.gitignore     |    1 +
 opensoc-streaming/OpenSOC-Common/pom.xml        |   48 +-
 .../configuration/ConfigurationManager.java     |  119 +
 .../dataloads/interfaces/ThreatIntelSource.java |   11 +
 .../main/java/com/opensoc/hbase/HBaseBolt.java  |   14 +-
 .../helpers/services/PcapServiceCli.java        |  110 +
 .../java/com/opensoc/helpers/topology/Cli.java  |  186 +
 .../helpers/topology/ErrorGenerator.java        |   37 +
 .../helpers/topology/SettingsLoader.java        |  149 +
 .../opensoc/index/interfaces/IndexAdapter.java  |    6 +-
 .../java/com/opensoc/ise/parser/ISEParser.java  |   45 +-
 .../ise/parser/ISEParserTokenManager.java       |    3 -
 .../json/serialization/JSONEncoderHelper.java   |    2 +
 .../json/serialization/JSONKafkaSerializer.java |   18 +-
 .../parser/interfaces/MessageParser.java        |    1 +
 .../main/java/com/opensoc/pcap/PacketInfo.java  |   73 +-
 .../com/opensoc/pcap/PcapByteOutputStream.java  |  288 +
 .../main/java/com/opensoc/pcap/PcapMerger.java  |  245 +
 .../com/opensoc/pcap/PcapPacketComparator.java  |   22 +
 .../main/java/com/opensoc/pcap/PcapParser.java  |  183 +
 .../main/java/com/opensoc/pcap/PcapUtils.java   |   27 +
 .../src/main/java/com/opensoc/pcap/asdf.java    |    5 -
 .../com/opensoc/test/AbstractConfigTest.java    |  299 +
 .../com/opensoc/test/AbstractSchemaTest.java    |  198 +
 .../com/opensoc/test/AbstractTestContext.java   |    2 +-
 .../java/com/opensoc/test/ISEParserTest.java    |   27 -
 .../opensoc/tldextractor/BasicTldExtractor.java |  137 +
 .../test/BasicTldExtractorTest.java             |  125 +
 .../java/com/opensoc/topologyhelpers/Cli.java   |  186 -
 .../opensoc/topologyhelpers/ErrorGenerator.java |   27 -
 .../opensoc/topologyhelpers/SettingsLoader.java |  118 -
 .../config/BasicTldExtractorTest.config         |    2 +
 .../src/test/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 opensoc-streaming/OpenSOC-DataLoads/README.md   |   50 +
 .../dependency-reduced-pom.xml                  |  145 -
 opensoc-streaming/OpenSOC-DataLoads/pom.xml     |   22 +-
 .../opensoc/dataloads/cif/HBaseTableLoad.java   |  122 -
 .../OpenSOC-DataLoads/src/hbase-site.xml        |  100 -
 .../opensoc/dataloads/ThreatIntelLoader.java    |  174 +
 .../opensoc/dataloads/cif/HBaseTableLoad.java   |  238 +
 .../src/main/resources/hbase-site.xml           |  100 +
 .../OpenSOC-DataServices/README.md              |    1 +
 opensoc-streaming/OpenSOC-DataServices/pom.xml  |  232 +-
 .../alerts/server/AlertsProcessingServer.java   |    1 -
 .../opensoc/alerts/server/AlertsSearcher.java   |    2 -
 .../dataservices/kafkaclient/KafkaClient.java   |   10 +-
 .../dataservices/kafkaclient/KafkaConsumer.java |    8 +-
 .../modules/guice/DefaultServletModule.java     |    1 -
 .../modules/guice/RestEasyModule.java           |    5 +-
 .../modules/guice/ServiceModule.java            |    1 -
 .../opensoc/dataservices/rest/RestServices.java |    2 -
 .../dataservices/servlet/LogoutServlet.java     |    8 -
 .../websocket/KafkaMessageSenderServlet.java    |    1 -
 .../websocket/KafkaMessageSenderSocket.java     |    2 -
 .../websocket/KafkaWebSocketCreator.java        |    1 -
 .../pcapservice/CellTimestampComparator.java    |   23 +
 .../opensoc/pcapservice/ConfigurationUtil.java  |  269 +
 .../pcapservice/HBaseConfigConstants.java       |   40 +
 .../pcapservice/HBaseConfigurationUtil.java     |  165 +
 .../com/opensoc/pcapservice/IPcapGetter.java    |   88 +
 .../com/opensoc/pcapservice/IPcapScanner.java   |   49 +
 .../pcapservice/PcapGetterHBaseImpl.java        |  809 ++
 .../com/opensoc/pcapservice/PcapHelper.java     |  205 +
 .../pcapservice/PcapReceiverImplRestEasy.java   |  256 +
 .../pcapservice/PcapScannerHBaseImpl.java       |  302 +
 .../com/opensoc/pcapservice/PcapsResponse.java  |  153 +
 .../opensoc/pcapservice/RestTestingUtil.java    |  238 +
 .../pcapservice/rest/JettyServiceRunner.java    |   26 +
 .../opensoc/pcapservice/rest/PcapService.java   |   34 +
 .../ElasticSearch_KafkaAlertsService.java       |    1 -
 .../main/resources/config-definition-hbase.xml  |   34 +
 .../resources/hbase-config-default.properties   |   40 +
 .../src/main/resources/hbase-site.xml           |  127 +
 .../CellTimestampComparatorTest.java            |   92 +
 .../OpenSOC-EnrichmentAdapters/pom.xml          |  138 +-
 .../OpenSOC-EnrichmentAdapters/readme.md        |  125 +
 .../adapters/geo/GeoMysqlAdapter.java           |    5 +-
 .../host/HostFromPropertiesFileAdapter.java     |    4 +-
 .../adapters/threat/AbstractThreatAdapter.java  |   36 +
 .../adapters/threat/ThreatHbaseAdapter.java     |  129 +
 .../adapters/whois/WhoisHBaseAdapter.java       |   23 +-
 .../common/AbstractEnrichmentBolt.java          |   16 +-
 .../common/GenericEnrichmentBolt.java           |   21 +-
 .../src/main/resources/hbase-site.xml           |  221 +-
 .../adapters/cif/CIFHbaseAdapterTest.java       |   27 +
 .../adapters/geo/GeoMysqlAdapterTest.java       |   46 +-
 .../adapters/whois/WhoisHBaseAdapterTest.java   |   31 +-
 .../resources/CIFHbaseAdapterTest.properties    |    8 +-
 .../resources/GeoMysqlAdapterTest.properties    |    8 +-
 .../resources/TestSchemas/CIFHbaseSchema.json   |    0
 .../resources/TestSchemas/GeoMySqlSchema.json   |   42 +
 .../resources/TestSchemas/WhoisHbaseSchema.json |    0
 .../resources/WhoisHbaseAdapterTest.properties  |    8 +-
 opensoc-streaming/OpenSOC-Indexing/pom.xml      |   16 +-
 opensoc-streaming/OpenSOC-Indexing/readme.md    |   61 +
 .../opensoc/indexing/TelemetryIndexingBolt.java |   26 +-
 .../indexing/adapters/AbstractIndexAdapter.java |    2 +-
 .../indexing/adapters/ESBaseBulkAdapter.java    |   12 +-
 .../adapters/ESBulkRotatingAdapter.java         |   18 +-
 .../adapters/ESTimedRotatingAdapter.java        |   49 +-
 .../OpenSOC-MessageParsers/pom.xml              |   30 +-
 .../OpenSOC-MessageParsers/readme.md            |   82 +
 .../com/opensoc/parsing/AbstractParserBolt.java |   33 +-
 .../com/opensoc/parsing/PcapParserBolt.java     |   83 +-
 .../opensoc/parsing/TelemetryParserBolt.java    |    9 +-
 .../opensoc/parsing/parsers/AbstractParser.java |    8 +-
 .../opensoc/parsing/parsers/BasicBroParser.java |  101 +-
 .../parsing/parsers/BasicFireEyeParser.java     |  234 +
 .../opensoc/parsing/parsers/BasicIseParser.java |    2 +
 .../parsing/parsers/BasicLancopeParser.java     |    2 +
 .../parsing/parsers/BasicLogstashParser.java    |   65 +
 .../parsers/BasicPaloAltoFirewallParser.java    |  184 +
 .../parsing/parsers/BasicSourcefireParser.java  |   25 +-
 .../opensoc/parsing/parsers/GrokAsaParser.java  |  269 +
 .../parsing/parsers/GrokSourcefireParser.java   |    2 +
 .../com/opensoc/parsing/parsers/GrokUtils.java  |   26 +
 .../parsing/parsers/OpenSOCConverter.java       |  183 +
 .../opensoc/parsing/parsers/OpenSOCGarbage.java |  130 +
 .../opensoc/parsing/parsers/OpenSOCGrok.java    |  367 +
 .../opensoc/parsing/parsers/OpenSOCMatch.java   |  280 +
 .../opensoc/parsing/parsers/ParserUtils.java    |   23 +
 .../src/main/resources/patterns/asa             |  176 +
 .../src/main/resources/patterns/fireeye         |    9 +
 .../src/main/resources/patterns/sourcefire      |   30 +
 .../src/main/resources/patters/sourcefire       |   30 -
 .../parsing/test/BasicBroParserTest.java        |  103 +
 .../parsing/test/BasicFireEyeParserTest.java    |  141 +
 .../parsing/test/BasicIseParserTest.java        |  149 +-
 .../parsing/test/BasicLancopeParserTest.java    |  144 +-
 .../test/BasicPaloAltoFirewallParserTest.java   |  136 +
 .../parsing/test/BasicSourcefireParserTest.java |  103 +-
 .../com/opensoc/parsing/test/BroParserTest.java |  120 +-
 .../opensoc/parsing/test/GrokAsaParserTest.java |  149 +
 .../src/test/resources/BroParserTest.log        |    3 +
 .../src/test/resources/FireEyeParserTest.log    |    8 +
 .../src/test/resources/GrokParserTest.log       |   12 +
 .../src/test/resources/IseParserTest.log        |  308 +
 .../src/test/resources/IseSample.log            |  308 -
 .../src/test/resources/LancopeParserTest.log    |    1 +
 .../resources/PaloAltoFirewallParserTest.log    |    2 +
 .../src/test/resources/SourceFireTest.log       |    3 +
 .../test/resources/TestSchemas/BroSchema.json   |   28 +
 .../resources/TestSchemas/LancopeSchema.json    |   14 +-
 .../resources/TestSchemas/SourcefireSchema.json |   20 +-
 .../config/BasicFireEyeParserTest.config        |    2 +
 .../resources/config/BasicIseParserTest.config  |    2 +
 .../config/BasicLancopeParserTest.config        |    2 +
 .../BasicPaloAltoFirewallParserTest.config      |    2 +
 .../config/BasicSourcefireParserTest.config     |    2 +
 .../test/resources/config/BroParserTest.config  |    2 +
 .../resources/config/GrokAsaParserTest.config   |    2 +
 .../src/test/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 .../OpenSOC-PCAP_Reconstruction/hbase/.pmd      | 1262 ---
 .../hbase/README.txt                            |   16 -
 .../hbase/dependency-reduced-pom.xml            |  230 -
 .../OpenSOC-PCAP_Reconstruction/hbase/pom.xml   |  137 -
 .../hbase/client/CellTimestampComparator.java   |   23 -
 .../opensoc/hbase/client/ConfigurationUtil.java |  267 -
 .../hbase/client/HBaseConfigConstants.java      |   40 -
 .../hbase/client/HBaseConfigurationUtil.java    |  165 -
 .../cisco/opensoc/hbase/client/IPcapGetter.java |   88 -
 .../opensoc/hbase/client/IPcapReceiver.java     |  109 -
 .../opensoc/hbase/client/IPcapScanner.java      |   49 -
 .../hbase/client/PcapGetterHBaseImpl.java       |  790 --
 .../cisco/opensoc/hbase/client/PcapHelper.java  |  205 -
 .../opensoc/hbase/client/PcapReceiverImpl.java  |  212 -
 .../hbase/client/PcapScannerHBaseImpl.java      |  302 -
 .../opensoc/hbase/client/PcapsResponse.java     |  151 -
 .../opensoc/hbase/client/RestTestingUtil.java   |  238 -
 .../main/resources/config-definition-hbase.xml  |   34 -
 .../resources/hbase-config-default.properties   |   40 -
 .../hbase/src/main/resources/log4j.properties   |   21 -
 .../client/CellTimestampComparatorTest.java     |   92 -
 .../hbase/client/ConfigurationUtilTest.java     |   50 -
 .../client/HBaseConfigurationUtilTest.java      |   52 -
 .../hbase/client/HBaseIntegrationTest.java      |   74 -
 .../hbase/client/PcapGetterHBaseImplTest.java   |  536 -
 .../opensoc/hbase/client/PcapHelperTest.java    |  321 -
 .../hbase/client/PcapReceiverImplTest.java      |  232 -
 .../hbase/client/PcapScannerHBaseImplTest.java  |  234 -
 .../src/test/resources/hbase-config.properties  |   40 -
 .../src/test/resources/test-tcp-packet.pcap     |  Bin 144 -> 0 bytes
 .../OpenSOC-PCAP_Reconstruction/service/.pmd    | 1190 ---
 .../service/README.txt                          |   10 -
 .../OpenSOC-PCAP_Reconstruction/service/pom.xml |   52 -
 .../service/src/main/resources/log4j.properties |   10 -
 .../src/main/webapp/META-INF/MANIFEST.MF        |    3 -
 .../src/main/webapp/WEB-INF/ipcap-config.xml    |    7 -
 .../service/src/main/webapp/WEB-INF/web.xml     |   17 -
 .../OpenSOC-Pcap_Service/README.txt             |   16 +
 opensoc-streaming/OpenSOC-Pcap_Service/pom.xml  |  267 +
 .../OpenSOC-Pcap_Service/pom.xml.versionsBackup |  268 +
 .../pcapservice/CellTimestampComparator.java    |   23 +
 .../opensoc/pcapservice/ConfigurationUtil.java  |  269 +
 .../pcapservice/HBaseConfigConstants.java       |   40 +
 .../pcapservice/HBaseConfigurationUtil.java     |  165 +
 .../com/opensoc/pcapservice/IPcapGetter.java    |   88 +
 .../com/opensoc/pcapservice/IPcapScanner.java   |   49 +
 .../pcapservice/PcapGetterHBaseImpl.java        |  809 ++
 .../com/opensoc/pcapservice/PcapHelper.java     |  205 +
 .../pcapservice/PcapReceiverImplRestEasy.java   |  250 +
 .../pcapservice/PcapScannerHBaseImpl.java       |  302 +
 .../com/opensoc/pcapservice/PcapsResponse.java  |  153 +
 .../opensoc/pcapservice/RestTestingUtil.java    |  238 +
 .../pcapservice/rest/JettyServiceRunner.java    |   26 +
 .../opensoc/pcapservice/rest/PcapService.java   |   34 +
 .../OnlyDeleteExpiredFilesCompactionPolicy.java |   37 +
 .../main/resources/config-definition-hbase.xml  |   34 +
 .../resources/hbase-config-default.properties   |   40 +
 .../src/main/resources/hbase-site.xml           |  127 +
 .../src/main/resources/log4j.properties         |   21 +
 .../CellTimestampComparatorTest.java            |   92 +
 .../pcapservice/ConfigurationUtilTest.java      |   50 +
 .../pcapservice/HBaseConfigurationUtilTest.java |   52 +
 .../pcapservice/HBaseIntegrationTest.java       |   74 +
 .../pcapservice/PcapGetterHBaseImplTest.java    |  536 +
 .../com/opensoc/pcapservice/PcapHelperTest.java |  321 +
 .../pcapservice/PcapScannerHBaseImplTest.java   |  232 +
 .../src/test/resources/hbase-config.properties  |   40 +
 .../src/test/resources/test-tcp-packet.pcap     |  Bin 0 -> 144 bytes
 opensoc-streaming/OpenSOC-Topologies/pom.xml    |  188 +
 opensoc-streaming/OpenSOC-Topologies/readme.md  |   47 +
 .../src/main/java/com/opensoc/topology/Asa.java |   40 +
 .../main/java/com/opensoc/topology/FireEye.java |   21 +
 .../src/main/java/com/opensoc/topology/Ise.java |    4 +
 .../main/java/com/opensoc/topology/Lancope.java |    5 +
 .../com/opensoc/topology/PaloAltoFirewall.java  |   41 +
 .../main/java/com/opensoc/topology/Pcap.java    |    6 +
 .../com/opensoc/topology/runner/AsaRunner.java  |   94 +
 .../com/opensoc/topology/runner/BroRunner.java  |   15 +-
 .../opensoc/topology/runner/FireEyeRunner.java  |   77 +
 .../com/opensoc/topology/runner/ISERunner.java  |   17 +-
 .../opensoc/topology/runner/LancopeRunner.java  |   15 +-
 .../topology/runner/PaloAltoFirewallRunner.java |   95 +
 .../com/opensoc/topology/runner/PcapRunner.java |    4 +-
 .../topology/runner/SourcefireRunner.java       |   17 +-
 .../opensoc/topology/runner/TopologyRunner.java |  205 +-
 .../topologies/asa/features_enabled.conf        |  113 +
 .../OpenSOC_Configs/topologies/asa/metrics.conf |   26 +
 .../topologies/asa/topology.conf                |  110 +
 .../topologies/asa/topology_identifier.conf     |    4 +
 .../topologies/bro/features_enabled.conf        |   14 +-
 .../topologies/bro/topology.conf                |   71 +-
 .../topologies/fireeye/features_enabled.conf    |  113 +
 .../topologies/fireeye/metrics.conf             |   26 +
 .../topologies/fireeye/topology.conf            |  110 +
 .../topologies/fireeye/topology_identifier.conf |    4 +
 .../topologies/ise/features_enabled.conf        |   10 +-
 .../topologies/ise/topology.conf                |   39 +-
 .../topologies/lancope/features_enabled.conf    |   10 +-
 .../topologies/lancope/topology.conf            |   39 +-
 .../topologies/paloalto/features_enabled.conf   |  113 +
 .../topologies/paloalto/metrics.conf            |   26 +
 .../topologies/paloalto/topology.conf           |  113 +
 .../paloalto/topology_identifier.conf           |    4 +
 .../topologies/pcap/features_enabled.conf       |   10 +-
 .../topologies/pcap/topology.conf               |   64 +-
 .../topologies/sourcefire/features_enabled.conf |   10 +-
 .../topologies/sourcefire/topology.conf         |   48 +-
 .../src/main/resources/SampleInput/AsaOutput    |  100 +
 .../resources/SampleInput/FireeyeExampleOutput  |   90 +
 .../main/resources/SampleInput/ISESampleOutput  |  314 +-
 .../main/resources/SampleInput/PaloaltoOutput   |  100 +
 .../resources/TopologyConfigs_old/lancope.conf  |   16 +-
 .../src/main/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 .../src/main/resources/hbase-site.xml           |  131 +
 opensoc-streaming/pom.xml                       |   15 +-
 opensoc-streaming/readme.md                     |  136 +-
 286 files changed, 48498 insertions(+), 9780 deletions(-)
----------------------------------------------------------------------



[04/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/ISESampleOutput
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/ISESampleOutput b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/ISESampleOutput
index 1a73c1f..1cb0678 100644
--- a/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/ISESampleOutput
+++ b/opensoc-streaming/OpenSOC-Topologies/src/main/resources/SampleInput/ISESampleOutput
@@ -1,19 +1,19 @@
-Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,BYODRegistration=Unknown\,FeedService=false\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,TimeToProfile=19\,StaticGroupAssignment=false\,NmapSubnetScanID=0\,DeviceRegistrationStatus=NotRegistered\,PortalUser=, EndpointSourceEvent=SNMPQuery Probe, EndpointIdentityGroup=Profile
 d, ProfilerServer=stage-pdp01.cisco.com,
-Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024856 1 0 2014-08-07 00:45:43.786 -07:00 0000288543 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,BYODRegistration=Unknown\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,TimeToProfile=19\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,UpdateTime=0\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,FeedService=false\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,NmapScanCount=0\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceE
 vent=SNMPQuery Probe, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com,
-Aug  6 20:00:52 10.42.7.64 Aug  7 03:20:05 npf-sjca-pdp02 CISE_Profiler 0000373185 1 0 2014-08-07 03:20:05.549 -07:00 0011310202 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407394245820\,PolicyVersion=403\,Ident
 ityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 21:00:48 10.42.7.64 Aug  7 04:20:00 npf-sjca-pdp02 CISE_Profiler 0000373902 1 0 2014-08-07 04:20:00.983 -07:00 0011322557 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407406806572\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 22:22:50 10.42.7.64 Aug  7 05:42:03 npf-sjca-pdp02 CISE_Profiler 0000374846 1 0 2014-08-07 05:42:03.617 -07:00 0011340138 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=10, EndpointMacAddress=68:A8:6D:4E:0D:86, EndpointMatchedPolicy=Apple-Device, EndpointOUI=Apple, EndpointPolicy=Apple-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,host-name=PEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407415322895\,TimeToProfile=717\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,PolicyVersion=403\,IdentityGroupID=abbbcac0-89e6-11e1-bf14-005056aa4dd7\,Total Certainty Factor=10\,ciaddr=0.0.0.0\,FeedService=false\,dhcp-parameter-request-list=1\, 3\, 6\, 15\, 119\, 95\, 252\, 44\, 46\,MatchedPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceEvent=DHCP Probe, EndpointIdentityGroup
 =Apple-Device, ProfilerServer=npf.example.com,
-Aug  6 23:30:10 10.42.7.64 Aug  7 06:49:23 npf-sjca-pdp02 CISE_Profiler 0000375603 1 0 2014-08-07 06:49:23.920 -07:00 0011353768 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HEXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407410402099\,PolicyVersion=403\,Ident
 ityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 23:30:48 10.42.7.64 Aug  7 06:50:01 npf-sjca-pdp02 CISE_Profiler 0000375611 1 0 2014-08-07 06:50:01.377 -07:00 0011353875 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=50, EndpointIPAddress=10.34.92.103, EndpointMacAddress=3C:A9:F4:29:FC:3C, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.34.76.212, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-29-fc-3c\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=EXAMPLE\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406109860322\,L4_DST_PORT=50428\,TimeToProfile=7\,Framed-IP-Address=10.34.92.103\,LastNmapScanTime=1380758278898\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=140668603
 4558\,PolicyVersion=403\,IdentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=50\,operating-system=Microsoft Windows Vista SP0 - SP2\, Server 2008\, or Windows 7 Ultimate\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1373657280926\,NmapScanCount=3\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
-Aug  6 23:32:52 10.42.7.64 Aug  7 06:52:05 npf-sjca-pdp02 CISE_Profiler 0000375636 1 0 2014-08-07 06:52:05.272 -07:00 0011354313 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.143, EndpointMacAddress=E8:2A:EA:23:5E:3D, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=e8-2a-ea-23-5e-3d\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=ANOY-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406114784910\,TimeToProfile=7\,Framed-IP-Address=10.56.129.143\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407395211208\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1405408515121\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf.example.com,
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,BYODRegistration=Unknown\,FeedService=false\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,TimeToProfile=19\,StaticGroupAssignment=false\,NmapSubnetScanID=0\,DeviceRegistrationStatus=NotRegistered\,PortalUser=, EndpointSourceEvent=SNMPQuery Probe, EndpointIdentityGroup=Profile
 d, ProfilerServer=stage-pdp01.cisco.com, 
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024856 1 0 2014-08-07 00:45:43.786 -07:00 0000288543 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,BYODRegistration=Unknown\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,TimeToProfile=19\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,UpdateTime=0\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,FeedService=false\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,NmapScanCount=0\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceE
 vent=SNMPQuery Probe, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com, 
+Aug  6 20:00:52 10.42.7.64 Aug  7 03:20:05 npf-sjca-pdp02 CISE_Profiler 0000373185 1 0 2014-08-07 03:20:05.549 -07:00 0011310202 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407394245820\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 21:00:48 10.42.7.64 Aug  7 04:20:00 npf-sjca-pdp02 CISE_Profiler 0000373902 1 0 2014-08-07 04:20:00.983 -07:00 0011322557 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407406806572\,PolicyVersion=403\,I
 dentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 22:22:50 10.42.7.64 Aug  7 05:42:03 npf-sjca-pdp02 CISE_Profiler 0000374846 1 0 2014-08-07 05:42:03.617 -07:00 0011340138 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=10, EndpointMacAddress=68:A8:6D:4E:0D:86, EndpointMatchedPolicy=Apple-Device, EndpointOUI=Apple, EndpointPolicy=Apple-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,host-name=PGIANG-M-306R\,BYODRegistration=Unknown\,EndPointPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407415322895\,TimeToProfile=717\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,PolicyVersion=403\,IdentityGroupID=abbbcac0-89e6-11e1-bf14-005056aa4dd7\,Total Certainty Factor=10\,ciaddr=0.0.0.0\,FeedService=false\,dhcp-parameter-request-list=1\, 3\, 6\, 15\, 119\, 95\, 252\, 44\, 46\,MatchedPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceEvent=DHCP Probe, EndpointIdentity
 Group=Apple-Device, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:30:10 10.42.7.64 Aug  7 06:49:23 npf-sjca-pdp02 CISE_Profiler 0000375603 1 0 2014-08-07 06:49:23.920 -07:00 0011353768 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407410402099\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:30:48 10.42.7.64 Aug  7 06:50:01 npf-sjca-pdp02 CISE_Profiler 0000375611 1 0 2014-08-07 06:50:01.377 -07:00 0011353875 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=50, EndpointIPAddress=10.34.92.103, EndpointMacAddress=3C:A9:F4:29:FC:3C, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.34.76.212, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-29-fc-3c\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=AMIBASU-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406109860322\,L4_DST_PORT=50428\,TimeToProfile=7\,Framed-IP-Address=10.34.92.103\,LastNmapScanTime=1380758278898\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1406
 686034558\,PolicyVersion=403\,IdentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=50\,operating-system=Microsoft Windows Vista SP0 - SP2\, Server 2008\, or Windows 7 Ultimate\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1373657280926\,NmapScanCount=3\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:32:52 10.42.7.64 Aug  7 06:52:05 npf-sjca-pdp02 CISE_Profiler 0000375636 1 0 2014-08-07 06:52:05.272 -07:00 0011354313 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.143, EndpointMacAddress=E8:2A:EA:23:5E:3D, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=e8-2a-ea-23-5e-3d\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=ANOY-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406114784910\,TimeToProfile=7\,Framed-IP-Address=10.56.129.143\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407395211208\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1405408515121\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
 Aug  6 16:40:52 10.42.7.64 Aug  7 00:00:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370855 1 0 2014-08-07 00:00:04.527 -07:00 0011266584 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270932, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056EF53E323F4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:40:57 10.42.7.63 Aug  7 00:00:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001969834 1 0 2014-08-07 00:00:09.568 -07:00 0098648519 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084839, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4A53E323F9, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:41:24 10.34.84.145 Aug  7 00:00:36 stage-pdp01 CISE_Failed_Attempts 0000024616 1 0 2014-08-07 00:00:36.332 -07:00 0000287007 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19317, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:26 10.34.84.145 Aug  7 00:00:38 stage-pdp01 CISE_Failed_Attempts 0000024617 1 0 2014-08-07 00:00:38.336 -07:00 0000287011 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19318, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:28 10.34.84.145 Aug  7 00:00:40 stage-pdp01 CISE_Failed_Attempts 0000024618 1 0 2014-08-07 00:00:40.336 -07:00 0000287015 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19319, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:30 10.34.84.145 Aug  7 00:00:42 stage-pdp01 CISE_Failed_Attempts 0000024619 1 0 2014-08-07 00:00:42.340 -07:00 0000287019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19320, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:32 10.34.84.145 Aug  7 00:00:44 stage-pdp01 CISE_Failed_Attempts 0000024620 1 0 2014-08-07 00:00:44.340 -07:00 0000287023 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19321, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:41:34 10.34.84.145 Aug  7 00:00:46 stage-pdp01 CISE_Failed_Attempts 0000024621 1 0 2014-08-07 00:00:46.344 -07:00 0000287027 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19322, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:41:24 10.34.84.145 Aug  7 00:00:36 stage-pdp01 CISE_Failed_Attempts 0000024616 1 0 2014-08-07 00:00:36.332 -07:00 0000287007 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19317, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:26 10.34.84.145 Aug  7 00:00:38 stage-pdp01 CISE_Failed_Attempts 0000024617 1 0 2014-08-07 00:00:38.336 -07:00 0000287011 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19318, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:28 10.34.84.145 Aug  7 00:00:40 stage-pdp01 CISE_Failed_Attempts 0000024618 1 0 2014-08-07 00:00:40.336 -07:00 0000287015 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19319, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:30 10.34.84.145 Aug  7 00:00:42 stage-pdp01 CISE_Failed_Attempts 0000024619 1 0 2014-08-07 00:00:42.340 -07:00 0000287019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19320, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:32 10.34.84.145 Aug  7 00:00:44 stage-pdp01 CISE_Failed_Attempts 0000024620 1 0 2014-08-07 00:00:44.340 -07:00 0000287023 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19321, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:34 10.34.84.145 Aug  7 00:00:46 stage-pdp01 CISE_Failed_Attempts 0000024621 1 0 2014-08-07 00:00:46.344 -07:00 0000287027 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19322, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:42:02 10.42.7.64 Aug  7 00:01:14 npf-sjca-pdp02 CISE_Failed_Attempts 0000370865 1 0 2014-08-07 00:01:14.610 -07:00 0011266810 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270940, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F053E3243A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:42:07 10.42.7.63 Aug  7 00:01:19 npf-sjca-pdp01 CISE_Failed_Attempts 0001969923 1 0 2014-08-07 00:01:19.665 -07:00 0098652715 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084986, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4B53E3243F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:42:12 10.42.7.64 Aug  7 00:01:24 npf-sjca-pdp02 CISE_Failed_Attempts 0000370867 1 0 2014-08-07 00:01:24.701 -07:00 0011266815 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270941, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F153E32444, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
@@ -21,30 +21,30 @@ Aug  6 16:42:17 10.42.7.63 Aug  7 00:01:29 npf-sjca-pdp01 CISE_Failed_Attempts 0
 Aug  6 16:43:22 10.42.7.64 Aug  7 00:02:34 npf-sjca-pdp02 CISE_Failed_Attempts 0000370885 1 0 2014-08-07 00:02:34.792 -07:00 0011267367 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270956, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F353E3248A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:43:27 10.42.7.63 Aug  7 00:02:39 npf-sjca-pdp01 CISE_Failed_Attempts 0001970043 1 0 2014-08-07 00:02:39.808 -07:00 0098657578 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085161, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4D53E3248F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:43:56 10.42.7.64 Aug  7 00:03:08 npf-sjca-pdp02 CISE_Failed_Attempts 0000370897 1 0 2014-08-07 00:03:08.902 -07:00 0011267657 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=yshchory, Protocol=Radius, RequestLatency=49, NetworkDeviceName=NTN-WLC1, User-Name=yshchory, NAS-IP-Address=10.56.129.4, NAS-Port=1, Framed-IP-Address=10.56.129.141, Class=CACS:0a388104000045cd53e2be75:npf-sjca-pdp02/195481465/270958, Called-Station-ID=6c-41-6a-5f-6e-c0, Calling-Station-ID=90-18-7c-7b-59-01, NAS-Identifier=ntn01-11a-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=2359603, Acct-Output-Octets=26928466, Acct-Session-Id=53e2be78/90:18:7c:7b:59:01/13844, Acct-Authentic=RADIUS, Acct-Session-Time=1466, Acct-Input-Packets=14866, Acct-Output-Packets=23043, und
 efined-52=
-Aug  6 16:44:01 10.42.7.63 Aug  7 00:03:13 npf-sjca-pdp01 CISE_Failed_Attempts 0001970072 1 0 2014-08-07 00:03:13.112 -07:00 0098658804 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=133, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.63, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=10.56.111.14, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=4, Acct-Input-Octets=225395, Acct-Output-Octets=761436, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=43, Acct-Input-Packets=1163, Acct-Output-Packets=1080, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-pair
 =audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp01/195491152/2085221, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=42, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
+Aug  6 16:44:01 10.42.7.63 Aug  7 00:03:13 npf-sjca-pdp01 CISE_Failed_Attempts 0001970072 1 0 2014-08-07 00:03:13.112 -07:00 0098658804 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=133, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.63, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=10.56.111.14, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=4, Acct-Input-Octets=225395, Acct-Output-Octets=761436, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=43, Acct-Input-Packets=1163, Acct-Output-Packets=1080, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-pair
 =audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp01/195491152/2085221, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=42, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, 
 Aug  6 16:44:32 10.42.7.64 Aug  7 00:03:44 npf-sjca-pdp02 CISE_Failed_Attempts 0000370899 1 0 2014-08-07 00:03:44.851 -07:00 0011267663 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270963, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F453E324D0, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:44:36 10.34.84.145 Aug  7 00:03:48 stage-pdp01 CISE_Failed_Attempts 0000024632 1 0 2014-08-07 00:03:48.375 -07:00 0000287084 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19329, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:36 10.34.84.145 Aug  7 00:03:48 stage-pdp01 CISE_Failed_Attempts 0000024632 1 0 2014-08-07 00:03:48.375 -07:00 0000287084 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19329, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:44:37 10.42.7.63 Aug  7 00:03:49 npf-sjca-pdp01 CISE_Failed_Attempts 0001970128 1 0 2014-08-07 00:03:49.893 -07:00 0098661643 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085307, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4E53E324D5, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:44:38 10.34.84.145 Aug  7 00:03:50 stage-pdp01 CISE_Failed_Attempts 0000024633 1 0 2014-08-07 00:03:50.379 -07:00 0000287088 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19330, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:40 10.34.84.145 Aug  7 00:03:52 stage-pdp01 CISE_Failed_Attempts 0000024634 1 0 2014-08-07 00:03:52.379 -07:00 0000287092 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19331, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:42 10.34.84.145 Aug  7 00:03:54 stage-pdp01 CISE_Failed_Attempts 0000024635 1 0 2014-08-07 00:03:54.387 -07:00 0000287096 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19332, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:38 10.34.84.145 Aug  7 00:03:50 stage-pdp01 CISE_Failed_Attempts 0000024633 1 0 2014-08-07 00:03:50.379 -07:00 0000287088 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19330, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:40 10.34.84.145 Aug  7 00:03:52 stage-pdp01 CISE_Failed_Attempts 0000024634 1 0 2014-08-07 00:03:52.379 -07:00 0000287092 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19331, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:42 10.34.84.145 Aug  7 00:03:54 stage-pdp01 CISE_Failed_Attempts 0000024635 1 0 2014-08-07 00:03:54.387 -07:00 0000287096 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19332, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:44:42 10.42.7.64 Aug  7 00:03:54 npf-sjca-pdp02 CISE_Failed_Attempts 0000370903 1 0 2014-08-07 00:03:54.924 -07:00 0011267670 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270964, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F553E324DA, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
-Aug  6 16:44:44 10.34.84.145 Aug  7 00:03:56 stage-pdp01 CISE_Failed_Attempts 0000024636 1 0 2014-08-07 00:03:56.386 -07:00 0000287100 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19333, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:44:46 10.34.84.145 Aug  7 00:03:58 stage-pdp01 CISE_Failed_Attempts 0000024637 1 0 2014-08-07 00:03:58.390 -07:00 0000287104 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19334, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:44:44 10.34.84.145 Aug  7 00:03:56 stage-pdp01 CISE_Failed_Attempts 0000024636 1 0 2014-08-07 00:03:56.386 -07:00 0000287100 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19333, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:46 10.34.84.145 Aug  7 00:03:58 stage-pdp01 CISE_Failed_Attempts 0000024637 1 0 2014-08-07 00:03:58.390 -07:00 0000287104 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19334, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:44:47 10.42.7.63 Aug  7 00:03:59 npf-sjca-pdp01 CISE_Failed_Attempts 0001970140 1 0 2014-08-07 00:03:59.951 -07:00 0098662310 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085331, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4F53E324DF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:44:48 10.42.7.64 Aug  7 00:04:00 npf-sjca-pdp02 CISE_Failed_Attempts 0000370905 1 0 2014-08-07 00:04:00.526 -07:00 0011267674 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.64, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=169.254.53.87, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=1458615, Acct-Output-Octets=3836368, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=95, Acct-Input-Packets=4505, Acct-Output-Packets=5619, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-p
 air=audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp02/195481465/270965, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=52, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired,
+Aug  6 16:44:48 10.42.7.64 Aug  7 00:04:00 npf-sjca-pdp02 CISE_Failed_Attempts 0000370905 1 0 2014-08-07 00:04:00.526 -07:00 0011267674 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.64, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=169.254.53.87, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=1458615, Acct-Output-Octets=3836368, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=95, Acct-Input-Packets=4505, Acct-Output-Packets=5619, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-p
 air=audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp02/195481465/270965, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=52, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, 
 Aug  6 16:45:52 10.42.7.64 Aug  7 00:05:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370920 1 0 2014-08-07 00:05:04.969 -07:00 0011267987 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270977, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F653E32520, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:45:58 10.42.7.63 Aug  7 00:05:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001970212 1 0 2014-08-07 00:05:09.998 -07:00 0098665518 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085460, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5053E32525, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:47:03 10.42.7.64 Aug  7 00:06:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000370931 1 0 2014-08-07 00:06:15.016 -07:00 0011268196 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270985, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F753E32567, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:47:08 10.42.7.63 Aug  7 00:06:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970324 1 0 2014-08-07 00:06:20.055 -07:00 0098669942 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085599, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5153E3256C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:47:13 10.42.7.64 Aug  7 00:06:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000370934 1 0 2014-08-07 00:06:25.097 -07:00 0011268209 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270987, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F853E32571, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:47:18 10.42.7.63 Aug  7 00:06:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970335 1 0 2014-08-07 00:06:30.119 -07:00 0098670037 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085618, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5253E32576, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:47:48 10.34.84.145 Aug  7 00:07:00 stage-pdp01 CISE_Failed_Attempts 0000024649 1 0 2014-08-07 00:07:00.418 -07:00 0000287210 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19342, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:50 10.34.84.145 Aug  7 00:07:02 stage-pdp01 CISE_Failed_Attempts 0000024650 1 0 2014-08-07 00:07:02.421 -07:00 0000287214 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19343, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:52 10.34.84.145 Aug  7 00:07:04 stage-pdp01 CISE_Failed_Attempts 0000024651 1 0 2014-08-07 00:07:04.425 -07:00 0000287218 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19344, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:54 10.34.84.145 Aug  7 00:07:06 stage-pdp01 CISE_Failed_Attempts 0000024652 1 0 2014-08-07 00:07:06.429 -07:00 0000287222 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19345, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:56 10.34.84.145 Aug  7 00:07:08 stage-pdp01 CISE_Failed_Attempts 0000024653 1 0 2014-08-07 00:07:08.429 -07:00 0000287226 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19346, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:47:58 10.34.84.145 Aug  7 00:07:10 stage-pdp01 CISE_Failed_Attempts 0000024654 1 0 2014-08-07 00:07:10.433 -07:00 0000287230 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19347, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:47:48 10.34.84.145 Aug  7 00:07:00 stage-pdp01 CISE_Failed_Attempts 0000024649 1 0 2014-08-07 00:07:00.418 -07:00 0000287210 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19342, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:50 10.34.84.145 Aug  7 00:07:02 stage-pdp01 CISE_Failed_Attempts 0000024650 1 0 2014-08-07 00:07:02.421 -07:00 0000287214 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19343, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:52 10.34.84.145 Aug  7 00:07:04 stage-pdp01 CISE_Failed_Attempts 0000024651 1 0 2014-08-07 00:07:04.425 -07:00 0000287218 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19344, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:54 10.34.84.145 Aug  7 00:07:06 stage-pdp01 CISE_Failed_Attempts 0000024652 1 0 2014-08-07 00:07:06.429 -07:00 0000287222 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19345, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:56 10.34.84.145 Aug  7 00:07:08 stage-pdp01 CISE_Failed_Attempts 0000024653 1 0 2014-08-07 00:07:08.429 -07:00 0000287226 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19346, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:58 10.34.84.145 Aug  7 00:07:10 stage-pdp01 CISE_Failed_Attempts 0000024654 1 0 2014-08-07 00:07:10.433 -07:00 0000287230 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19347, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:48:23 10.42.7.64 Aug  7 00:07:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000370955 1 0 2014-08-07 00:07:35.138 -07:00 0011268472 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271001, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F953E325B7, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:48:28 10.42.7.63 Aug  7 00:07:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970420 1 0 2014-08-07 00:07:40.178 -07:00 0098673462 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085757, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5353E325BC, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:49:33 10.42.7.64 Aug  7 00:08:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000370984 1 0 2014-08-07 00:08:45.219 -07:00 0011269071 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271016, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FB53E325FD, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
@@ -53,24 +53,24 @@ Aug  6 16:49:43 10.42.7.64 Aug  7 00:08:55 npf-sjca-pdp02 CISE_Failed_Attempts 0
 Aug  6 16:49:48 10.42.7.63 Aug  7 00:09:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970524 1 0 2014-08-07 00:09:00.330 -07:00 0098678019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085909, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5553E3260C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:50:53 10.42.7.64 Aug  7 00:10:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000370999 1 0 2014-08-07 00:10:05.339 -07:00 0011269371 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271027, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FD53E3264D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:50:58 10.42.7.63 Aug  7 00:10:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970625 1 0 2014-08-07 00:10:10.388 -07:00 0098682297 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086061, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5653E32652, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:51:00 10.34.84.145 Aug  7 00:10:12 stage-pdp01 CISE_Failed_Attempts 0000024661 1 0 2014-08-07 00:10:12.492 -07:00 0000287258 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19354, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:02 10.34.84.145 Aug  7 00:10:14 stage-pdp01 CISE_Failed_Attempts 0000024662 1 0 2014-08-07 00:10:14.496 -07:00 0000287262 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19355, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:04 10.34.84.145 Aug  7 00:10:16 stage-pdp01 CISE_Failed_Attempts 0000024663 1 0 2014-08-07 00:10:16.496 -07:00 0000287266 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19356, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:06 10.34.84.145 Aug  7 00:10:18 stage-pdp01 CISE_Failed_Attempts 0000024664 1 0 2014-08-07 00:10:18.500 -07:00 0000287270 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19357, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:08 10.34.84.145 Aug  7 00:10:20 stage-pdp01 CISE_Failed_Attempts 0000024665 1 0 2014-08-07 00:10:20.504 -07:00 0000287274 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19358, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:51:10 10.34.84.145 Aug  7 00:10:22 stage-pdp01 CISE_Failed_Attempts 0000024667 1 0 2014-08-07 00:10:22.507 -07:00 0000287279 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19359, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
+Aug  6 16:51:00 10.34.84.145 Aug  7 00:10:12 stage-pdp01 CISE_Failed_Attempts 0000024661 1 0 2014-08-07 00:10:12.492 -07:00 0000287258 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19354, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:02 10.34.84.145 Aug  7 00:10:14 stage-pdp01 CISE_Failed_Attempts 0000024662 1 0 2014-08-07 00:10:14.496 -07:00 0000287262 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19355, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:04 10.34.84.145 Aug  7 00:10:16 stage-pdp01 CISE_Failed_Attempts 0000024663 1 0 2014-08-07 00:10:16.496 -07:00 0000287266 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19356, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:06 10.34.84.145 Aug  7 00:10:18 stage-pdp01 CISE_Failed_Attempts 0000024664 1 0 2014-08-07 00:10:18.500 -07:00 0000287270 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19357, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:08 10.34.84.145 Aug  7 00:10:20 stage-pdp01 CISE_Failed_Attempts 0000024665 1 0 2014-08-07 00:10:20.504 -07:00 0000287274 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19358, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:10 10.34.84.145 Aug  7 00:10:22 stage-pdp01 CISE_Failed_Attempts 0000024667 1 0 2014-08-07 00:10:22.507 -07:00 0000287279 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19359, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
 Aug  6 16:52:03 10.42.7.64 Aug  7 00:11:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371005 1 0 2014-08-07 00:11:15.432 -07:00 0011269421 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271031, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FE53E32693, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:52:08 10.42.7.63 Aug  7 00:11:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970691 1 0 2014-08-07 00:11:20.468 -07:00 0098685176 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086181, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5753E32698, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:52:13 10.42.7.64 Aug  7 00:11:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371007 1 0 2014-08-07 00:11:25.515 -07:00 0011269426 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271032, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FF53E3269D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:52:18 10.42.7.63 Aug  7 00:11:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970708 1 0 2014-08-07 00:11:30.551 -07:00 0098685669 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=8, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086202, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5853E326A2, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
 Aug  6 16:53:23 10.42.7.64 Aug  7 00:12:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371016 1 0 2014-08-07 00:12:35.547 -07:00 0011269586 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271040, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570053E326E3, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
 Aug  6 16:53:28 10.42.7.63 Aug  7 00:12:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970802 1 0 2014-08-07 00:12:40.596 -07:00 0098689883 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086334, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5953E326E8, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
-Aug  6 16:54:12 10.34.84.145 Aug  7 00:13:24 stage-pdp01 CISE_Failed_Attempts 0000024680 1 0 2014-08-07 00:13:24.527 -07:00 0000287388 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19368, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:14 10.34.84.145 Aug  7 00:13:26 stage-pdp01 CISE_Failed_Attempts 0000024681 1 0 2014-08-07 00:13:26.531 -07:00 0000287392 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19369, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:16 10.34.84.145 Aug  7 00:13:28 stage-pdp01 CISE_Failed_Attempts 0000024682 1 0 2014-08-07 00:13:28.534 -07:00 0000287396 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19370, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405,
-Aug  6 16:54:18 10.34.84.145 Aug  7 00:13:30 stage-pdp01 CISE_Failed_Attempts 0000024683 1 0 2014-08-07 00:13:30.538 -07:00 00002

<TRUNCATED>


[25/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

Posted by ce...@apache.org.
replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/a919cc19
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/a919cc19
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/a919cc19

Branch: refs/heads/master
Commit: a919cc191a207373c7aa854dfe4949f9f0fe9daa
Parents: 05e188b
Author: charlesporter <pu...@gmail.com>
Authored: Sun Dec 13 21:04:53 2015 -0800
Committer: charlesporter <pu...@gmail.com>
Committed: Sun Dec 13 21:04:53 2015 -0800

----------------------------------------------------------------------
 opensoc-streaming/.gitignore                    |   15 -
 opensoc-streaming/.travis.yml                   |   14 -
 opensoc-streaming/OpenSOC-Alerts/pom.xml        |   61 +-
 opensoc-streaming/OpenSOC-Alerts/readme.md      |  104 +
 .../com/opensoc/alerts/AbstractAlertBolt.java   |   13 +-
 .../com/opensoc/alerts/TelemetryAlertsBolt.java |   31 +-
 .../alerts/adapters/AbstractAlertAdapter.java   |    8 +-
 .../alerts/adapters/AllAlertAdapter.java        |  249 +-
 .../alerts/adapters/CIFAlertsAdapter.java       |  311 +
 .../adapters/HbaseWhiteAndBlacklistAdapter.java |  130 +-
 .../alerts/adapters/KeywordsAlertAdapter.java   |  274 +
 .../opensoc/alerts/adapters/RangeChecker.java   |   23 +
 .../alerts/adapters/ThreatAlertsAdapter.java    |  311 +
 .../src/main/resources/hbase-site.xml           |  221 +-
 .../alerts/adapters/AllAlertAdapterTest.java    |  166 +
 .../resources/AllAlertAdapterTest.properties    |    1 +
 .../TestSchemas/AllAlertAdapterSchema.json      |   42 +
 .../resources/config/AllAlertAdapterTest.config |    8 +
 opensoc-streaming/OpenSOC-Common/.gitignore     |    1 +
 opensoc-streaming/OpenSOC-Common/pom.xml        |   48 +-
 .../configuration/ConfigurationManager.java     |  119 +
 .../dataloads/interfaces/ThreatIntelSource.java |   11 +
 .../main/java/com/opensoc/hbase/HBaseBolt.java  |   14 +-
 .../helpers/services/PcapServiceCli.java        |  110 +
 .../java/com/opensoc/helpers/topology/Cli.java  |  186 +
 .../helpers/topology/ErrorGenerator.java        |   37 +
 .../helpers/topology/SettingsLoader.java        |  149 +
 .../opensoc/index/interfaces/IndexAdapter.java  |    6 +-
 .../java/com/opensoc/ise/parser/ISEParser.java  |   45 +-
 .../ise/parser/ISEParserTokenManager.java       |    3 -
 .../json/serialization/JSONEncoderHelper.java   |    2 +
 .../json/serialization/JSONKafkaSerializer.java |   18 +-
 .../parser/interfaces/MessageParser.java        |    1 +
 .../main/java/com/opensoc/pcap/PacketInfo.java  |   73 +-
 .../com/opensoc/pcap/PcapByteOutputStream.java  |  288 +
 .../main/java/com/opensoc/pcap/PcapMerger.java  |  245 +
 .../com/opensoc/pcap/PcapPacketComparator.java  |   22 +
 .../main/java/com/opensoc/pcap/PcapParser.java  |  183 +
 .../main/java/com/opensoc/pcap/PcapUtils.java   |   27 +
 .../src/main/java/com/opensoc/pcap/asdf.java    |    5 -
 .../com/opensoc/test/AbstractConfigTest.java    |  299 +
 .../com/opensoc/test/AbstractSchemaTest.java    |  198 +
 .../com/opensoc/test/AbstractTestContext.java   |    2 +-
 .../java/com/opensoc/test/ISEParserTest.java    |   27 -
 .../opensoc/tldextractor/BasicTldExtractor.java |  137 +
 .../test/BasicTldExtractorTest.java             |  125 +
 .../java/com/opensoc/topologyhelpers/Cli.java   |  186 -
 .../opensoc/topologyhelpers/ErrorGenerator.java |   27 -
 .../opensoc/topologyhelpers/SettingsLoader.java |  118 -
 .../config/BasicTldExtractorTest.config         |    2 +
 .../src/test/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 opensoc-streaming/OpenSOC-DataLoads/README.md   |   50 +
 .../dependency-reduced-pom.xml                  |  145 -
 opensoc-streaming/OpenSOC-DataLoads/pom.xml     |   22 +-
 .../opensoc/dataloads/cif/HBaseTableLoad.java   |  122 -
 .../OpenSOC-DataLoads/src/hbase-site.xml        |  100 -
 .../opensoc/dataloads/ThreatIntelLoader.java    |  174 +
 .../opensoc/dataloads/cif/HBaseTableLoad.java   |  238 +
 .../src/main/resources/hbase-site.xml           |  100 +
 .../OpenSOC-DataServices/README.md              |    1 +
 opensoc-streaming/OpenSOC-DataServices/pom.xml  |  232 +-
 .../alerts/server/AlertsProcessingServer.java   |    1 -
 .../opensoc/alerts/server/AlertsSearcher.java   |    2 -
 .../dataservices/kafkaclient/KafkaClient.java   |   10 +-
 .../dataservices/kafkaclient/KafkaConsumer.java |    8 +-
 .../modules/guice/DefaultServletModule.java     |    1 -
 .../modules/guice/RestEasyModule.java           |    5 +-
 .../modules/guice/ServiceModule.java            |    1 -
 .../opensoc/dataservices/rest/RestServices.java |    2 -
 .../dataservices/servlet/LogoutServlet.java     |    8 -
 .../websocket/KafkaMessageSenderServlet.java    |    1 -
 .../websocket/KafkaMessageSenderSocket.java     |    2 -
 .../websocket/KafkaWebSocketCreator.java        |    1 -
 .../pcapservice/CellTimestampComparator.java    |   23 +
 .../opensoc/pcapservice/ConfigurationUtil.java  |  269 +
 .../pcapservice/HBaseConfigConstants.java       |   40 +
 .../pcapservice/HBaseConfigurationUtil.java     |  165 +
 .../com/opensoc/pcapservice/IPcapGetter.java    |   88 +
 .../com/opensoc/pcapservice/IPcapScanner.java   |   49 +
 .../pcapservice/PcapGetterHBaseImpl.java        |  809 ++
 .../com/opensoc/pcapservice/PcapHelper.java     |  205 +
 .../pcapservice/PcapReceiverImplRestEasy.java   |  256 +
 .../pcapservice/PcapScannerHBaseImpl.java       |  302 +
 .../com/opensoc/pcapservice/PcapsResponse.java  |  153 +
 .../opensoc/pcapservice/RestTestingUtil.java    |  238 +
 .../pcapservice/rest/JettyServiceRunner.java    |   26 +
 .../opensoc/pcapservice/rest/PcapService.java   |   34 +
 .../ElasticSearch_KafkaAlertsService.java       |    1 -
 .../main/resources/config-definition-hbase.xml  |   34 +
 .../resources/hbase-config-default.properties   |   40 +
 .../src/main/resources/hbase-site.xml           |  127 +
 .../CellTimestampComparatorTest.java            |   92 +
 .../OpenSOC-EnrichmentAdapters/pom.xml          |  138 +-
 .../OpenSOC-EnrichmentAdapters/readme.md        |  125 +
 .../adapters/geo/GeoMysqlAdapter.java           |    5 +-
 .../host/HostFromPropertiesFileAdapter.java     |    4 +-
 .../adapters/threat/AbstractThreatAdapter.java  |   36 +
 .../adapters/threat/ThreatHbaseAdapter.java     |  129 +
 .../adapters/whois/WhoisHBaseAdapter.java       |   23 +-
 .../common/AbstractEnrichmentBolt.java          |   16 +-
 .../common/GenericEnrichmentBolt.java           |   21 +-
 .../src/main/resources/hbase-site.xml           |  221 +-
 .../adapters/cif/CIFHbaseAdapterTest.java       |   27 +
 .../adapters/geo/GeoMysqlAdapterTest.java       |   46 +-
 .../adapters/whois/WhoisHBaseAdapterTest.java   |   31 +-
 .../resources/CIFHbaseAdapterTest.properties    |    8 +-
 .../resources/GeoMysqlAdapterTest.properties    |    8 +-
 .../resources/TestSchemas/CIFHbaseSchema.json   |    0
 .../resources/TestSchemas/GeoMySqlSchema.json   |   42 +
 .../resources/TestSchemas/WhoisHbaseSchema.json |    0
 .../resources/WhoisHbaseAdapterTest.properties  |    8 +-
 opensoc-streaming/OpenSOC-Indexing/pom.xml      |   16 +-
 opensoc-streaming/OpenSOC-Indexing/readme.md    |   61 +
 .../opensoc/indexing/TelemetryIndexingBolt.java |   26 +-
 .../indexing/adapters/AbstractIndexAdapter.java |    2 +-
 .../indexing/adapters/ESBaseBulkAdapter.java    |   12 +-
 .../adapters/ESBulkRotatingAdapter.java         |   18 +-
 .../adapters/ESTimedRotatingAdapter.java        |   49 +-
 .../OpenSOC-MessageParsers/pom.xml              |   30 +-
 .../OpenSOC-MessageParsers/readme.md            |   82 +
 .../com/opensoc/parsing/AbstractParserBolt.java |   33 +-
 .../com/opensoc/parsing/PcapParserBolt.java     |   83 +-
 .../opensoc/parsing/TelemetryParserBolt.java    |    9 +-
 .../opensoc/parsing/parsers/AbstractParser.java |    8 +-
 .../opensoc/parsing/parsers/BasicBroParser.java |  101 +-
 .../parsing/parsers/BasicFireEyeParser.java     |  234 +
 .../opensoc/parsing/parsers/BasicIseParser.java |    2 +
 .../parsing/parsers/BasicLancopeParser.java     |    2 +
 .../parsing/parsers/BasicLogstashParser.java    |   65 +
 .../parsers/BasicPaloAltoFirewallParser.java    |  184 +
 .../parsing/parsers/BasicSourcefireParser.java  |   25 +-
 .../opensoc/parsing/parsers/GrokAsaParser.java  |  269 +
 .../parsing/parsers/GrokSourcefireParser.java   |    2 +
 .../com/opensoc/parsing/parsers/GrokUtils.java  |   26 +
 .../parsing/parsers/OpenSOCConverter.java       |  183 +
 .../opensoc/parsing/parsers/OpenSOCGarbage.java |  130 +
 .../opensoc/parsing/parsers/OpenSOCGrok.java    |  367 +
 .../opensoc/parsing/parsers/OpenSOCMatch.java   |  280 +
 .../opensoc/parsing/parsers/ParserUtils.java    |   23 +
 .../src/main/resources/patterns/asa             |  176 +
 .../src/main/resources/patterns/fireeye         |    9 +
 .../src/main/resources/patterns/sourcefire      |   30 +
 .../src/main/resources/patters/sourcefire       |   30 -
 .../parsing/test/BasicBroParserTest.java        |  103 +
 .../parsing/test/BasicFireEyeParserTest.java    |  141 +
 .../parsing/test/BasicIseParserTest.java        |  149 +-
 .../parsing/test/BasicLancopeParserTest.java    |  144 +-
 .../test/BasicPaloAltoFirewallParserTest.java   |  136 +
 .../parsing/test/BasicSourcefireParserTest.java |  103 +-
 .../com/opensoc/parsing/test/BroParserTest.java |  120 +-
 .../opensoc/parsing/test/GrokAsaParserTest.java |  149 +
 .../src/test/resources/BroParserTest.log        |    3 +
 .../src/test/resources/FireEyeParserTest.log    |    8 +
 .../src/test/resources/GrokParserTest.log       |   12 +
 .../src/test/resources/IseParserTest.log        |  308 +
 .../src/test/resources/IseSample.log            |  308 -
 .../src/test/resources/LancopeParserTest.log    |    1 +
 .../resources/PaloAltoFirewallParserTest.log    |    2 +
 .../src/test/resources/SourceFireTest.log       |    3 +
 .../test/resources/TestSchemas/BroSchema.json   |   28 +
 .../resources/TestSchemas/LancopeSchema.json    |   14 +-
 .../resources/TestSchemas/SourcefireSchema.json |   20 +-
 .../config/BasicFireEyeParserTest.config        |    2 +
 .../resources/config/BasicIseParserTest.config  |    2 +
 .../config/BasicLancopeParserTest.config        |    2 +
 .../BasicPaloAltoFirewallParserTest.config      |    2 +
 .../config/BasicSourcefireParserTest.config     |    2 +
 .../test/resources/config/BroParserTest.config  |    2 +
 .../resources/config/GrokAsaParserTest.config   |    2 +
 .../src/test/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 .../OpenSOC-PCAP_Reconstruction/hbase/.pmd      | 1262 ---
 .../hbase/README.txt                            |   16 -
 .../hbase/dependency-reduced-pom.xml            |  230 -
 .../OpenSOC-PCAP_Reconstruction/hbase/pom.xml   |  137 -
 .../hbase/client/CellTimestampComparator.java   |   23 -
 .../opensoc/hbase/client/ConfigurationUtil.java |  267 -
 .../hbase/client/HBaseConfigConstants.java      |   40 -
 .../hbase/client/HBaseConfigurationUtil.java    |  165 -
 .../cisco/opensoc/hbase/client/IPcapGetter.java |   88 -
 .../opensoc/hbase/client/IPcapReceiver.java     |  109 -
 .../opensoc/hbase/client/IPcapScanner.java      |   49 -
 .../hbase/client/PcapGetterHBaseImpl.java       |  790 --
 .../cisco/opensoc/hbase/client/PcapHelper.java  |  205 -
 .../opensoc/hbase/client/PcapReceiverImpl.java  |  212 -
 .../hbase/client/PcapScannerHBaseImpl.java      |  302 -
 .../opensoc/hbase/client/PcapsResponse.java     |  151 -
 .../opensoc/hbase/client/RestTestingUtil.java   |  238 -
 .../main/resources/config-definition-hbase.xml  |   34 -
 .../resources/hbase-config-default.properties   |   40 -
 .../hbase/src/main/resources/log4j.properties   |   21 -
 .../client/CellTimestampComparatorTest.java     |   92 -
 .../hbase/client/ConfigurationUtilTest.java     |   50 -
 .../client/HBaseConfigurationUtilTest.java      |   52 -
 .../hbase/client/HBaseIntegrationTest.java      |   74 -
 .../hbase/client/PcapGetterHBaseImplTest.java   |  536 -
 .../opensoc/hbase/client/PcapHelperTest.java    |  321 -
 .../hbase/client/PcapReceiverImplTest.java      |  232 -
 .../hbase/client/PcapScannerHBaseImplTest.java  |  234 -
 .../src/test/resources/hbase-config.properties  |   40 -
 .../src/test/resources/test-tcp-packet.pcap     |  Bin 144 -> 0 bytes
 .../OpenSOC-PCAP_Reconstruction/service/.pmd    | 1190 ---
 .../service/README.txt                          |   10 -
 .../OpenSOC-PCAP_Reconstruction/service/pom.xml |   52 -
 .../service/src/main/resources/log4j.properties |   10 -
 .../src/main/webapp/META-INF/MANIFEST.MF        |    3 -
 .../src/main/webapp/WEB-INF/ipcap-config.xml    |    7 -
 .../service/src/main/webapp/WEB-INF/web.xml     |   17 -
 .../OpenSOC-Pcap_Service/README.txt             |   16 +
 opensoc-streaming/OpenSOC-Pcap_Service/pom.xml  |  267 +
 .../OpenSOC-Pcap_Service/pom.xml.versionsBackup |  268 +
 .../pcapservice/CellTimestampComparator.java    |   23 +
 .../opensoc/pcapservice/ConfigurationUtil.java  |  269 +
 .../pcapservice/HBaseConfigConstants.java       |   40 +
 .../pcapservice/HBaseConfigurationUtil.java     |  165 +
 .../com/opensoc/pcapservice/IPcapGetter.java    |   88 +
 .../com/opensoc/pcapservice/IPcapScanner.java   |   49 +
 .../pcapservice/PcapGetterHBaseImpl.java        |  809 ++
 .../com/opensoc/pcapservice/PcapHelper.java     |  205 +
 .../pcapservice/PcapReceiverImplRestEasy.java   |  250 +
 .../pcapservice/PcapScannerHBaseImpl.java       |  302 +
 .../com/opensoc/pcapservice/PcapsResponse.java  |  153 +
 .../opensoc/pcapservice/RestTestingUtil.java    |  238 +
 .../pcapservice/rest/JettyServiceRunner.java    |   26 +
 .../opensoc/pcapservice/rest/PcapService.java   |   34 +
 .../OnlyDeleteExpiredFilesCompactionPolicy.java |   37 +
 .../main/resources/config-definition-hbase.xml  |   34 +
 .../resources/hbase-config-default.properties   |   40 +
 .../src/main/resources/hbase-site.xml           |  127 +
 .../src/main/resources/log4j.properties         |   21 +
 .../CellTimestampComparatorTest.java            |   92 +
 .../pcapservice/ConfigurationUtilTest.java      |   50 +
 .../pcapservice/HBaseConfigurationUtilTest.java |   52 +
 .../pcapservice/HBaseIntegrationTest.java       |   74 +
 .../pcapservice/PcapGetterHBaseImplTest.java    |  536 +
 .../com/opensoc/pcapservice/PcapHelperTest.java |  321 +
 .../pcapservice/PcapScannerHBaseImplTest.java   |  232 +
 .../src/test/resources/hbase-config.properties  |   40 +
 .../src/test/resources/test-tcp-packet.pcap     |  Bin 0 -> 144 bytes
 opensoc-streaming/OpenSOC-Topologies/pom.xml    |  188 +
 opensoc-streaming/OpenSOC-Topologies/readme.md  |   47 +
 .../src/main/java/com/opensoc/topology/Asa.java |   40 +
 .../main/java/com/opensoc/topology/FireEye.java |   21 +
 .../src/main/java/com/opensoc/topology/Ise.java |    4 +
 .../main/java/com/opensoc/topology/Lancope.java |    5 +
 .../com/opensoc/topology/PaloAltoFirewall.java  |   41 +
 .../main/java/com/opensoc/topology/Pcap.java    |    6 +
 .../com/opensoc/topology/runner/AsaRunner.java  |   94 +
 .../com/opensoc/topology/runner/BroRunner.java  |   15 +-
 .../opensoc/topology/runner/FireEyeRunner.java  |   77 +
 .../com/opensoc/topology/runner/ISERunner.java  |   17 +-
 .../opensoc/topology/runner/LancopeRunner.java  |   15 +-
 .../topology/runner/PaloAltoFirewallRunner.java |   95 +
 .../com/opensoc/topology/runner/PcapRunner.java |    4 +-
 .../topology/runner/SourcefireRunner.java       |   17 +-
 .../opensoc/topology/runner/TopologyRunner.java |  205 +-
 .../topologies/asa/features_enabled.conf        |  113 +
 .../OpenSOC_Configs/topologies/asa/metrics.conf |   26 +
 .../topologies/asa/topology.conf                |  110 +
 .../topologies/asa/topology_identifier.conf     |    4 +
 .../topologies/bro/features_enabled.conf        |   14 +-
 .../topologies/bro/topology.conf                |   71 +-
 .../topologies/fireeye/features_enabled.conf    |  113 +
 .../topologies/fireeye/metrics.conf             |   26 +
 .../topologies/fireeye/topology.conf            |  110 +
 .../topologies/fireeye/topology_identifier.conf |    4 +
 .../topologies/ise/features_enabled.conf        |   10 +-
 .../topologies/ise/topology.conf                |   39 +-
 .../topologies/lancope/features_enabled.conf    |   10 +-
 .../topologies/lancope/topology.conf            |   39 +-
 .../topologies/paloalto/features_enabled.conf   |  113 +
 .../topologies/paloalto/metrics.conf            |   26 +
 .../topologies/paloalto/topology.conf           |  113 +
 .../paloalto/topology_identifier.conf           |    4 +
 .../topologies/pcap/features_enabled.conf       |   10 +-
 .../topologies/pcap/topology.conf               |   64 +-
 .../topologies/sourcefire/features_enabled.conf |   10 +-
 .../topologies/sourcefire/topology.conf         |   48 +-
 .../src/main/resources/SampleInput/AsaOutput    |  100 +
 .../resources/SampleInput/FireeyeExampleOutput  |   90 +
 .../main/resources/SampleInput/ISESampleOutput  |  314 +-
 .../main/resources/SampleInput/PaloaltoOutput   |  100 +
 .../resources/TopologyConfigs_old/lancope.conf  |   16 +-
 .../src/main/resources/effective_tld_names.dat  | 9719 ++++++++++++++++++
 .../src/main/resources/hbase-site.xml           |  131 +
 opensoc-streaming/pom.xml                       |   15 +-
 opensoc-streaming/readme.md                     |  136 +-
 286 files changed, 48498 insertions(+), 9780 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/.gitignore
----------------------------------------------------------------------
diff --git a/opensoc-streaming/.gitignore b/opensoc-streaming/.gitignore
deleted file mode 100644
index 6f6d811..0000000
--- a/opensoc-streaming/.gitignore
+++ /dev/null
@@ -1,15 +0,0 @@
-*.class
-target/
-copy/
-
-# Package Files #
-*.jar
-*.war
-*.ear
-
-# Eclipse related files
-.classpath
-.project
-.settings/
-
-OpenSOC-Topologies/pom.xml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/.travis.yml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/.travis.yml b/opensoc-streaming/.travis.yml
deleted file mode 100644
index 7c87471..0000000
--- a/opensoc-streaming/.travis.yml
+++ /dev/null
@@ -1,14 +0,0 @@
-language: java
-before_script:
-  mvn clean install -Dmode=local
-notifications:
-  email:
-    recipients:
-      - spiddapa@cisco.com
-      - jsirota@cisco.com
-    on_success: always
-    on_failure: always
-  hipchat:
-    rooms:
-      secure: grhlFGHjjEIiOUa/Wt7pyB78La9WHQCQOZEsGSjaYDAObIClBtmfP0TYEIa+Sk6auNFWdRwhxCu8xGtT+G554loR+9A5iQaCGqlJGQyygHl5PwlWu0kWRFRO75zfvLMTQ+beowM/tgGnf0MBz5adjZmnGu+L0Fet3SYcQOJixe0=
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/pom.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/pom.xml b/opensoc-streaming/OpenSOC-Alerts/pom.xml
index a2fcfe5..55fb68b 100644
--- a/opensoc-streaming/OpenSOC-Alerts/pom.xml
+++ b/opensoc-streaming/OpenSOC-Alerts/pom.xml
@@ -15,19 +15,21 @@
 	<parent>
 		<groupId>com.opensoc</groupId>
 		<artifactId>OpenSOC-Streaming</artifactId>
-		<version>0.3BETA-SNAPSHOT</version>
+		<version>0.6BETA</version>
 	</parent>
 	<artifactId>OpenSOC-Alerts</artifactId>
 	<name>OpenSOC-Alerts</name>
 	<description>Taggers for alerts</description>
 	<properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>		
 		<commons.validator.version>1.4.0</commons.validator.version>
 	</properties>
 	<dependencies>
 		<dependency>
 			<groupId>com.opensoc</groupId>
 			<artifactId>OpenSOC-Common</artifactId>
-			<version>${parent.version}</version>
+			<version>${project.parent.version}</version>
 		</dependency>
 		<dependency>
 			<groupId>com.googlecode.json-simple</groupId>
@@ -39,6 +41,12 @@
 			<artifactId>storm-core</artifactId>
 			<version>${global_storm_version}</version>
 			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+				   <artifactId>servlet-api</artifactId>
+				   <groupId>javax.servlet</groupId>
+				  </exclusion>
+		    </exclusions>					
 		</dependency>
 		<dependency>
 			<groupId>org.apache.kafka</groupId>
@@ -69,13 +77,60 @@
 			<groupId>commons-validator</groupId>
 			<artifactId>commons-validator</artifactId>
 			<version>${commons.validator.version}</version>
+			<exclusions>
+				<exclusion>
+				
+				<groupId>commons-beanutils</groupId>
+				
+				<artifactId>commons-beanutils</artifactId>
+				
+				</exclusion>
+			</exclusions>
 		</dependency>
 	</dependencies>
 	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.18</version>
+				<configuration>
+					<systemProperties>
+						<property>
+							<name>mode</name>
+							<value>local</value>
+						</property>
+					</systemProperties>
+				</configuration>
+			</plugin>		
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.1</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-pmd-plugin</artifactId>
+				<version>3.3</version>
+				<configuration>
+					<targetJdk>1.7</targetJdk>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>emma-maven-plugin</artifactId>
+				<version>1.0-alpha-3</version>
+				<inherited>true</inherited>
+			</plugin>			
+		</plugins>
 		<resources>
 			<resource>
 				<directory>src/main/resources</directory>
 			</resource>
 		</resources>
 	</build>
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/readme.md
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/readme.md b/opensoc-streaming/OpenSOC-Alerts/readme.md
new file mode 100644
index 0000000..1c410a4
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/readme.md
@@ -0,0 +1,104 @@
+#OpenSOC-Alerts
+
+##Module Description
+
+This module enables telemetry alerts.  It splits the mssage stream into two streams.  The original message is emitted on the "message" stream.  The corresponding alert is emitted on the "alerts" stream.  The two are tied together through the alerts UUID.  
+
+##Message Format
+
+Assuming the original message (with enrichments enabled) has the following format:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"timestamp": xxxx.
+"original_string": xxxx,
+"additional-field 1": xxxx,
+},
+"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"}
+
+}
+```
+
+The telemetry message will be tagged with a UUID alert tag like so:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"timestamp": xxxx,
+"original_string": xxxx,
+"additional-field 1": xxxx,
+},
+"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"},
+"alerts": [UUID1, UUID2, UUID3, etc]
+
+}
+```
+
+The alert will be fired on the "alerts" stream and can be customized to have any format as long as it includes the required mandatory fields.  The mandatory fields are:
+
+* timestamp (epoch): The time from the message that triggered the alert
+* description: A human friendly string representation of the alert
+* alert_id: The UUID generated for the alert. This uniquely identifies an alert
+
+There are other standard but not mandatory fields that can be leveraged by opensoc-ui and other alert consumers:
+
+* designated_host: The IP address that corresponds to an asset. Ex. The IP address of the company device associated with the alert.
+* enrichment: A copy of the enrichment data from the message that triggered the alert
+* priority: The priority of the alert. Mustb e set to one of HIGH, MED or LOW
+
+An example of an alert with all mandatory and standard fields would look like so:
+
+```json
+{
+"timestamp": xxxx,
+"alert_id": UUID,
+"description": xxxx,
+"designated_host": xxxx,
+"enrichment": { "geo": xxxx, "whois": xxxx, "cif": xxxx },
+"priority": "MED"
+}
+```
+
+##Alerts Bolt
+
+The bolt can be extended with a variety of alerts adapters.  The ability to stack alerts is currently in beta, but is not currently advisable.  We advice to only have one alerts bolt per topology.  The adapters are rules-based adapters which fire alerts when rules are a match.  Currently only Java adapters are provided, but there are future plans to provide Grok-Based adapters as well.
+
+The signature of the Alerts bolt is as follows:
+
+``` 
+TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt()
+.withIdentifier(alerts_identifier).withMaxCacheSize(1000)
+.withMaxTimeRetain(3600).withAlertsAdapter(alerts_adapter)
+.withMetricConfiguration(config);
+```
+Identifier - JSON key where the alert is attached
+TimeRetain & MaxCacheSize - Caching parameters for the bolt
+MetricConfiguration - export custom bolt metrics to graphite (if not null)
+AlertsAdapter - pick the appropriate adapter for generating the alerts
+
+### Java Adapters
+
+Java adapters are designed for high volume topologies, but are not easily extensible.  The adapters provided are:
+
+* com.opensoc.alerts.adapters.AllAlertsAdapter - will tag every single message with the static alert (appropriate for topologies like Sourcefire, etc, where every single message is an alert)
+* com.opensoc.alerts.adapters.HbaseWhiteAndBlacklistAdapter - will read white and blacklists from HBase and fire alerts if source or dest IP are not on the whitelist or if any IP is on the blacklist
+* com.opensoc.alerts.adapters.CIFAlertsAdapter - will alert on messages that have results in enrichment.cif.
+* com.opensoc.alerts.adpaters.KeywordsAlertAdapter - will alert on messages that contain any of a list of keywords
+###Grok Adapters
+
+Grok alerts adapters for OpenSOC are still under devleopment
+
+###Stacking Alert Adapters
+
+The functionality to stack alerts adapters is still under development

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/AbstractAlertBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/AbstractAlertBolt.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/AbstractAlertBolt.java
index 8dc4c9d..4ea1d58 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/AbstractAlertBolt.java
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/AbstractAlertBolt.java
@@ -54,8 +54,8 @@ public abstract class AbstractAlertBolt extends BaseRichBolt {
 	protected JSONObject _identifier;
 	protected MetricReporter _reporter;
 
-	protected int _MAX_CACHE_SIZE = -1;
-	protected int _MAX_TIME_RETAIN = -1;
+	protected int _MAX_CACHE_SIZE_OBJECTS_NUM = -1;
+	protected int _MAX_TIME_RETAIN_MINUTES = -1;
 
 	protected Counter ackCounter, emitCounter, failCounter;
 
@@ -82,10 +82,10 @@ public abstract class AbstractAlertBolt extends BaseRichBolt {
 		if (this._identifier == null)
 			throw new IllegalStateException("Identifier must be specified");
 
-		if (this._MAX_CACHE_SIZE == -1)
-			throw new IllegalStateException("MAX_CACHE_SIZE must be specified");
-		if (this._MAX_TIME_RETAIN == -1)
-			throw new IllegalStateException("MAX_TIME_RETAIN must be specified");
+		if (this._MAX_CACHE_SIZE_OBJECTS_NUM == -1)
+			throw new IllegalStateException("MAX_CACHE_SIZE_OBJECTS_NUM must be specified");
+		if (this._MAX_TIME_RETAIN_MINUTES == -1)
+			throw new IllegalStateException("MAX_TIME_RETAIN_MINUTES must be specified");
 
 		try {
 			doPrepare(conf, topologyContext, collector);
@@ -95,6 +95,7 @@ public abstract class AbstractAlertBolt extends BaseRichBolt {
 		}
 
 		boolean success = _adapter.initialize();
+		
 		try {
 			if (!success)
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/TelemetryAlertsBolt.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/TelemetryAlertsBolt.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/TelemetryAlertsBolt.java
index 36bd0fc..7fdba59 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/TelemetryAlertsBolt.java
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/TelemetryAlertsBolt.java
@@ -31,12 +31,11 @@ import backtype.storm.task.TopologyContext;
 import backtype.storm.tuple.Tuple;
 import backtype.storm.tuple.Values;
 
-import com.esotericsoftware.minlog.Log;
 import com.google.common.cache.CacheBuilder;
 import com.opensoc.alerts.interfaces.AlertsAdapter;
+import com.opensoc.helpers.topology.ErrorGenerator;
 import com.opensoc.json.serialization.JSONEncoderHelper;
 import com.opensoc.metrics.MetricReporter;
-import com.opensoc.topologyhelpers.ErrorGenerator;
 
 @SuppressWarnings("rawtypes")
 public class TelemetryAlertsBolt extends AbstractAlertBolt {
@@ -120,24 +119,24 @@ public class TelemetryAlertsBolt extends AbstractAlertBolt {
 	}
 
 	/**
-	 * @param MAX_CACHE_SIZE
+	 * @param MAX_CACHE_SIZE_OBJECTS_NUM
 	 *            Maximum size of cache before flushing
 	 * @return Instance of this class
 	 */
 
-	public TelemetryAlertsBolt withMaxCacheSize(int MAX_CACHE_SIZE) {
-		_MAX_CACHE_SIZE = MAX_CACHE_SIZE;
+	public TelemetryAlertsBolt withMaxCacheSize(int MAX_CACHE_SIZE_OBJECTS_NUM) {
+		_MAX_CACHE_SIZE_OBJECTS_NUM = MAX_CACHE_SIZE_OBJECTS_NUM;
 		return this;
 	}
 
 	/**
-	 * @param MAX_TIME_RETAIN
+	 * @param MAX_TIME_RETAIN_MINUTES
 	 *            Maximum time to retain cached entry before expiring
 	 * @return Instance of this class
 	 */
 
-	public TelemetryAlertsBolt withMaxTimeRetain(int MAX_TIME_RETAIN) {
-		_MAX_TIME_RETAIN = MAX_TIME_RETAIN;
+	public TelemetryAlertsBolt withMaxTimeRetain(int MAX_TIME_RETAIN_MINUTES) {
+		_MAX_TIME_RETAIN_MINUTES = MAX_TIME_RETAIN_MINUTES;
 		return this;
 	}
 
@@ -145,8 +144,8 @@ public class TelemetryAlertsBolt extends AbstractAlertBolt {
 	void doPrepare(Map conf, TopologyContext topologyContext,
 			OutputCollector collector) throws IOException {
 
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE)
-				.expireAfterWrite(_MAX_TIME_RETAIN, TimeUnit.MINUTES).build();
+		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
+				.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES).build();
 
 		LOG.info("[OpenSOC] Preparing TelemetryAlert Bolt...");
 
@@ -185,10 +184,10 @@ public class TelemetryAlertsBolt extends AbstractAlertBolt {
 			JSONArray uuid_list = new JSONArray();
 
 			if (alerts_list == null || alerts_list.isEmpty()) {
-				LOG.trace("[OpenSOC] No alerts detected in: "
+				System.out.println("[OpenSOC] No alerts detected in: "
 						+ original_message);
 				_collector.ack(tuple);
-				_collector.emit(new Values(original_message));
+				_collector.emit("message", new Values(key, original_message));
 			} else {
 				for (String alert : alerts_list.keySet()) {
 					uuid_list.add(alert);
@@ -196,11 +195,11 @@ public class TelemetryAlertsBolt extends AbstractAlertBolt {
 					LOG.trace("[OpenSOC] Checking alerts cache: " + alert);
 
 					if (cache.getIfPresent(alert) == null) {
-						LOG.trace("[OpenSOC]: Alert not found in cache: " + alert);
+						System.out.println("[OpenSOC]: Alert not found in cache: " + alert);
 
 						JSONObject global_alert = new JSONObject();
 						global_alert.putAll(_identifier);
-						global_alert.put("triggered", alerts_list.get(alert));
+						global_alert.putAll(alerts_list.get(alert));
 						global_alert.put("timestamp", System.currentTimeMillis());
 						_collector.emit("alert", new Values(global_alert));
 
@@ -244,11 +243,9 @@ public class TelemetryAlertsBolt extends AbstractAlertBolt {
 			 * if (metricConfiguration != null) { failCounter.inc(); }
 			 */
 
-			String error_as_string = org.apache.commons.lang.exception.ExceptionUtils
-					.getStackTrace(e);
 
 			JSONObject error = ErrorGenerator.generateErrorMessage(
-					"Alerts problem: " + original_message, error_as_string);
+					"Alerts problem: " + original_message, e);
 			_collector.emit("error", new Values(error));
 		}
 	}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AbstractAlertAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AbstractAlertAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AbstractAlertAdapter.java
index 35595a0..1330e21 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AbstractAlertAdapter.java
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AbstractAlertAdapter.java
@@ -19,8 +19,6 @@
 package com.opensoc.alerts.adapters;
 
 import java.io.Serializable;
-import java.util.Set;
-import java.util.TreeSet;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
 
@@ -62,9 +60,9 @@ public abstract class AbstractAlertAdapter implements AlertsAdapter, Serializabl
 		return (ip1 + "-" + ip2 + "-" + alert_type);
 	}
 	
-	private void generateCache(int _MAX_CACHE_SIZE, int _MAX_TIME_RETAIN)
+	protected void generateCache(int _MAX_CACHE_SIZE_OBJECTS_NUM, int _MAX_TIME_RETAIN_MINUTES)
 	{
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE)
-				.expireAfterWrite(_MAX_TIME_RETAIN, TimeUnit.MINUTES).build();
+		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
+				.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES).build();
 	}
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AllAlertAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AllAlertAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AllAlertAdapter.java
index 035a865..db667e7 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AllAlertAdapter.java
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/AllAlertAdapter.java
@@ -1,6 +1,5 @@
 package com.opensoc.alerts.adapters;
 
-import java.io.IOException;
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -12,7 +11,6 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.validator.routines.InetAddressValidator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -23,64 +21,96 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import org.apache.log4j.Logger;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import com.opensoc.alerts.interfaces.AlertsAdapter;
 
+@SuppressWarnings("serial")
 public class AllAlertAdapter implements AlertsAdapter, Serializable {
 
+	HTableInterface blacklist_table;
 	HTableInterface whitelist_table;
 	InetAddressValidator ipvalidator = new InetAddressValidator();
 	String _whitelist_table_name;
-	// String _blacklist_table_name;
+	String _blacklist_table_name;
 	String _quorum;
 	String _port;
 	String _topologyname;
 	Configuration conf = null;
 
-	protected  Cache<String, String> cache;
-
-	Map<String, String> id_list = new HashMap<String, String>();
+	Cache<String, String> cache;
+	String _topology_name;
 
 	Set<String> loaded_whitelist = new HashSet<String>();
 	Set<String> loaded_blacklist = new HashSet<String>();
 
-	String _topology_name;
-
-	protected static final Logger LOG = LoggerFactory
+	protected static final Logger LOG = Logger
 			.getLogger(AllAlertAdapter.class);
 
-	public AllAlertAdapter(String whitelist_table_name,
-			String blacklist_table_name, String quorum, String port,
-			int _MAX_TIME_RETAIN, int _MAX_CACHE_SIZE) {
-
-		_whitelist_table_name = whitelist_table_name;
-
-		_quorum = quorum;
-		_port = port;
-
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE)
-				.expireAfterWrite(_MAX_TIME_RETAIN, TimeUnit.MINUTES).build();
+	public AllAlertAdapter(Map<String, String> config) {
+		try {
+			if(!config.containsKey("whitelist_table_name"))
+				throw new Exception("Whitelist table name is missing");
+				
+			_whitelist_table_name = config.get("whitelist_table_name");
+			
+			if(!config.containsKey("blacklist_table_name"))
+				throw new Exception("Blacklist table name is missing");
+			
+			_blacklist_table_name = config.get("blacklist_table_name");
+			
+			if(!config.containsKey("quorum"))
+				throw new Exception("Quorum name is missing");
+			
+			_quorum = config.get("quorum");
+			
+			if(!config.containsKey("port"))
+				throw new Exception("port name is missing");
+			
+			_port = config.get("port");
 
+			if(!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
+				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
+			
+			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
+					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
+			
+			if(!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
+				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
+			
+			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
+					.get("_MAX_TIME_RETAIN_MINUTES"));
 
+			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
+					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
+					.build();
+		} catch (Exception e) {
+			System.out.println("Could not initialize Alerts Adapter");
+			e.printStackTrace();
+			System.exit(0);
+		}
 	}
 
-
+	@SuppressWarnings("resource")
+    @Override
 	public boolean initialize() {
 
 		conf = HBaseConfiguration.create();
-		conf.set("hbase.zookeeper.quorum", _quorum);
-		conf.set("hbase.zookeeper.property.clientPort", _port);
+		//conf.set("hbase.zookeeper.quorum", _quorum);
+		//conf.set("hbase.zookeeper.property.clientPort", _port);
 
-		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);		
+		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);
 		LOG.trace("[OpenSOC] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[OpenSOC] ZK Client/port: " + conf.get("hbase.zookeeper.quorum") + " -> " + conf.get("hbase.zookeeper.property.clientPort"));
+		LOG.trace("[OpenSOC] Whitelist table name: " + _blacklist_table_name);
+		LOG.trace("[OpenSOC] ZK Client/port: "
+				+ conf.get("hbase.zookeeper.quorum") + " -> "
+				+ conf.get("hbase.zookeeper.property.clientPort"));
 
 		try {
 
+			LOG.trace("[OpenSOC] Attempting to connect to hbase");
+
 			HConnection connection = HConnectionManager.createConnection(conf);
 
 			LOG.trace("[OpenSOC] CONNECTED TO HBASE");
@@ -90,12 +120,20 @@ public class AllAlertAdapter implements AlertsAdapter, Serializable {
 			if (!hba.tableExists(_whitelist_table_name))
 				throw new Exception("Whitelist table doesn't exist");
 
+			if (!hba.tableExists(_blacklist_table_name))
+				throw new Exception("Blacklist table doesn't exist");
+
 			whitelist_table = new HTable(conf, _whitelist_table_name);
 
-			LOG.trace("[OpenSOC] CONNECTED TO TABLE: "+ _whitelist_table_name);
+			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _whitelist_table_name);
+			blacklist_table = new HTable(conf, _blacklist_table_name);
+			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _blacklist_table_name);
 
-			Scan scan = new Scan();
+			if (connection == null || whitelist_table == null
+					|| blacklist_table == null)
+				throw new Exception("Unable to initialize hbase connection");
 
+			Scan scan = new Scan();
 
 			ResultScanner rs = whitelist_table.getScanner(scan);
 			try {
@@ -106,18 +144,42 @@ public class AllAlertAdapter implements AlertsAdapter, Serializable {
 				LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
 				e.printStackTrace();
 			} finally {
-				rs.close(); 
+				rs.close(); // always close the ResultScanner!
+				hba.close();
 			}
 			whitelist_table.close();
 
-			LOG.trace("[OpenSOC] Number of entires in white list: " + loaded_whitelist.size());
+			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());
+			
+			System.out.println("LOADED WHITELIST IS: ");
 			
-			if(loaded_whitelist.size() == 0)
-				throw new Exception("Hbase connection is OK, but the table is empty: " + whitelist_table);
+			for(String str: loaded_whitelist)
+				System.out.println("WHITELIST: " + str);
+
+			scan = new Scan();
+
+			rs = blacklist_table.getScanner(scan);
+			try {
+				for (Result r = rs.next(); r != null; r = rs.next()) {
+					loaded_blacklist.add(Bytes.toString(r.getRow()));
+				}
+			} catch (Exception e) {
+				LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
+				e.printStackTrace();
+			} finally {
+				rs.close(); // always close the ResultScanner!
+				hba.close();
+			}
+			blacklist_table.close();
+
+			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());
+
+			rs.close(); // always close the ResultScanner!
+			hba.close();
 
 			return true;
 		} catch (Exception e) {
-			// TODO Auto-generated catch block
+
 			e.printStackTrace();
 		}
 
@@ -125,99 +187,88 @@ public class AllAlertAdapter implements AlertsAdapter, Serializable {
 
 	}
 
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
+	@Override
+	public boolean refresh() throws Exception {
+		// TODO Auto-generated method stub
+		return false;
+	}
 
-		String key = makeKey(source_ip, dst_ip, alert_type);
+	@SuppressWarnings("unchecked")
+    @Override
+	public Map<String, JSONObject> alert(JSONObject raw_message) {
 
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
+		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
+		JSONObject content = (JSONObject) raw_message.get("message");
 
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
+		JSONObject enrichment = null;
 
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
+		if (raw_message.containsKey("enrichment"))
+			enrichment = (JSONObject) raw_message.get("enrichment");
 
-		return new_UUID;
+		JSONObject alert = new JSONObject();
 
-	}
 
-	public boolean getByKey(String metadata, HTableInterface table) {
 
-		LOG.trace("[OpenSOC] Pinging HBase For:" + metadata);
+		String source = "unknown";
+		String dest = "unknown";
+		String host = "unknown";
 
+		if (content.containsKey("ip_src_addr"))
+		{
+			source = content.get("ip_src_addr").toString();
+			
+			if(RangeChecker.checkRange(loaded_whitelist, source))
+				host = source;				
+		}
 
-		Get get = new Get(metadata.getBytes());
-		Result rs;
+		if (content.containsKey("ip_dst_addr"))
+		{
+			dest = content.get("ip_dst_addr").toString();
+			
+			if(RangeChecker.checkRange(loaded_whitelist, dest))
+				host = dest;	
+		}
 
-		try {
-			rs = table.get(get);
+		alert.put("designated_host", host);
+		alert.put("description", content.get("original_string").toString());
+		alert.put("priority", "MED");	
 
-			if (rs.size() > 0)
-				return true;
-			else
-				return false;
+		String alert_id = generateAlertId(source, dest, 0);
 
-		} catch (IOException e) {
+		alert.put("alert_id", alert_id);
+		alerts.put(alert_id, alert);
 
-			e.printStackTrace();
-		}
+		alert.put("enrichment", enrichment);
 
-		return false;
+		return alerts;
 
 	}
 
-	public boolean refresh() throws Exception {
+	@Override
+	public boolean containsAlertId(String alert) {
 		// TODO Auto-generated method stub
 		return false;
 	}
 
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-
-	@SuppressWarnings("unchecked")
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-		JSONObject alert = new JSONObject();
-
-		JSONObject content = (JSONObject) raw_message.get("message");
-		String source_ip = content.get("ip_src_addr").toString();
-		String dst_ip = content.get("ip_dst_addr").toString();
+	protected String generateAlertId(String source_ip, String dst_ip,
+			int alert_type) {
 
-		String source = null;
+		String key = makeKey(source_ip, dst_ip, alert_type);
 
-		if (loaded_whitelist.contains(source_ip))
-			source = source_ip;
-		else if (loaded_whitelist.contains(dst_ip))
-			source = dst_ip;
-		else
-			source = "unknown";
+		if (cache.getIfPresent(key) != null)
+			return cache.getIfPresent(key);
 
-		alert.put("title", "Appliance alert for: " + source_ip + "->" + dst_ip);
-		alert.put("priority", "1");
-		alert.put("type", "error");
-		alert.put("designated_host", source);
-		alert.put("source", source_ip);
-		alert.put("dest", dst_ip);
-		alert.put("body", "Appliance alert for: " + source_ip + "->" + dst_ip);
+		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
 
-		String alert_id = generateAlertId(source_ip, dst_ip, 0);
+		cache.put(key, new_UUID);
+		key = makeKey(dst_ip, source_ip, alert_type);
+		cache.put(key, new_UUID);
 
-		alert.put("reference_id", alert_id);
-		alerts.put(alert_id, alert);
-		
-		LOG.trace("[OpenSOC] Returning alert: " + alerts);
+		return new_UUID;
 
-		 return alerts;
 	}
 
-
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
+	private String makeKey(String ip1, String ip2, int alert_type) {
+		return (ip1 + "-" + ip2 + "-" + alert_type);
 	}
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/CIFAlertsAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/CIFAlertsAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/CIFAlertsAdapter.java
new file mode 100644
index 0000000..5e54556
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/CIFAlertsAdapter.java
@@ -0,0 +1,311 @@
package com.opensoc.alerts.adapters;

import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.apache.commons.validator.routines.InetAddressValidator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.opensoc.alerts.interfaces.AlertsAdapter;

/**
 * {@link AlertsAdapter} that raises an alert whenever an incoming message
 * carries non-empty enrichment data under a configured CIF (Collective
 * Intelligence Framework) enrichment tag.
 *
 * <p>During {@link #initialize()} the adapter loads CIDR ranges from a
 * whitelist and a blacklist HBase table; the whitelist is used to decide
 * which endpoint of the flow is reported as the {@code designated_host}.
 * Alert ids are de-duplicated per (source, dest, type) triple through a
 * bounded, time-expiring Guava cache so repeated traffic between the same
 * pair reuses one id until the cache entry expires.
 */
@SuppressWarnings("serial")
public class CIFAlertsAdapter implements AlertsAdapter, Serializable {

	/** Key under the message's "enrichment" object that holds CIF results. */
	String enrichment_tag;

	HTableInterface blacklist_table;
	HTableInterface whitelist_table;
	InetAddressValidator ipvalidator = new InetAddressValidator();
	String _whitelist_table_name;
	String _blacklist_table_name;
	String _quorum;
	String _port;
	String _topologyname;
	Configuration conf = null;

	/** Maps "src-dst-type" (both orderings) to a previously issued alert id. */
	Cache<String, String> cache;
	String _topology_name;

	/** CIDR ranges read from the whitelist table during initialize(). */
	Set<String> loaded_whitelist = new HashSet<String>();
	/** CIDR ranges read from the blacklist table during initialize(). */
	Set<String> loaded_blacklist = new HashSet<String>();

	protected static final Logger LOG = LoggerFactory
			.getLogger(CIFAlertsAdapter.class);

	/**
	 * Builds the adapter from a string-to-string config map.
	 *
	 * <p>Required keys: {@code whitelist_table_name},
	 * {@code blacklist_table_name}, {@code quorum}, {@code port},
	 * {@code _MAX_CACHE_SIZE_OBJECTS_NUM}, {@code _MAX_TIME_RETAIN_MINUTES}.
	 * Optional: {@code enrichment_tag} (absent/null means {@link #alert}
	 * will never find its tag and always returns null).
	 *
	 * @param config adapter configuration; see required keys above
	 */
	public CIFAlertsAdapter(Map<String, String> config) {
		try {

			if (!config.containsKey("whitelist_table_name"))
				throw new Exception("Whitelist table name is missing");

			_whitelist_table_name = config.get("whitelist_table_name");

			if (!config.containsKey("blacklist_table_name"))
				throw new Exception("Blacklist table name is missing");

			_blacklist_table_name = config.get("blacklist_table_name");

			if (!config.containsKey("quorum"))
				throw new Exception("Quorum name is missing");

			_quorum = config.get("quorum");

			if (!config.containsKey("port"))
				throw new Exception("port name is missing");

			_port = config.get("port");

			if (!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");

			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));

			if (!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");

			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
					.get("_MAX_TIME_RETAIN_MINUTES"));

			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
					.build();

			enrichment_tag = config.get("enrichment_tag");

		} catch (Exception e) {
			// NOTE(review): exiting with status 0 masks the failure from any
			// supervisor; a non-zero code (or rethrowing) would be more
			// correct. Kept as-is to preserve existing startup behavior.
			System.out.println("Could not initialize alerts adapter");
			e.printStackTrace();
			System.exit(0);
		}
	}

	/**
	 * Connects to HBase, verifies both tables exist, and loads the white and
	 * black list row keys into memory.
	 *
	 * @return true on success, false if any step failed (the failure is
	 *         printed, not rethrown)
	 */
	@SuppressWarnings("resource")
	@Override
	public boolean initialize() {

		conf = HBaseConfiguration.create();
		// Quorum/port are expected to come from hbase-site.xml on the
		// classpath rather than being set programmatically.
		// conf.set("hbase.zookeeper.quorum", _quorum);
		// conf.set("hbase.zookeeper.property.clientPort", _port);

		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);
		LOG.trace("[OpenSOC] Whitelist table name: " + _whitelist_table_name);
		// was mislabeled "Whitelist table name" for the blacklist value
		LOG.trace("[OpenSOC] Blacklist table name: " + _blacklist_table_name);
		LOG.trace("[OpenSOC] ZK Client/port: "
				+ conf.get("hbase.zookeeper.quorum") + " -> "
				+ conf.get("hbase.zookeeper.property.clientPort"));

		try {

			LOG.trace("[OpenSOC] Attempting to connect to hbase");

			HConnection connection = HConnectionManager.createConnection(conf);

			LOG.trace("[OpenSOC] CONNECTED TO HBASE");

			HBaseAdmin hba = new HBaseAdmin(conf);
			try {
				if (!hba.tableExists(_whitelist_table_name))
					throw new Exception("Whitelist table doesn't exist");

				if (!hba.tableExists(_blacklist_table_name))
					throw new Exception("Blacklist table doesn't exist");
			} finally {
				// close the admin handle once; it is not needed for scanning
				hba.close();
			}

			whitelist_table = new HTable(conf, _whitelist_table_name);

			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _whitelist_table_name);
			blacklist_table = new HTable(conf, _blacklist_table_name);
			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _blacklist_table_name);

			if (connection == null || whitelist_table == null
					|| blacklist_table == null)
				throw new Exception("Unable to initialize hbase connection");

			loadRowKeys(whitelist_table, loaded_whitelist);
			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());

			loadRowKeys(blacklist_table, loaded_blacklist);
			// was mislabeled "READ IN WHITELIST" for the blacklist load
			LOG.trace("[OpenSOC] READ IN BLACKLIST: " + loaded_blacklist.size());

			return true;
		} catch (Exception e) {

			e.printStackTrace();
		}

		return false;

	}

	/**
	 * Scans every row key of {@code table} into {@code dest}, then closes the
	 * table. Scan errors are logged and swallowed (best-effort load),
	 * matching the original behavior.
	 */
	private void loadRowKeys(HTableInterface table, Set<String> dest)
			throws Exception {
		ResultScanner rs = table.getScanner(new Scan());
		try {
			for (Result r = rs.next(); r != null; r = rs.next()) {
				dest.add(Bytes.toString(r.getRow()));
			}
		} catch (Exception e) {
			LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
			e.printStackTrace();
		} finally {
			rs.close(); // always close the ResultScanner!
		}
		table.close();
	}

	@Override
	public boolean refresh() throws Exception {
		return true;
	}

	/**
	 * Produces at most one alert for {@code raw_message}.
	 *
	 * @param raw_message JSON envelope; must hold the payload under "message"
	 *                    and, to alert, an "enrichment" object containing a
	 *                    non-empty sub-object under {@link #enrichment_tag}
	 * @return a single-entry map of alert id to alert JSON, or null when no
	 *         enrichment/tag/elements are present
	 */
	@SuppressWarnings("unchecked")
	@Override
	public Map<String, JSONObject> alert(JSONObject raw_message) {

		System.out.println("LOOKING FOR ENRICHMENT TAG: " + enrichment_tag);

		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
		JSONObject content = (JSONObject) raw_message.get("message");

		JSONObject enrichment = null;

		if (raw_message.containsKey("enrichment"))
			enrichment = (JSONObject) raw_message.get("enrichment");
		else
			return null;

		if (enrichment.containsKey(enrichment_tag)) {

			System.out.println("FOUND TAG: " + enrichment_tag);

			JSONObject cif = (JSONObject) enrichment.get(enrichment_tag);

			int cnt = 0;
			Object enriched_key = null;

			// Find any sub-object with at least one element; remember its key
			// so we can report which message field the CIF hit was on.
			for (Object key : cif.keySet()) {
				JSONObject tmp = (JSONObject) cif.get(key);
				cnt = cnt + tmp.size();
				if (tmp.size() > 0)
					enriched_key = key;
			}

			if (cnt == 0) {
				System.out.println("TAG HAS NO ELEMENTS");
				return null;
			}

			JSONObject alert = new JSONObject();

			String source = "unknown";
			String dest = "unknown";
			String host = "unknown";

			if (content.containsKey("ip_src_addr")) {
				source = content.get("ip_src_addr").toString();

				if (RangeChecker.checkRange(loaded_whitelist, source))
					host = source;
			}

			if (content.containsKey("ip_dst_addr")) {
				dest = content.get("ip_dst_addr").toString();

				if (RangeChecker.checkRange(loaded_whitelist, dest))
					host = dest;
			}

			JSONObject cifQualifier = (JSONObject) cif.get(enriched_key);

			alert.put("designated_host", host);
			// Guard against NPE: the enriched key may not exist in the
			// message payload itself (was content.get(enriched_key).toString()).
			Object matchedValue = content.get(enriched_key);
			String description = new StringBuilder()
					.append(host)
					.append(" communicated with a host (")
					.append(matchedValue == null ? "unknown" : matchedValue.toString())
					.append(") identified as ")
					.append(cifQualifier.keySet().iterator().next().toString())
					.append(" by CIF")
					.toString();
			alert.put("description", description);
			alert.put("priority", "MED");

			String alert_id = generateAlertId(source, dest, 0);

			alert.put("alert_id", alert_id);
			alerts.put(alert_id, alert);

			alert.put("enrichment", enrichment);

			return alerts;
		} else {
			System.out.println("DID NOT FIND TAG: " + enrichment_tag);
			return null;
		}

	}

	@Override
	public boolean containsAlertId(String alert) {
		// TODO Auto-generated method stub
		return false;
	}

	/**
	 * Returns a (cache-lifetime) stable alert id for the triple. The id is
	 * stored under both address orderings so A-&gt;B and B-&gt;A share one id.
	 */
	protected String generateAlertId(String source_ip, String dst_ip,
			int alert_type) {

		String key = makeKey(source_ip, dst_ip, alert_type);

		// single cache lookup (was two consecutive getIfPresent calls)
		String cached = cache.getIfPresent(key);
		if (cached != null)
			return cached;

		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();

		cache.put(key, new_UUID);
		cache.put(makeKey(dst_ip, source_ip, alert_type), new_UUID);

		return new_UUID;

	}

	/** Cache key of the form "ip1-ip2-type". */
	private String makeKey(String ip1, String ip2, int alert_type) {
		return (ip1 + "-" + ip2 + "-" + alert_type);
	}
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
index 6bfed85..d8bbf16 100644
--- a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
@@ -1,6 +1,5 @@
 package com.opensoc.alerts.adapters;
 
-import java.io.IOException;
 import java.io.Serializable;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -12,7 +11,6 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.validator.routines.InetAddressValidator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
@@ -43,41 +41,73 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 	String _topologyname;
 	Configuration conf = null;
 
-	Cache<String, String>cache;
+	Cache<String, String> cache;
 	String _topology_name;
-	
+
 	Set<String> loaded_whitelist = new HashSet<String>();
 	Set<String> loaded_blacklist = new HashSet<String>();
 
 	protected static final Logger LOG = LoggerFactory
 			.getLogger(HbaseWhiteAndBlacklistAdapter.class);
 
-	public HbaseWhiteAndBlacklistAdapter(String whitelist_table_name,
-			String blacklist_table_name, String quorum, String port,
-			int _MAX_TIME_RETAIN, int _MAX_CACHE_SIZE) {
+	public HbaseWhiteAndBlacklistAdapter(Map<String, String> config) {
 
-		_whitelist_table_name = whitelist_table_name;
-		_blacklist_table_name = blacklist_table_name;
-		_quorum = quorum;
-		_port = port;
+		try {
+			if(!config.containsKey("whitelist_table_name"))
+				throw new Exception("Whitelist table name is missing");
+				
+			_whitelist_table_name = config.get("whitelist_table_name");
+			
+			if(!config.containsKey("blacklist_table_name"))
+				throw new Exception("Blacklist table name is missing");
+			
+			_blacklist_table_name = config.get("blacklist_table_name");
+			
+			if(!config.containsKey("quorum"))
+				throw new Exception("Quorum name is missing");
+			
+			_quorum = config.get("quorum");
+			
+			if(!config.containsKey("port"))
+				throw new Exception("port name is missing");
+			
+			_port = config.get("port");
 
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE)
-				.expireAfterWrite(_MAX_TIME_RETAIN, TimeUnit.MINUTES).build();
+			if(!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
+				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
+			
+			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
+					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
+			
+			if(!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
+				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
+			
+			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
+					.get("_MAX_TIME_RETAIN_MINUTES"));
 
-	}
-	
+			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
+					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
+					.build();
+		} catch (Exception e) {
+			System.out.println("Could not initialize Alerts Adapter");
+			e.printStackTrace();
+			System.exit(0);
+		}
 
+	}
 
 	public boolean initialize() {
 
 		conf = HBaseConfiguration.create();
-		conf.set("hbase.zookeeper.quorum", _quorum);
-		conf.set("hbase.zookeeper.property.clientPort", _port);
+		//conf.set("hbase.zookeeper.quorum", _quorum);
+		//conf.set("hbase.zookeeper.property.clientPort", _port);
 
-		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);		
+		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);
 		LOG.trace("[OpenSOC] Whitelist table name: " + _whitelist_table_name);
 		LOG.trace("[OpenSOC] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[OpenSOC] ZK Client/port: " + conf.get("hbase.zookeeper.quorum") + " -> " + conf.get("hbase.zookeeper.property.clientPort"));
+		LOG.trace("[OpenSOC] ZK Client/port: "
+				+ conf.get("hbase.zookeeper.quorum") + " -> "
+				+ conf.get("hbase.zookeeper.property.clientPort"));
 
 		try {
 
@@ -97,18 +127,15 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			whitelist_table = new HTable(conf, _whitelist_table_name);
 
-			LOG.trace("[OpenSOC] CONNECTED TO TABLE: "
-					+ _whitelist_table_name);
+			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _whitelist_table_name);
 			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[OpenSOC] CONNECTED TO TABLE: "
-					+ _blacklist_table_name);
+			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _blacklist_table_name);
 
 			if (connection == null || whitelist_table == null
 					|| blacklist_table == null)
 				throw new Exception("Unable to initialize hbase connection");
-			
-			Scan scan = new Scan();
 
+			Scan scan = new Scan();
 
 			ResultScanner rs = whitelist_table.getScanner(scan);
 			try {
@@ -120,16 +147,15 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 				e.printStackTrace();
 			} finally {
 				rs.close(); // always close the ResultScanner!
+				hba.close();
 			}
 			whitelist_table.close();
 
 			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());
-			
-			
-			 scan = new Scan();
 
+			scan = new Scan();
 
-			 rs = blacklist_table.getScanner(scan);
+			rs = blacklist_table.getScanner(scan);
 			try {
 				for (Result r = rs.next(); r != null; r = rs.next()) {
 					loaded_blacklist.add(Bytes.toString(r.getRow()));
@@ -139,11 +165,15 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 				e.printStackTrace();
 			} finally {
 				rs.close(); // always close the ResultScanner!
+				hba.close();
 			}
 			blacklist_table.close();
 
 			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());
 
+			rs.close(); // always close the ResultScanner!
+			hba.close();
+
 			return true;
 		} catch (Exception e) {
 
@@ -172,7 +202,6 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 	}
 
-
 	public boolean refresh() throws Exception {
 		// TODO Auto-generated method stub
 		return false;
@@ -188,8 +217,13 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
 
 		JSONObject content = (JSONObject) raw_message.get("message");
-		
-		if (!content.containsKey("ip_src_addr") || !content.containsKey("ip_dst_addr") ) {
+		JSONObject enrichment = null;
+
+		if (raw_message.containsKey("enrichment"))
+			enrichment = (JSONObject) raw_message.get("enrichment");
+
+		if (!content.containsKey("ip_src_addr")
+				|| !content.containsKey("ip_dst_addr")) {
 
 			int alert_type = 0;
 
@@ -201,17 +235,18 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 			alert.put("designated_host", "Uknown");
 			alert.put("source", "NA");
 			alert.put("dest", "NA");
-			alert.put(
-					"body",
-					"Source or destination IP is missing");
+			alert.put("body", "Source or destination IP is missing");
 
 			String alert_id = UUID.randomUUID().toString();
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
 
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
+
 			LOG.trace("[OpenSOC] Returning alert: " + alerts);
-			
+
 			return alerts;
 
 		}
@@ -240,7 +275,9 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
-			
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
+
 			LOG.trace("[OpenSOC] Returning alert: " + alerts);
 
 			return alerts;
@@ -268,7 +305,9 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
-			
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
+
 			LOG.trace("[OpenSOC] Returning alert: " + alerts);
 
 			return alerts;
@@ -281,7 +320,6 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 			designated_host = source_ip;
 		else if (loaded_whitelist.contains(dst_ip))
 			designated_host = dst_ip;
-		
 
 		if (designated_host == null) {
 			int alert_type = 3;
@@ -303,7 +341,9 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
-			
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
+
 			LOG.trace("[OpenSOC] Returning alert: " + alerts);
 
 			return alerts;
@@ -331,6 +371,8 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
 
 		}
 
@@ -355,6 +397,8 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
 
 		}
 
@@ -378,6 +422,8 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
 
 		}
 
@@ -401,6 +447,8 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 
 			alert.put("reference_id", alert_id);
 			alerts.put(alert_id, alert);
+			if (enrichment != null)
+				alert.put("enrichment", enrichment);
 
 		}
 
@@ -410,8 +458,6 @@ public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
 			return alerts;
 	}
 
-
-
 	public boolean containsAlertId(String alert) {
 		// TODO Auto-generated method stub
 		return false;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/KeywordsAlertAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/KeywordsAlertAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/KeywordsAlertAdapter.java
new file mode 100644
index 0000000..e4df273
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/KeywordsAlertAdapter.java
@@ -0,0 +1,274 @@
package com.opensoc.alerts.adapters;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.validator.routines.InetAddressValidator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.json.simple.JSONObject;
import org.apache.log4j.Logger;

/**
 * Alerts adapter that fires when any configured keyword occurs in the raw
 * message text, unless an "exception" keyword is also present.
 *
 * <p>Keywords and exceptions are pipe-separated lists in the config map.
 * White/black list CIDR ranges are loaded from HBase at initialize() time;
 * the whitelist decides which flow endpoint becomes "designated_host".
 * Alert-id caching is provided by {@code AbstractAlertAdapter} via
 * {@code generateCache}/{@code generateAlertId}.
 */
public class KeywordsAlertAdapter extends AbstractAlertAdapter {

	HTableInterface blacklist_table;
	HTableInterface whitelist_table;
	InetAddressValidator ipvalidator = new InetAddressValidator();
	String _whitelist_table_name;
	String _blacklist_table_name;
	String _quorum;
	String _port;
	String _topologyname;
	Configuration conf = null;

	String _topology_name;

	/** CIDR ranges read from the whitelist table during initialize(). */
	Set<String> loaded_whitelist = new HashSet<String>();
	/** CIDR ranges read from the blacklist table during initialize(). */
	Set<String> loaded_blacklist = new HashSet<String>();

	/** Keywords whose presence in a message raises an alert. */
	List<String> keywordList;
	/** Keywords whose presence suppresses an otherwise-matching alert. */
	List<String> keywordExceptionList;

	// was: Logger.getLogger(AllAlertAdapter.class) — log lines were
	// attributed to the wrong adapter class
	protected static final Logger LOG = Logger
			.getLogger(KeywordsAlertAdapter.class);

	/**
	 * Builds the adapter from a string-to-string config map.
	 *
	 * <p>Required keys: {@code keywords} (pipe-separated),
	 * {@code whitelist_table_name}, {@code blacklist_table_name},
	 * {@code quorum}, {@code port}, {@code _MAX_CACHE_SIZE_OBJECTS_NUM},
	 * {@code _MAX_TIME_RETAIN_MINUTES}. Optional: {@code exceptions}
	 * (pipe-separated).
	 */
	public KeywordsAlertAdapter(Map<String, String> config) {
		try {

			if (!config.containsKey("keywords"))
				throw new Exception("Keywords are missing");

			keywordList = Arrays.asList(config.get("keywords").split("\\|"));

			if (config.containsKey("exceptions")) {
				keywordExceptionList = Arrays.asList(config.get("exceptions").split("\\|"));
			} else {
				keywordExceptionList = new ArrayList<String>();
			}

			if (!config.containsKey("whitelist_table_name"))
				throw new Exception("Whitelist table name is missing");

			_whitelist_table_name = config.get("whitelist_table_name");

			if (!config.containsKey("blacklist_table_name"))
				throw new Exception("Blacklist table name is missing");

			_blacklist_table_name = config.get("blacklist_table_name");

			if (!config.containsKey("quorum"))
				throw new Exception("Quorum name is missing");

			_quorum = config.get("quorum");

			if (!config.containsKey("port"))
				throw new Exception("port name is missing");

			_port = config.get("port");

			if (!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");

			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));

			if (!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");

			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
					.get("_MAX_TIME_RETAIN_MINUTES"));

			generateCache(_MAX_CACHE_SIZE_OBJECTS_NUM, _MAX_TIME_RETAIN_MINUTES);

		} catch (Exception e) {
			// NOTE(review): exiting with status 0 masks the failure; kept
			// as-is to preserve existing startup behavior.
			System.out.println("Could not initialize Alerts Adapter");
			e.printStackTrace();
			System.exit(0);
		}
	}

	/**
	 * Connects to HBase, verifies both tables exist, and loads white and
	 * black list row keys into memory.
	 *
	 * @return true on success, false if any step failed
	 */
	@Override
	public boolean initialize() {
		conf = HBaseConfiguration.create();
		// Quorum/port are expected to come from hbase-site.xml on the
		// classpath rather than being set programmatically.
		// conf.set("hbase.zookeeper.quorum", _quorum);
		// conf.set("hbase.zookeeper.property.clientPort", _port);

		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);
		LOG.trace("[OpenSOC] Whitelist table name: " + _whitelist_table_name);
		// was mislabeled "Whitelist table name" for the blacklist value
		LOG.trace("[OpenSOC] Blacklist table name: " + _blacklist_table_name);
		LOG.trace("[OpenSOC] ZK Client/port: "
				+ conf.get("hbase.zookeeper.quorum") + " -> "
				+ conf.get("hbase.zookeeper.property.clientPort"));

		try {

			LOG.trace("[OpenSOC] Attempting to connect to hbase");

			HConnection connection = HConnectionManager.createConnection(conf);

			LOG.trace("[OpenSOC] CONNECTED TO HBASE");

			HBaseAdmin hba = new HBaseAdmin(conf);
			try {
				if (!hba.tableExists(_whitelist_table_name))
					throw new Exception("Whitelist table doesn't exist");

				if (!hba.tableExists(_blacklist_table_name))
					throw new Exception("Blacklist table doesn't exist");
			} finally {
				// close the admin handle once; it is not needed for scanning
				hba.close();
			}

			whitelist_table = new HTable(conf, _whitelist_table_name);

			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _whitelist_table_name);
			blacklist_table = new HTable(conf, _blacklist_table_name);
			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _blacklist_table_name);

			if (connection == null || whitelist_table == null
					|| blacklist_table == null)
				throw new Exception("Unable to initialize hbase connection");

			loadRowKeys(whitelist_table, loaded_whitelist);

			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());

			System.out.println("LOADED WHITELIST IS: ");

			for (String str : loaded_whitelist)
				System.out.println("WHITELIST: " + str);

			loadRowKeys(blacklist_table, loaded_blacklist);

			// was mislabeled "READ IN WHITELIST" for the blacklist load
			LOG.trace("[OpenSOC] READ IN BLACKLIST: " + loaded_blacklist.size());

			return true;
		} catch (Exception e) {

			e.printStackTrace();
		}

		return false;
	}

	/**
	 * Scans every row key of {@code table} into {@code dest}, then closes the
	 * table. Scan errors are logged and swallowed (best-effort load),
	 * matching the original behavior.
	 */
	private void loadRowKeys(HTableInterface table, Set<String> dest)
			throws Exception {
		ResultScanner rs = table.getScanner(new Scan());
		try {
			for (Result r = rs.next(); r != null; r = rs.next()) {
				dest.add(Bytes.toString(r.getRow()));
			}
		} catch (Exception e) {
			LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
			e.printStackTrace();
		} finally {
			rs.close(); // always close the ResultScanner!
		}
		table.close();
	}

	@Override
	public boolean refresh() throws Exception {
		// TODO Auto-generated method stub
		return false;
	}

	@Override
	public boolean containsAlertId(String alert) {
		// TODO Auto-generated method stub
		return false;
	}

	/**
	 * Produces at most one alert: the first keyword found in the message
	 * text fires, unless any exception keyword is also present (in which
	 * case the whole message is suppressed).
	 *
	 * @param raw_message JSON envelope with the payload under "message" and
	 *                    an optional "enrichment" object
	 * @return single-entry map of alert id to alert JSON, or null when no
	 *         keyword matches or an exception keyword suppresses the match
	 */
	@Override
	public Map<String, JSONObject> alert(JSONObject raw_message) {

		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
		JSONObject content = (JSONObject) raw_message.get("message");

		JSONObject enrichment = null;
		if (raw_message.containsKey("enrichment"))
			enrichment = (JSONObject) raw_message.get("enrichment");

		for (String keyword : keywordList) {
			if (content.toString().contains(keyword)) {

				// check it doesn't have an "exception" keyword in it
				for (String exception : keywordExceptionList) {
					if (content.toString().contains(exception)) {
						LOG.info("[OpenSOC] KeywordAlertsAdapter: Omitting alert due to exclusion: " + exception);
						return null;
					}
				}

				LOG.info("[OpenSOC] KeywordAlertsAdapter: Found match for " + keyword);
				JSONObject alert = new JSONObject();

				String source = "unknown";
				String dest = "unknown";
				String host = "unknown";

				if (content.containsKey("ip_src_addr")) {
					source = content.get("ip_src_addr").toString();

					if (RangeChecker.checkRange(loaded_whitelist, source))
						host = source;
				}

				if (content.containsKey("ip_dst_addr")) {
					dest = content.get("ip_dst_addr").toString();

					if (RangeChecker.checkRange(loaded_whitelist, dest))
						host = dest;
				}

				alert.put("designated_host", host);
				// NOTE(review): assumes every matching message carries
				// "original_string" — NPE otherwise; confirm upstream parsers
				alert.put("description", content.get("original_string").toString());
				alert.put("priority", "MED");

				String alert_id = generateAlertId(source, dest, 0);

				alert.put("alert_id", alert_id);
				alerts.put(alert_id, alert);

				alert.put("enrichment", enrichment);

				return alerts;
			}
		}

		return null;
	}

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/RangeChecker.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/RangeChecker.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/RangeChecker.java
new file mode 100644
index 0000000..1999a62
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/RangeChecker.java
@@ -0,0 +1,23 @@
package com.opensoc.alerts.adapters;

import java.util.Set;

import org.apache.commons.net.util.SubnetUtils;

/**
 * Utility for testing whether an IP address falls inside any of a set of
 * CIDR ranges. Called on every processed message by the alerts adapters, so
 * the per-comparison System.out debug output was removed.
 */
public class RangeChecker {

	private RangeChecker() {
		// static utility class — not instantiable
	}

	/**
	 * Returns true if {@code ip} lies within at least one network in
	 * {@code CIDR_networks}.
	 *
	 * <p>NOTE(review): SubnetUtils.isInRange excludes the network and
	 * broadcast addresses by default (setInclusiveHostCount(true) would
	 * include them) — confirm that is the intended whitelist semantics.
	 *
	 * @param CIDR_networks ranges in CIDR notation, e.g. "10.0.0.0/8"
	 * @param ip            dotted-quad IPv4 address to test
	 * @return true on the first matching range, false if none match
	 * @throws IllegalArgumentException if a range is not valid CIDR notation
	 *                                  (thrown by SubnetUtils)
	 */
	static boolean checkRange(Set<String> CIDR_networks, String ip) {
		for (String network : CIDR_networks) {
			SubnetUtils utils = new SubnetUtils(network);
			if (utils.getInfo().isInRange(ip)) {
				return true;
			}
		}

		// no matches
		return false;
	}
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/ThreatAlertsAdapter.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/ThreatAlertsAdapter.java b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/ThreatAlertsAdapter.java
new file mode 100644
index 0000000..c85087d
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Alerts/src/main/java/com/opensoc/alerts/adapters/ThreatAlertsAdapter.java
@@ -0,0 +1,311 @@
package com.opensoc.alerts.adapters;

import java.io.Serializable;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

import org.apache.commons.validator.routines.InetAddressValidator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.opensoc.alerts.interfaces.AlertsAdapter;

/**
 * Alerts adapter that raises an alert whenever an upstream enrichment bolt has
 * attached non-empty threat-intelligence data (under {@code enrichment_tag})
 * to a message. Whitelist/blacklist CIDR sets are bulk-loaded from HBase once
 * in {@link #initialize()}; the whitelist is used to pick which endpoint of
 * the flow is the "designated host". Alert ids are deduplicated per
 * (src, dst, type) pair via a bounded, time-expiring Guava cache.
 */
@SuppressWarnings("serial")
public class ThreatAlertsAdapter implements AlertsAdapter, Serializable {

	// Key under raw_message.enrichment that holds the threat-intel hits.
	String enrichment_tag;

	HTableInterface blacklist_table;
	HTableInterface whitelist_table;
	InetAddressValidator ipvalidator = new InetAddressValidator();
	String _whitelist_table_name;
	String _blacklist_table_name;
	String _quorum;
	String _port;
	String _topologyname;
	Configuration conf = null;

	// Maps "src-dst-type" keys to a shared alert UUID (both directions).
	Cache<String, String> cache;
	String _topology_name;

	// CIDR strings loaded from the HBase row keys of the two tables.
	Set<String> loaded_whitelist = new HashSet<String>();
	Set<String> loaded_blacklist = new HashSet<String>();

	protected static final Logger LOG = LoggerFactory
			.getLogger(ThreatAlertsAdapter.class);

	/**
	 * Validates and captures the adapter configuration.
	 *
	 * Required keys: whitelist_table_name, blacklist_table_name, quorum, port,
	 * _MAX_CACHE_SIZE_OBJECTS_NUM, _MAX_TIME_RETAIN_MINUTES. Optional:
	 * enrichment_tag.
	 *
	 * @throws RuntimeException if any required key is missing or malformed
	 */
	public ThreatAlertsAdapter(Map<String, String> config) {
		try {

			if (!config.containsKey("whitelist_table_name"))
				throw new Exception("Whitelist table name is missing");

			_whitelist_table_name = config.get("whitelist_table_name");

			if (!config.containsKey("blacklist_table_name"))
				throw new Exception("Blacklist table name is missing");

			_blacklist_table_name = config.get("blacklist_table_name");

			if (!config.containsKey("quorum"))
				throw new Exception("Quorum name is missing");

			_quorum = config.get("quorum");

			if (!config.containsKey("port"))
				throw new Exception("port name is missing");

			_port = config.get("port");

			if (!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");

			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));

			if (!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");

			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
					.get("_MAX_TIME_RETAIN_MINUTES"));

			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
					.build();

			enrichment_tag = config.get("enrichment_tag");

		} catch (Exception e) {
			System.out.println("Could not initialize alerts adapter");
			e.printStackTrace();
			// Previously this called System.exit(0), which killed the whole
			// JVM with a *success* exit code from inside a constructor.
			// Fail fast with an exception so the caller decides what to do.
			throw new RuntimeException("Could not initialize alerts adapter", e);
		}
	}

	/**
	 * Connects to HBase, verifies both tables exist, and bulk-loads every row
	 * key of the whitelist and blacklist tables into in-memory sets. All HBase
	 * resources opened here are closed before returning.
	 *
	 * @return true on success (scan failures inside a table are logged and
	 *         tolerated, matching the original best-effort behavior), false
	 *         if the connection or table checks fail
	 */
	@SuppressWarnings("resource")
    @Override
	public boolean initialize() {

		conf = HBaseConfiguration.create();
		// conf.set("hbase.zookeeper.quorum", _quorum);
		// conf.set("hbase.zookeeper.property.clientPort", _port);

		LOG.trace("[OpenSOC] Connecting to hbase with conf:" + conf);
		LOG.trace("[OpenSOC] Whitelist table name: " + _whitelist_table_name);
		LOG.trace("[OpenSOC] Blacklist table name: " + _blacklist_table_name);
		LOG.trace("[OpenSOC] ZK Client/port: "
				+ conf.get("hbase.zookeeper.quorum") + " -> "
				+ conf.get("hbase.zookeeper.property.clientPort"));

		HConnection connection = null;
		HBaseAdmin hba = null;
		try {

			LOG.trace("[OpenSOC] Attempting to connect to hbase");

			connection = HConnectionManager.createConnection(conf);

			LOG.trace("[OpenSOC] CONNECTED TO HBASE");

			hba = new HBaseAdmin(conf);

			if (!hba.tableExists(_whitelist_table_name))
				throw new Exception("Whitelist table doesn't exist");

			if (!hba.tableExists(_blacklist_table_name))
				throw new Exception("Blacklist table doesn't exist");

			whitelist_table = new HTable(conf, _whitelist_table_name);

			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _whitelist_table_name);
			blacklist_table = new HTable(conf, _blacklist_table_name);
			LOG.trace("[OpenSOC] CONNECTED TO TABLE: " + _blacklist_table_name);

			// Load every whitelist row key (a CIDR string) into memory.
			ResultScanner rs = whitelist_table.getScanner(new Scan());
			try {
				for (Result r = rs.next(); r != null; r = rs.next()) {
					loaded_whitelist.add(Bytes.toString(r.getRow()));
				}
			} catch (Exception e) {
				LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
				e.printStackTrace();
			} finally {
				rs.close(); // always close the ResultScanner!
			}
			whitelist_table.close();

			LOG.trace("[OpenSOC] READ IN WHITELIST: " + loaded_whitelist.size());

			// Load every blacklist row key the same way.
			rs = blacklist_table.getScanner(new Scan());
			try {
				for (Result r = rs.next(); r != null; r = rs.next()) {
					loaded_blacklist.add(Bytes.toString(r.getRow()));
				}
			} catch (Exception e) {
				LOG.trace("[OpenSOC] COULD NOT READ FROM HBASE");
				e.printStackTrace();
			} finally {
				rs.close(); // always close the ResultScanner!
			}
			blacklist_table.close();

			LOG.trace("[OpenSOC] READ IN BLACKLIST: " + loaded_blacklist.size());

			return true;
		} catch (Exception e) {

			e.printStackTrace();
		} finally {
			// Close admin and connection exactly once; the original leaked
			// the HConnection and closed rs/hba multiple times.
			try {
				if (hba != null)
					hba.close();
			} catch (Exception e) {
				e.printStackTrace();
			}
			try {
				if (connection != null)
					connection.close();
			} catch (Exception e) {
				e.printStackTrace();
			}
		}

		return false;

	}

	/** No-op: the white/blacklists are loaded once in initialize(). */
	@Override
	public boolean refresh() throws Exception {
		return true;
	}

	/**
	 * Emits one alert if the message carries non-empty threat-intel
	 * enrichment under {@code enrichment_tag}.
	 *
	 * @param raw_message envelope with "message" (the telemetry fields) and
	 *                    optionally "enrichment"
	 * @return map of alert_id -> alert JSON, or null when there is nothing to
	 *         alert on
	 */
	@SuppressWarnings("unchecked")
    @Override
	public Map<String, JSONObject> alert(JSONObject raw_message) {

		System.out.println("LOOKING FOR ENRICHMENT TAG: " + enrichment_tag);

		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
		JSONObject content = (JSONObject) raw_message.get("message");

		JSONObject enrichment = null;

		if (raw_message.containsKey("enrichment"))
			enrichment = (JSONObject) raw_message.get("enrichment");
		else
			return null;

		if (enrichment.containsKey(enrichment_tag)) {

			System.out.println("FOUND TAG: " + enrichment_tag);

			JSONObject threat = (JSONObject) enrichment.get(enrichment_tag);

			// Count total threat-intel hits and remember the last field
			// (e.g. "ip_src_addr") that produced any.
			int cnt = 0;
			Object enriched_key = null;
			
			for (Object key : threat.keySet()) {
				JSONObject tmp = (JSONObject) threat.get(key);
				cnt = cnt + tmp.size();
				if (tmp.size() > 0)
					enriched_key = key;
			}

			if (cnt == 0) {
				System.out.println("TAG HAS NO ELEMENTS");
				return null;
			}

			JSONObject alert = new JSONObject();

			String source = "unknown";
			String dest = "unknown";
			String host = "unknown";

			// NOTE(review): both endpoint checks use loaded_whitelist, so the
			// "designated host" is whichever endpoint is inside the local
			// (whitelisted) ranges; loaded_blacklist is loaded but never
			// consulted here — confirm that is intentional.
			if (content.containsKey("ip_src_addr")) {
				source = content.get("ip_src_addr").toString();

				if (RangeChecker.checkRange(loaded_whitelist, source))
					host = source;
			}

			if (content.containsKey("ip_dst_addr")) {
				dest = content.get("ip_dst_addr").toString();

				if (RangeChecker.checkRange(loaded_whitelist, dest))
					host = dest;
			}
			
			JSONObject threatQualifier = (JSONObject) threat.get(enriched_key);
			
			alert.put("designated_host", host);
			String description =

					new StringBuilder()
					.append("Threat Intelligence match for ")
					.append(content.get(enriched_key).toString())
					.append(" from source: ")
					.append(threatQualifier.keySet().iterator().next().toString())
					.toString();	
			alert.put("description", description);
			alert.put("priority", "MED");

			String alert_id = generateAlertId(source, dest, 0);

			alert.put("alert_id", alert_id);
			alerts.put(alert_id, alert);

			alert.put("enrichment", enrichment);

			return alerts;
		} else {
			System.out.println("DID NOT FIND TAG: " + enrichment_tag);
			return null;
		}

	}

	/** Not implemented; always reports the id as unseen. */
	@Override
	public boolean containsAlertId(String alert) {
		// TODO Auto-generated method stub
		return false;
	}

	/**
	 * Returns a stable alert id for the (source, dest, type) pair, direction-
	 * insensitive: the same UUID is cached under both src-dst and dst-src
	 * keys until the cache entry expires.
	 */
	protected String generateAlertId(String source_ip, String dst_ip,
			int alert_type) {

		String key = makeKey(source_ip, dst_ip, alert_type);

		// Single cache lookup (the original called getIfPresent twice).
		String cached = cache.getIfPresent(key);
		if (cached != null)
			return cached;

		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();

		cache.put(key, new_UUID);
		key = makeKey(dst_ip, source_ip, alert_type);
		cache.put(key, new_UUID);

		return new_UUID;

	}

	/** Builds the cache key "ip1-ip2-type". */
	private String makeKey(String ip1, String ip2, int alert_type) {
		return (ip1 + "-" + ip2 + "-" + alert_type);
	}
}