You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metron.apache.org by rm...@apache.org on 2016/04/26 16:45:49 UTC

[01/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Repository: incubator-metron
Updated Branches:
  refs/heads/master 86f6deb3e -> 0117987ea


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/storm/kafka/EmitContext.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/storm/kafka/EmitContext.java b/metron-streaming/Metron-Common/src/main/java/storm/kafka/EmitContext.java
deleted file mode 100644
index 1f9ef59..0000000
--- a/metron-streaming/Metron-Common/src/main/java/storm/kafka/EmitContext.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package storm.kafka;
-
-import backtype.storm.task.TopologyContext;
-
-import java.io.Serializable;
-import java.util.EnumMap;
-import java.util.Map;
-
-public class EmitContext implements Cloneable,Serializable {
-  static final long serialVersionUID = 0xDEADBEEFL;
-
-  public enum Type{
-    MESSAGE_ID(PartitionManager.KafkaMessageId.class)
-    ,STREAM_ID(String.class)
-    ,TASK_ID(Integer.class)
-    ,UUID(String.class)
-    ,SPOUT_CONFIG(SpoutConfig.class)
-    ,OPEN_CONFIG(Map.class)
-    ,TOPOLOGY_CONTEXT(TopologyContext.class)
-    ;
-    Class<?> clazz;
-    Type(Class<?> clazz) {
-      this.clazz=  clazz;
-    }
-
-    public Class<?> clazz() {
-      return clazz;
-    }
-  }
-  public EmitContext() {
-    this(new EnumMap<>(Type.class));
-  }
-  public EmitContext(EnumMap<Type, Object> context) {
-    _context = context;
-  }
-  private EnumMap<Type, Object> _context;
-
-  public <T> EmitContext with(Type t, T o ) {
-    _context.put(t, t.clazz().cast(o));
-    return this;
-  }
-  public <T> void add(Type t, T o ) {
-    with(t, o);
-  }
-
-  public <T> T get(Type t) {
-    Object o = _context.get(t);
-    if(o == null) {
-      return null;
-    }
-    else {
-      return (T) o;
-    }
-  }
-
-  public EmitContext cloneContext() {
-    try {
-      return (EmitContext)this.clone();
-    } catch (CloneNotSupportedException e) {
-      throw new RuntimeException("Unable to clone emit context.", e);
-    }
-  }
-
-  /**
-   * Creates and returns a copy of this object.  The precise meaning
-   * of "copy" may depend on the class of the object. The general
-   * intent is that, for any object {@code x}, the expression:
-   * <blockquote>
-   * <pre>
-   * x.clone() != x</pre></blockquote>
-   * will be true, and that the expression:
-   * <blockquote>
-   * <pre>
-   * x.clone().getClass() == x.getClass()</pre></blockquote>
-   * will be {@code true}, but these are not absolute requirements.
-   * While it is typically the case that:
-   * <blockquote>
-   * <pre>
-   * x.clone().equals(x)</pre></blockquote>
-   * will be {@code true}, this is not an absolute requirement.
-   *
-   * By convention, the returned object should be obtained by calling
-   * {@code super.clone}.  If a class and all of its superclasses (except
-   * {@code Object}) obey this convention, it will be the case that
-   * {@code x.clone().getClass() == x.getClass()}.
-   *
-   * By convention, the object returned by this method should be independent
-   * of this object (which is being cloned).  To achieve this independence,
-   * it may be necessary to modify one or more fields of the object returned
-   * by {@code super.clone} before returning it.  Typically, this means
-   * copying any mutable objects that comprise the internal "deep structure"
-   * of the object being cloned and replacing the references to these
-   * objects with references to the copies.  If a class contains only
-   * primitive fields or references to immutable objects, then it is usually
-   * the case that no fields in the object returned by {@code super.clone}
-   * need to be modified.
-   *
-   * The method {@code clone} for class {@code Object} performs a
-   * specific cloning operation. First, if the class of this object does
-   * not implement the interface {@code Cloneable}, then a
-   * {@code CloneNotSupportedException} is thrown. Note that all arrays
-   * are considered to implement the interface {@code Cloneable} and that
-   * the return type of the {@code clone} method of an array type {@code T[]}
-   * is {@code T[]} where T is any reference or primitive type.
-   * Otherwise, this method creates a new instance of the class of this
-   * object and initializes all its fields with exactly the contents of
-   * the corresponding fields of this object, as if by assignment; the
-   * contents of the fields are not themselves cloned. Thus, this method
-   * performs a "shallow copy" of this object, not a "deep copy" operation.
-   *
-   * The class {@code Object} does not itself implement the interface
-   * {@code Cloneable}, so calling the {@code clone} method on an object
-   * whose class is {@code Object} will result in throwing an
-   * exception at run time.
-   *
-   * @return a clone of this instance.
-   * @throws CloneNotSupportedException if the object's class does not
-   *                                    support the {@code Cloneable} interface. Subclasses
-   *                                    that override the {@code clone} method can also
-   *                                    throw this exception to indicate that an instance cannot
-   *                                    be cloned.
-   * @see Cloneable
-   */
-  @Override
-  protected Object clone() throws CloneNotSupportedException {
-    EmitContext context = new EmitContext(_context.clone());
-    return context;
-  }
-}


[20/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/FireeyeExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/FireeyeExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/FireeyeExampleOutput
new file mode 100644
index 0000000..0210010
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/FireeyeExampleOutput
@@ -0,0 +1,90 @@
+<164>fenotify-3483808.2.alert: 1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js 
 HTTP
+<164>fenotify-793972.2.alert: ontrol: no-cache::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Exploit.Kit.Magnitude 
+<164>fenotify-797180.2.alert: 0.8::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36::~~Accept-Encoding: gzip, deflate, sdch::~~Accept-Language: en-US,en;q\=0.8::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Redirector 
+<164>fenotify-3483808.3.alert: /1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microads.me::~~Connection: Keep-Alive::~~::~~GET /files/microads/update/InjectScript.js HTTP/1.1::~~User-Agent: WinHttpClient::~~Host: www.microad cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=Malware.Binary 
+<164>fenotify-791429.2.alert: t: rapidvideohere.pw::~~Connection: Keep-Alive::~~::~~ dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Exploit.Kit.Magnitude 
+<164>fenotify-851777.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:27:43 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61395 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851777 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851777 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851901.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:56:45 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59131 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851901 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851901 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851980.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:23:51 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53295 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851980 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851980 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851795.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:19:05 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54975 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851795 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851795 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851805.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 03:23:14 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50807 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851805 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851805 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851844.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:19:41 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50767 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851844 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851844 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851782.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:18:22 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50940 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851782 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851782 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851940.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:57:19 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50646 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851940 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851940 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851881.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:13:15 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61237 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851881 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851881 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851839.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:33:19 UTC dvc=10.201.78.10 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49186 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851839 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851839 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851983.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:28:26 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54527 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851983 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851983 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851987.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:33:41 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51218 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851987 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851987 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852010.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:15:08 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55203 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852010 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852010 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852053.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:16:45 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62235 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852053 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852053 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852455.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:28:38 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=65175 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852455 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852455 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851887.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:24:54 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=56334 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851887 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851887 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851822.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:41:49 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49732 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851822 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851822 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851832.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:19:15 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62962 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851832 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851832 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851780.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:56:46 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54301 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851780 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851780 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851792.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:15:06 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=64831 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851792 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851792 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851806.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 03:24:05 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53417 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851806 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851806 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851840.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:00:58 UTC dvc=10.201.78.40 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50709 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851840 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851840 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851929.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:37:14 UTC dvc=10.201.78.87 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62909 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851929 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851929 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851918.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:17:41 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63483 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851918 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851918 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851842.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:03:05 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59908 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=851842 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851842 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851948.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:13:18 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51327 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851948 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851948 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852008.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:13:25 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63619 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852008 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852008 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852072.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:30:09 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53467 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852072 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852072 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852077.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:31:58 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58546 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=852077 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852077 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852110.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:56:32 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61983 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852110 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852110 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852378.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:03:31 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49942 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852378 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852378 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851787.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:57:21 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55199 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851787 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851787 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851800.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:54:32 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50605 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851800 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851800 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851941.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:58:30 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51721 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851941 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851941 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851850.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:29:59 UTC dvc=10.201.78.113 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50606 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851850 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851850 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851885.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:22:40 UTC dvc=10.201.78.37 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53481 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851885 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851885 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851801.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:55:09 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=59875 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851801 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851801 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851884.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:20:10 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50039 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851884 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851884 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851815.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:06:05 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53889 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851815 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851815 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851825.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:49:07 UTC dvc=10.201.78.85 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51906 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851825 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851825 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851966.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:50:43 UTC dvc=10.201.78.10 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50758 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851966 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851966 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852112.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:58:20 UTC dvc=10.201.78.6 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60631 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852112 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852112 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852126.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 15:03:43 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=65017 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=852126 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852126 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852407.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:15:10 UTC dvc=10.201.78.54 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49620 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852407 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852407 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852417.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:17:11 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51333 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852417 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852417 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852431.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:20:08 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53525 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852431 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852431 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852438.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:21:21 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62464 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852438 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852438 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483822.1.alert: CEF:0|FireEye|CMS|7.2.1.244420|MO|malware-object|4|rt=Feb 09 2015 07:24:06 UTC dvc=10.201.78.216 cn3Label=cncPort cn3=80 dst=191.235.179.140 fileHash=6126d97e5bd4e6d93e3e3579cc5b3ce0 filePath=/analysis/191.235.179.140_80-10.220.55.216_56118--833719413_9204551_T.pcoff cs5Label=cncHost cs5=api.shamenchik.info cs3Label=osinfo cs3=Microsoft WindowsXP 32-bit 5.1 sp3 14.0528 proto=tcp dvchost=DEVFEYE1 dvc=10.100.25.16 cn1Label=vlan cn1=0 externalId=3483822 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ma_id\=3483822 cs6Label=channel cs6=POST /api/sdm HTTP/1.1::~~Content-Type: application/x-json::~~Accept: */*::~~User-Agent: Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; .NET CLR 2.0.50727; .NET CLR 3.0.04506.648; .NET CLR 3.5.21022; .NET4.0C; .NET4.0E)::~~Host: api.shamenchik.info::~~Content-Length: 800::~~Connection: Keep-Alive::~~Cache-Control: no-cache::~~::~~g+3CouWsTcAym6cirpXcrPeCqh2q2xYh//aNKX15/lgvTM
+<164>fenotify-851890.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:36:36 UTC dvc=10.201.78.160 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63018 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851890 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851890 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851861.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 07:11:45 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62660 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851861 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851861 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851781.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 01:10:09 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63319 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851781 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851781 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851837.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:30:01 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49533 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851837 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851837 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851846.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:26:50 UTC dvc=10.201.78.57 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=53933 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851846 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851846 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851920.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:26:37 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60410 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=851920 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851920 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851818.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:25:02 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60319 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=851818 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851818 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851866.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 07:13:28 UTC dvc=10.201.78.12 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54836 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851866 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851866 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851773.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 00:01:29 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60239 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=851773 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851773 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851935.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 10:48:18 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54362 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851935 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851935 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851970.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:04:50 UTC dvc=10.201.78.40 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50327 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851970 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851970 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851975.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:21:18 UTC dvc=10.201.78.59 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=51420 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851975 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851975 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852454.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 17:28:34 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55348 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852454 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852454 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483798.2.alert: act;Trojan.Kuloz;Trojan.Kuluoz 
+<164>fenotify-834781.2.alert: Connection: Keep-Alive::~~::~~ dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Malicious.URL 
+<164>fenotify-3483794.3.alert: 0d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~ cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=FE_Evasion_Sandboxie;FE_Evasion_VMDetect 
+<164>fenotify-3483796.2.alert: jan.Kuloz;Trojan.Kuluoz 
+<164>fenotify-851894.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:45:48 UTC dvc=10.201.78.60 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=49433 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851894 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851894 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851899.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 08:54:50 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50711 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851899 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851899 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851851.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:31:05 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61134 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851851 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851851 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851845.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 06:20:46 UTC dvc=10.201.78.20 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=55294 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851845 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851845 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851789.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 02:03:48 UTC dvc=10.201.78.84 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=62782 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851789 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851789 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851820.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:33:45 UTC dvc=10.201.78.87 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=63559 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851820 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851820 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851828.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:09:07 UTC dvc=10.201.78.86 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=52967 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=851828 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851828 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851816.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 04:16:05 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61806 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851816 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851816 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851831.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 05:14:31 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58655 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851831 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851831 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851950.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 11:16:07 UTC dvc=10.201.78.11 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=58855 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=851950 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851950 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-851988.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 12:35:26 UTC dvc=10.201.78.190 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61427 dvc=10.100.25.16 smac=00:00:0c:07:ac:00 cn1Label=vlan cn1=0 externalId=851988 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=851988 dmac=00:1d:a2:af:32:a1 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852013.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 13:18:29 UTC dvc=10.201.78.34 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=61630 dvc=10.100.25.5 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=143 externalId=852013 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852013 dmac=00:1b:17:00:09:01 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852070.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:27:45 UTC dvc=10.201.78.44 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=54769 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852070 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852070 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852082.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:35:15 UTC dvc=10.201.78.68 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60922 dvc=10.100.25.16 smac=00:00:0c:07:ac:5a cn1Label=vlan cn1=0 externalId=852082 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852082 dmac=00:09:0f:33:4f:48 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852114.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 14:59:08 UTC dvc=10.201.78.194 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=50396 dvc=10.100.25.16 smac=00:00:0c:07:ac:c8 cn1Label=vlan cn1=0 externalId=852114 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852114 dmac=88:43:e1:95:13:29 cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-852295.alert: CEF:0|FireEye|CMS|7.2.1.244420|DM|domain-match|1|rt=Feb 09 2015 16:30:40 UTC dvc=10.201.78.51 cn3Label=cncPort cn3=53 cn2Label=sid cn2=80494706 shost=dev001srv02.example.com proto=udp cs5Label=cncHost cs5=mfdclk001.org dvchost=DEVFEYE1 spt=60266 dvc=10.100.25.16 smac=00:00:0c:07:ac:63 cn1Label=vlan cn1=0 externalId=852295 cs4Label=link cs4=https://DEVCMS01.example.com/event_stream/events_for_bot?ev_id\=852295 dmac=5c:5e:ab:eb:ab:0d cs1Label=sname cs1=Trojan.Generic.DNS 
+<164>fenotify-3483807.2.alert: z0Q6RNzwu2BoLSVUhiBihE4z0mlPDacuE1Waqs86Z9VVYg6iM2MlFH8GZgagnlOuzfB2JHdKPc/GwnzFk5DPfUPJAe8DH9Y6hwohv0t6XFVWx5UDSGARW8w3GAop9R+9iaSCuomuLU26/gaqL4gfjZqjLHzoDx+vhOCiOP6RnGMio5v2kcKxitPL7pPVu5FJ6MwUG7QOLecwONRzQsFh/jXFT4gyR2iS/EbufAeRofQVZHsj9dhgHZKNLcsFPnw/8lWlvgku7s28l57lAGxtp99jkzYD58jPgBm1nGbJPubFTL47ZmBkPPNsc1XjRcNvPz5/nzb0eWctXZ2cKocAQnT2zHOgBxRM6my9QW/Lg0JWaQyqBO2EOyTfej6KgVlHoIf0E3bv6C5PgVrJunAIqqlO6EvKvILlDYk2yoklvP3Fry5p4Nrw2isE95Used9Zqsoxx0bWInNcfyQhoqqlmYKiZZb+aBwGvJEL634pmoTMEBMdn4s3gz2a7aLV+vOVULQbgR15PygsYQdOnymv7uWZtdKOp7ut21GwNu9ZxJGMrssW0gzvaZiZDs7FSordVPUiUqcfS6ciU1cl29fNTWnmRkq4vk+vBgvUQLxTTAleV9k5svtB237GvvolWE72ugJQXUun51WxAqOAZpV0c6tEbK5qd6Z55z8Rs/LpN8VM4/nbZmfB5XY+eCCLfULjisVoStUUeH67&report\=p509XA27GEFLLes0RJ8pJJdIkbJ+3YkVUv2qjhuxlRPlVrrEZckeXFIaD+4/a1xulR8kKMx9GrPD2uc/wC+NxgKg/ok/kttHH45shX4YjPLsS4QtXUHugcE5Rr1238CYegHwOKWzAp3g5Mpt7loabRTBtmzXXeLBV4cFKv3zWpxQ7+CBGpsDfsvkD2Qgst3FX05VQHBpnJfXgRqdRrLyUjezF1tlIgvvNCv6hQ+zffxKk0WcD
 oUe8
+<164>fenotify-3483794.2.alert: 53 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863ca7110d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863ca7110d3cc7cc055f8d686a1b5d5c30db85c5423620e6bd231d592266782cf5e1647ae575e77b HTTP/1.1::~~Accept: */*::~~Proxy-Authorization: Basic ::~~User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: 5aqobwcp1xuqztwht.0eq0w6k.com::~~Connection: Keep-Alive::~~::~~GET /93ea73bcdaf32d5074e62be84ee83a84cacefa8dcf855c265457842d6b05f469863c
 a711
+<164>fenotify-3483799.2.alert: L, like Gecko) Chrome/35.0.1916.153 Safari/537.36::~~Host: pkeyqcot5gzamu.5t9dyvo2.com::~~Connection: Keep-Alive::~~::~~ cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=FE_PUP_Softpulse;FE_Evasion_VMDetect;FE_Evasion_DBGDetect_Files;FE_Evasion_Sandboxie 
+<164>fenotify-3483807.3.alert: n6o4JWRQX2V1jsLkx8LFQz3nXe7Bbiuuc1sMcdS/lEv7f9zpw09qs0LvVpRJe4tZjE4Gsghh7Xh5OAxE2A7HBLnWjloIazv6jvun+R1BpF1vuujyEdDgKWIv4BeMmQQJ6p66O/U0jHvWelTBMT+RTVFERsryrpWE+g7AHeRyzDIERgWxHxzA9y6cQ9JYp2/JOPdUzWnLWM24Be6fWmlJ37J90GuEvHh+WXWsaewcBg8xUAhlQBfEHP01PGcuX2yJin2rQ8/GhkiF210HCJUCIbxxz6rZuf6CaksKSXPIeXf1Iifha58Rtm cs2Label=anomaly cs2=misc-anomaly cs1Label=sname cs1=Malware.Binary 
\ No newline at end of file



[40/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/templates/solrconfig.xml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/templates/solrconfig.xml b/metron-deployment/roles/solr/templates/solrconfig.xml
new file mode 100644
index 0000000..b00af0f
--- /dev/null
+++ b/metron-deployment/roles/solr/templates/solrconfig.xml
@@ -0,0 +1,583 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- 
+     For more details about configurations options that may appear in
+     this file, see http://wiki.apache.org/solr/SolrConfigXml. 
+-->
+<config>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>5.2.1</luceneMatchVersion>
+
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+       
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory,
+       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based, not
+       persistent, and doesn't work with replication.
+    -->
+  <directoryFactory name="DirectoryFactory" 
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
+  </directoryFactory> 
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
+
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+       
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+
+    <!-- LockFactory 
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+      
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is default for Solr3.6 and later, otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         http://wiki.apache.org/lucene-java/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Lucene Infostream
+       
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its info stream to solr's log. By default,
+         this is enabled here, and controlled through log4j.properties.
+      -->
+     <infoStream>true</infoStream>
+  </indexConfig>
+
+
+  <!-- JMX
+       
+       This example enables JMX if and only if an existing MBeanServer
+       is found, use this if you want to configure JMX through JVM
+       parameters. Remove this to disable exposing Solr configuration
+       and statistics to JMX.
+
+       For more details see http://wiki.apache.org/solr/SolrJmx
+    -->
+  <jmx />
+  <!-- If you want to connect to a particular server, specify the
+       agentId 
+    -->
+  <!-- <jmx agentId="myAgent" /> -->
+  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
+  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
+    -->
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability,
+         and solr cloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.
+         "numVersionBuckets" - sets the number of buckets used to keep
+                track of max version values when checking for re-ordered
+                updates; increase this value to reduce the cost of
+                synchronizing access to version buckets during high-volume
+                indexing, this requires 8 bytes (long) * numVersionBuckets
+                of heap space per Solr core.
+    -->
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
+    </updateLog>
+ 
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents. 
+
+         http://wiki.apache.org/solr/UpdateXmlMessages
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit. 
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+     <autoCommit> 
+       <maxTime>${solr.autoCommit.maxTime:15000}</maxTime> 
+       <openSearcher>false</openSearcher> 
+     </autoCommit>
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+     <autoSoftCommit>
+       <maxTime>${solr.autoSoftCommit.maxTime:{{ solr_autoSoftCommit_maxTime }}}</maxTime>
+     </autoSoftCommit>
+
+  </updateHandler>
+  
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+    <!-- Max Boolean Clauses
+
+         Maximum number of clauses in each BooleanQuery,  an exception
+         is thrown if exceeded.
+
+         ** WARNING **
+         
+         This option actually modifies a global Lucene property that
+         will affect all SolrCores.  If multiple solrconfig.xml files
+         disagree on this property, the value at any given moment will
+         be based on the last SolrCore to be initialized.
+         
+      -->
+    <maxBooleanClauses>1024</maxBooleanClauses>
+
+
+    <!-- Solr Internal Query Caches
+
+         There are two implementations of cache available for Solr,
+         LRUCache, based on a synchronized LinkedHashMap, and
+         FastLRUCache, based on a ConcurrentHashMap.  
+
+         FastLRUCache has faster gets and slower puts in single
+         threaded operation and thus is generally faster than LRUCache
+         when the hit ratio of the cache is high (> 75%), and may be
+         faster under other scenarios on multi-cpu systems.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate.  For
+         LRUCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation
+               (LRUCache or FastLRUCache)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               an old cache.  
+      -->
+    <filterCache class="solr.FastLRUCache"
+                 size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+
+        Caches results of searches - ordered lists of document ids
+        (DocList) based on a query, a sort, and the range of documents requested.
+        Additional supported parameter by LRUCache:
+           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                      to occupy
+     -->
+    <queryResultCache class="solr.LRUCache"
+                     size="512"
+                     initialSize="512"
+                     autowarmCount="0"/>
+   
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.  
+      -->
+    <documentCache class="solr.LRUCache"
+                   size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+    
+    <!-- custom cache currently used by block join --> 
+    <cache name="perSegFilter"
+      class="solr.search.LRUCache"
+      size="10"
+      initialSize="0"
+      autowarmCount="10"
+      regenerator="solr.NoOpRegenerator" />
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+   <!-- Result Window Size
+
+        An optimization for use with the queryResultCache.  When a search
+        is requested, a superset of the requested number of document ids
+        are collected.  For example, if a search for a particular query
+        requests matching documents 10 through 19, and queryWindowSize is 50,
+        then documents 0 through 49 will be collected and cached.  Any further
+        requests in that range can be satisfied via the cache.  
+     -->
+   <queryResultWindowSize>20</queryResultWindowSize>
+
+   <!-- Maximum number of documents to cache for any entry in the
+        queryResultCache. 
+     -->
+   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>false</useColdSearcher>
+
+    <!-- Max Warming Searchers
+         
+         Maximum number of searchers that may be warming in the
+         background concurrently.  An error is returned if this limit
+         is exceeded.
+
+         Recommend values of 1-2 for read-only slaves, higher for
+         masters w/o cache warming.
+      -->
+    <maxWarmingSearchers>2</maxWarmingSearchers>
+
+  </query>
+
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+       handleSelect is a legacy option that affects the behavior of requests
+       such as /select?qt=XXX
+
+       handleSelect="true" will cause the SolrDispatchFilter to process
+       the request and dispatch the query to a handler specified by the 
+       "qt" param, assuming "/select" isn't already registered.
+
+       handleSelect="false" will cause the SolrDispatchFilter to
+       ignore "/select" requests, resulting in a 404 unless a handler
+       is explicitly registered with the name "/select"
+
+       handleSelect="true" is not recommended for new users, but is the default
+       for backwards compatibility
+    -->
+  <requestDispatcher handleSelect="false" >
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         enableRemoteStreaming - enables use of the stream.file
+         and stream.url parameters for specifying remote streams.
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+         
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+         
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the 
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom 
+         plugins.
+         
+         *** WARNING ***
+         The settings below authorize Solr to fetch remote files, You
+         should make sure your system has some authentication before
+         using enableRemoteStreaming="true"
+
+      --> 
+    <requestParsers enableRemoteStreaming="true" 
+                    multipartUploadLimitInKB="2048000"
+                    formdataUploadLimitInKB="2048"
+                    addHttpRequestToContext="false"/>
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+
+  </requestDispatcher>
+
+  <!-- Request Handlers 
+
+       http://wiki.apache.org/solr/SolrRequestHandler
+
+       Incoming queries will be dispatched to a specific handler by name
+       based on the path specified in the request.
+
+       Legacy behavior: If the request path uses "/select" but no Request
+       Handler has that name, and if handleSelect="true" has been specified in
+       the requestDispatcher, then the Request Handler is dispatched based on
+       the qt parameter.  Handlers without a leading '/' are accessed this way
+       like so: http://host/app/[core/]select?qt=name  If no qt is
+       given, then the requestHandler that declares default="true" will be
+       used or the one named "standard".
+
+       If a Request Handler is declared with startup="lazy", then it will
+       not be initialized until the first request that uses it.
+
+    -->
+  <!-- SearchHandler
+
+       http://wiki.apache.org/solr/SearchHandler
+
+       For processing Search Queries, the primary Request Handler
+       provided with Solr is "SearchHandler". It delegates to a sequence
+       of SearchComponents (see below) and supports distributed
+       queries across multiple shards
+    -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <!-- default values for query parameters can be specified, these
+         will be overridden by parameters in the request
+      -->
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <int name="rows">10</int>
+     </lst>
+
+    </requestHandler>
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <str name="wt">json</str>
+       <str name="indent">true</str>
+       <str name="df">text</str>
+     </lst>
+  </requestHandler>
+
+  <!--
+    The export request handler is used to export full sorted result sets.
+    Do not change these defaults.
+  -->
+  <requestHandler name="/export" class="solr.SearchHandler">
+    <lst name="invariants">
+      <str name="rq">{!xport}</str>
+      <str name="wt">xsort</str>
+      <str name="distrib">false</str>
+    </lst>
+
+    <arr name="components">
+      <str>query</str>
+    </arr>
+  </requestHandler>
+
+
+  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell">
+    <lst name="defaults">
+      <str name="df">text</str>
+    </lst>
+  </initParams>
+
+  <!-- Field Analysis Request Handler
+
+       RequestHandler that provides much the same functionality as
+       analysis.jsp. Provides the ability to specify multiple field
+       types and field names in the same request and outputs
+       index-time and query-time analysis for each of them.
+
+       Request parameters are:
+       analysis.fieldname - field name whose analyzers are to be used
+
+       analysis.fieldtype - field type whose analyzers are to be used
+       analysis.fieldvalue - text for index-time analysis
+       q (or analysis.q) - text for query time analysis
+       analysis.showmatch (true|false) - When set to true and when
+           query analysis is performed, the produced tokens of the
+           field value analysis will be marked as "matched" for every
+           token that is produced by the query analysis
+   -->
+  <requestHandler name="/analysis/field" 
+                  startup="lazy"
+                  class="solr.FieldAnalysisRequestHandler" />
+
+
+  <!-- Document Analysis Handler
+
+       http://wiki.apache.org/solr/AnalysisRequestHandler
+
+       An analysis handler that provides a breakdown of the analysis
+       process of provided documents. This handler expects a (single)
+       content stream with the following format:
+
+       <docs>
+         <doc>
+           <field name="id">1</field>
+           <field name="name">The Name</field>
+           <field name="text">The Text Value</field>
+         </doc>
+         <doc>...</doc>
+         <doc>...</doc>
+         ...
+       </docs>
+
+    Note: Each document must contain a field which serves as the
+    unique key. This key is used in the returned response to associate
+    an analysis breakdown to the analyzed document.
+
+    Like the FieldAnalysisRequestHandler, this handler also supports
+    query analysis by sending either an "analysis.query" or "q"
+    request parameter that holds the query text to be analyzed. It
+    also supports the "analysis.showmatch" parameter which when set to
+    true, all field tokens that match the query tokens will be marked
+    as a "match". 
+  -->
+  <requestHandler name="/analysis/document" 
+                  class="solr.DocumentAnalysisRequestHandler" 
+                  startup="lazy" />
+
+  <!-- Echo the request contents back to the client -->
+  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
+    <lst name="defaults">
+     <str name="echoParams">explicit</str> 
+     <str name="echoHandler">true</str>
+    </lst>
+  </requestHandler>
+  
+
+
+  <!-- Search Components
+
+       Search components are registered to SolrCore and used by 
+       instances of SearchHandler (which can access them by name)
+       
+       By default, the following components are available:
+       
+       <searchComponent name="query"     class="solr.QueryComponent" />
+       <searchComponent name="facet"     class="solr.FacetComponent" />
+       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
+       <searchComponent name="highlight" class="solr.HighlightComponent" />
+       <searchComponent name="stats"     class="solr.StatsComponent" />
+       <searchComponent name="debug"     class="solr.DebugComponent" />
+       
+     -->
+
+  <!-- Terms Component
+
+       http://wiki.apache.org/solr/TermsComponent
+
+       A component to return terms and document frequency of those
+       terms
+    -->
+  <searchComponent name="terms" class="solr.TermsComponent"/>
+
+  <!-- A request handler for demonstrating the terms component -->
+  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
+     <lst name="defaults">
+      <bool name="terms">true</bool>
+      <bool name="distrib">false</bool>
+    </lst>     
+    <arr name="components">
+      <str>terms</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Legacy config for the admin interface -->
+  <admin>
+    <defaultQuery>*:*</defaultQuery>
+  </admin>
+
+</config>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/tap_interface/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/tap_interface/defaults/main.yml b/metron-deployment/roles/tap_interface/defaults/main.yml
new file mode 100644
index 0000000..ca752b4
--- /dev/null
+++ b/metron-deployment/roles/tap_interface/defaults/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# name of the tap interface to create (used by tasks/main.yml)
+tap_if: tap0
+# IP address assigned to the tap interface when it is brought up
+tap_ip: 10.0.0.1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/tap_interface/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/tap_interface/tasks/main.yml b/metron-deployment/roles/tap_interface/tasks/main.yml
new file mode 100644
index 0000000..1de3abe
--- /dev/null
+++ b/metron-deployment/roles/tap_interface/tasks/main.yml
@@ -0,0 +1,35 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# Installs tunctl and creates a persistent tap interface in promiscuous mode.
+- name: Install tunctl
+  yum:
+    name: tunctl
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+# 'tunctl -p' alone always creates the kernel-default interface (tap0), so a
+# non-default {{ tap_if }} was silently ignored; '-t' names the interface
+# explicitly, and 'creates' makes repeated runs idempotent.
+- name: Create {{ tap_if }}
+  command: tunctl -p -t {{ tap_if }}
+  args:
+    creates: /sys/class/net/{{ tap_if }}
+
+- name: Bring up {{ tap_if }} on {{ tap_ip }}
+  command: ifconfig {{ tap_if }} {{ tap_ip }} up
+
+- name:  Put {{ tap_if }} in PROMISC
+  command: ip link set {{ tap_if }} promisc on
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/defaults/main.yml b/metron-deployment/roles/yaf/defaults/main.yml
new file mode 100644
index 0000000..d0b53c3
--- /dev/null
+++ b/metron-deployment/roles/yaf/defaults/main.yml
@@ -0,0 +1,30 @@
+#
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# versions of the fixbuf library and the yaf tool to download and build
+fixbuf_version: 1.7.1
+yaf_version: 2.8.0
+# install location for the start script; referenced by the init.d template
+yaf_home: /opt/yaf
+# kafka topic that receives the yaf flow records
+yaf_topic: yaf
+hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo
+# binaries produced by the yaf build ('make install' default prefix)
+yaf_bin: /usr/local/bin/yaf
+yafscii_bin: /usr/local/bin/yafscii
+yaf_log: /var/log/yaf.log
+# kafka console producer used by start-yaf.sh to ship records
+kafka_prod: /usr/hdp/current/kafka-broker/bin/kafka-console-producer.sh
+daemon_bin: /usr/local/bin/airdaemon
+yaf_start: /opt/yaf/start-yaf.sh
+# extra command line arguments passed through to yaf; empty by default
+yaf_args:

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/meta/main.yml b/metron-deployment/roles/yaf/meta/main.yml
new file mode 100644
index 0000000..ff366b8
--- /dev/null
+++ b/metron-deployment/roles/yaf/meta/main.yml
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+  - build-tools
+  - java_jdk
+  - libselinux-python
+  - kafka-client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/tasks/fixbuf.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/tasks/fixbuf.yml b/metron-deployment/roles/yaf/tasks/fixbuf.yml
new file mode 100644
index 0000000..9cd9244
--- /dev/null
+++ b/metron-deployment/roles/yaf/tasks/fixbuf.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# Downloads, builds, and installs libfixbuf, the IPFIX library yaf links against.
+- name: Download fixbuf
+  get_url:
+    url: "http://tools.netsa.cert.org/releases/libfixbuf-{{fixbuf_version}}.tar.gz"
+    dest: "/tmp/libfixbuf-{{fixbuf_version}}.tar.gz"
+
+- name: Extract fixbuf tarball
+  unarchive:
+    src: "/tmp/libfixbuf-{{fixbuf_version}}.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: "/tmp/libfixbuf-{{fixbuf_version}}"
+
+# NOTE(review): unlike the yaf build task, these shell steps carry no 'creates'
+# guard, so configure/make/make install re-run on every play — confirm this is
+# intended, or guard on an installed artifact path.
+- name: Compile and Install fixbuf
+  shell: "{{item}}"
+  args:
+    chdir: "/tmp/libfixbuf-{{fixbuf_version}}"
+  with_items:
+    - ./configure
+    - make
+    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/tasks/main.yml b/metron-deployment/roles/yaf/tasks/main.yml
new file mode 100644
index 0000000..15f67f6
--- /dev/null
+++ b/metron-deployment/roles/yaf/tasks/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: fixbuf.yml
+- include: yaf.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/tasks/yaf.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/tasks/yaf.yml b/metron-deployment/roles/yaf/tasks/yaf.yml
new file mode 100644
index 0000000..10d3205
--- /dev/null
+++ b/metron-deployment/roles/yaf/tasks/yaf.yml
@@ -0,0 +1,60 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# Downloads, builds, installs, and starts yaf (yet another flowmeter).
+- name: Download yaf
+  get_url:
+    url: "http://tools.netsa.cert.org/releases/yaf-{{yaf_version}}.tar.gz"
+    dest: "/tmp/yaf-{{yaf_version}}.tar.gz"
+
+- name: Extract yaf tarball
+  unarchive:
+    src: "/tmp/yaf-{{yaf_version}}.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: /usr/local/bin/yaf
+
+- name: Compile and install yaf
+  shell: "{{item}}"
+  args:
+    chdir: "/tmp/yaf-{{yaf_version}}"
+    creates: /usr/local/bin/yaf
+  with_items:
+    - ./configure --enable-applabel --enable-plugins
+    - make
+    - make install
+
+- name: Create yaf home directory
+  file:
+    path: "{{ yaf_home }}"
+    state: directory
+    mode: 0755
+
+- name: Install yaf start script
+  template: src=start-yaf.sh dest={{ yaf_home }}/start-yaf.sh mode=0755
+
+- name: Install init.d service script
+  template: src=yaf dest=/etc/init.d/yaf mode=0755
+
+# was 'systemctl enable pcap-replay' — a copy/paste from the pcap_replay role;
+# this role installs the 'yaf' service, so that is the unit to enable
+- name: Register the service with systemd
+  shell: systemctl enable yaf
+  when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "7"
+
+- name: Turn on promiscuous mode for {{ sniff_interface }}
+  shell: "ip link set {{ sniff_interface }} promisc on"
+
+- name: Start yaf
+  service: name=yaf state=restarted args="{{ yaf_args }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/templates/start-yaf.sh
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/templates/start-yaf.sh b/metron-deployment/roles/yaf/templates/start-yaf.sh
new file mode 100644
index 0000000..9660e72
--- /dev/null
+++ b/metron-deployment/roles/yaf/templates/start-yaf.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# a very simple Metron probe that captures the output of yaf - yet another
+# flowmeter - and sends the output to kafka so that it can be consumed
+# by metron
+#
+{{ yaf_bin }} --in {{ sniff_interface }} --live pcap "${@:1}" | {{ yafscii_bin }} --tabular | {{ kafka_prod }} --broker-list {{ kafka_broker_url }} --topic {{ yaf_topic }}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yaf/templates/yaf
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yaf/templates/yaf b/metron-deployment/roles/yaf/templates/yaf
new file mode 100644
index 0000000..18bc4ac
--- /dev/null
+++ b/metron-deployment/roles/yaf/templates/yaf
@@ -0,0 +1,83 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# yaf daemon
+# chkconfig: 345 20 80
+# description: Runs yaf - yet another flowmeter
+# processname: yaf
+#
+NAME=yaf
+DESC="Executes yaf - yet another flowmeter"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+DAEMON_PATH="{{ yaf_home }}"
+DAEMON="{{ yaf_start }}"
+DAEMONOPTS="${@:2}"
+
+case "$1" in
+  start)
+    printf "%-50s" "Starting $NAME..."
+
+    # kick-off the daemon
+    cd $DAEMON_PATH
+    PID=`$DAEMON $DAEMONOPTS > /dev/null 2>&1 & echo $!`
+    if [ -z $PID ]; then
+        printf "%s\n" "Fail"
+    else
+        echo $PID > $PIDFILE
+        printf "%s\n" "Ok"
+    fi
+  ;;
+
+  status)
+    printf "%-50s" "Checking $NAME..."
+    if [ -f $PIDFILE ]; then
+      PID=`cat $PIDFILE`
+      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
+        printf "%s\n" "Process dead but pidfile exists"
+      else
+        echo "Running"
+      fi
+    else
+      printf "%s\n" "Service not running"
+    fi
+  ;;
+
+  stop)
+    printf "%-50s" "Stopping $NAME"
+    cd $DAEMON_PATH
+    # read the pidfile only after confirming it exists; the original ran
+    # 'cat $PIDFILE' unconditionally, printing a cat error when not running
+    if [ -f $PIDFILE ]; then
+        PID=`cat $PIDFILE`
+        kill -HUP $PID
+        killall $NAME
+        printf "%s\n" "Ok"
+        rm -f $PIDFILE
+    else
+        printf "%s\n" "pidfile not found"
+    fi
+  ;;
+
+  restart)
+    $0 stop
+    $0 start
+  ;;
+
+  *)
+    echo "Usage: $0 {status|start|stop|restart}"
+    exit 1
+esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/yum-update/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/yum-update/tasks/main.yml b/metron-deployment/roles/yum-update/tasks/main.yml
new file mode 100644
index 0000000..4db6297
--- /dev/null
+++ b/metron-deployment/roles/yum-update/tasks/main.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Yum Update Packages
+  yum:
+    name: "*"
+    state: latest
+    update_cache: yes
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/multinode-vagrant/.gitignore
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/multinode-vagrant/.gitignore b/metron-deployment/vagrant/multinode-vagrant/.gitignore
new file mode 100644
index 0000000..8000dd9
--- /dev/null
+++ b/metron-deployment/vagrant/multinode-vagrant/.gitignore
@@ -0,0 +1 @@
+.vagrant

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/multinode-vagrant/Vagrantfile
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/multinode-vagrant/Vagrantfile b/metron-deployment/vagrant/multinode-vagrant/Vagrantfile
new file mode 100644
index 0000000..61d656f
--- /dev/null
+++ b/metron-deployment/vagrant/multinode-vagrant/Vagrantfile
@@ -0,0 +1,65 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+hosts = [
+  { hostname: "node1", ip: "192.168.66.101", memory: "2048", cpus: 2 },
+  { hostname: "node2", ip: "192.168.66.102", memory: "2048", cpus: 2 },
+  { hostname: "node3", ip: "192.168.66.103", memory: "2048", cpus: 2 },
+  { hostname: "node4", ip: "192.168.66.104", memory: "2048", cpus: 2 }
+]
+
+Vagrant.configure(2) do |config|
+
+  # all hosts built on centos 6
+  config.vm.box = "bento/centos-6.7"
+  config.ssh.insert_key = false
+
+  # enable the hostmanager plugin
+  config.hostmanager.enabled = true
+  config.hostmanager.manage_host = true
+
+  # define each host
+  hosts.each_with_index do |host, index|
+    config.vm.define host[:hostname] do |node|
+
+      # host settings
+      node.vm.hostname = host[:hostname]
+      node.vm.network "private_network", ip: host[:ip]
+
+      # vm settings
+      node.vm.provider "virtualbox" do |vb|
+        vb.memory = host[:memory]
+        vb.cpus = host[:cpus]
+      end
+
+      # enable promisc mode on the network interface
+      if host.has_key?(:promisc)
+        vb.customize ["modifyvm", :id, "--nicpromisc#{host[:promisc]}", "allow-all"]
+      end
+
+      # provisioning; only after all hosts created
+      if index == hosts.size - 1
+        node.vm.provision :ansible do |ansible|
+          ansible.playbook = "../../playbooks/metron_full_install.yml"
+          ansible.sudo = true
+          ansible.inventory_path = "../../inventory/multinode-vagrant"
+          ansible.limit = "all"
+        end
+      end
+    end
+  end
+end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/multinode-vagrant/ansible.cfg
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/multinode-vagrant/ansible.cfg b/metron-deployment/vagrant/multinode-vagrant/ansible.cfg
new file mode 100644
index 0000000..7a41ec8
--- /dev/null
+++ b/metron-deployment/vagrant/multinode-vagrant/ansible.cfg
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+[defaults]
+host_key_checking = false
+library = ../../extra_modules
+roles_path = ../../roles
+pipelining = True
+log_path = ./ansible.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/packet-capture/Vagrantfile
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/packet-capture/Vagrantfile b/metron-deployment/vagrant/packet-capture/Vagrantfile
new file mode 100644
index 0000000..1303712
--- /dev/null
+++ b/metron-deployment/vagrant/packet-capture/Vagrantfile
@@ -0,0 +1,69 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+Vagrant.configure("2") do |config|
+
+  # enable hostmanager
+  config.hostmanager.enabled = true
+  config.hostmanager.manage_host = true
+
+  #
+  # source
+  #
+  config.vm.define "source" do |node|
+
+    # host settings
+    node.vm.hostname = "source"
+    node.vm.box = "bento/centos-7.1"
+    node.ssh.insert_key = "true"
+    node.vm.network :private_network, ip: "192.168.33.10", netmask: "255.255.255.0"
+
+    # provider
+    node.vm.provider "virtualbox" do |vb|
+      vb.memory = 1024
+      vb.cpus = 1
+    end
+  end
+
+  #
+  # sink
+  #
+  config.vm.define "sink" do |node|
+
+    # host settings
+    node.vm.hostname = "sink"
+    node.vm.box = "bento/centos-7.1"
+    node.ssh.insert_key = "true"
+    node.vm.network "public_network"
+    node.vm.network :private_network, ip: "192.168.33.11", netmask: "255.255.255.0"
+
+    # provider
+    node.vm.provider "virtualbox" do |vb|
+      vb.memory = 4096
+      vb.cpus = 3
+
+      # network adapter settings; [Am79C970A|Am79C973|82540EM|82543GC|82545EM|virtio]
+      vb.customize ["modifyvm", :id, "--nicpromisc2", "allow-all"]
+      vb.customize ["modifyvm", :id, "--nictype2","82545EM"]
+    end
+  end
+
+  # provision hosts
+  config.vm.provision :ansible do |ansible|
+    ansible.playbook = "playbook.yml"
+  end
+end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/packet-capture/ansible.cfg
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/packet-capture/ansible.cfg b/metron-deployment/vagrant/packet-capture/ansible.cfg
new file mode 100644
index 0000000..9c650c2
--- /dev/null
+++ b/metron-deployment/vagrant/packet-capture/ansible.cfg
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[defaults]
+host_key_checking = false
+library = ../../extra_modules
+roles_path = ../../roles
+pipelining = True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/packet-capture/playbook.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/packet-capture/playbook.yml b/metron-deployment/vagrant/packet-capture/playbook.yml
new file mode 100644
index 0000000..7a5128c
--- /dev/null
+++ b/metron-deployment/vagrant/packet-capture/playbook.yml
@@ -0,0 +1,43 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+#
+# produces network traffic
+#
+- hosts: source
+  become: yes
+  vars:
+    pcap_replay_interface: "enp0s8"
+  roles:
+    - role: pcap_replay
+
+#
+# consumes network traffic
+#
+- hosts: sink
+  become: yes
+  # vars indentation normalized to 4 spaces to match the play above
+  vars:
+    dpdk_device: ["00:08.0"]
+    dpdk_target: "x86_64-native-linuxapp-gcc"
+    num_huge_pages: 512
+    pcapture_portmask: 0xf
+    pcapture_topic: pcap
+    kafka_broker_url: localhost:9092
+  roles:
+    - role: librdkafka
+    - role: kafka-broker
+    - role: packet-capture

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/singlenode-vagrant/.gitignore
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/singlenode-vagrant/.gitignore b/metron-deployment/vagrant/singlenode-vagrant/.gitignore
new file mode 100644
index 0000000..8000dd9
--- /dev/null
+++ b/metron-deployment/vagrant/singlenode-vagrant/.gitignore
@@ -0,0 +1 @@
+.vagrant

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/singlenode-vagrant/Vagrantfile
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/singlenode-vagrant/Vagrantfile b/metron-deployment/vagrant/singlenode-vagrant/Vagrantfile
new file mode 100644
index 0000000..98413d6
--- /dev/null
+++ b/metron-deployment/vagrant/singlenode-vagrant/Vagrantfile
@@ -0,0 +1,63 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+hosts = [{
+    hostname: "node1",
+    ip: "192.168.66.121",
+    memory: "8192",
+    cpus: 4,
+    promisc: 2  # enables promisc on the 'Nth' network interface
+}]
+
+Vagrant.configure(2) do |config|
+
+  # all hosts built on centos 6
+  config.vm.box = "bento/centos-6.7"
+  config.ssh.insert_key = "true"
+
+  # enable the hostmanager plugin
+  config.hostmanager.enabled = true
+  config.hostmanager.manage_host = true
+
+  # host definition
+  hosts.each_with_index do |host, index|
+    config.vm.define host[:hostname] do |node|
+
+      # host settings
+      node.vm.hostname = host[:hostname]
+      node.vm.network "private_network", ip: host[:ip]
+
+      # vm settings
+      node.vm.provider "virtualbox" do |vb|
+        vb.memory = host[:memory]
+        vb.cpus = host[:cpus]
+
+        # enable promisc mode on the network interface
+        if host.has_key?(:promisc)
+          vb.customize ["modifyvm", :id, "--nicpromisc#{host[:promisc]}", "allow-all"]
+        end
+      end
+    end
+  end
+
+  # provisioning
+  config.vm.provision :ansible do |ansible|
+    ansible.playbook = "../../playbooks/metron_full_install.yml"
+    ansible.sudo = true
+    ansible.inventory_path = "../../inventory/singlenode-vagrant"
+  end
+end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/vagrant/singlenode-vagrant/ansible.cfg
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/singlenode-vagrant/ansible.cfg b/metron-deployment/vagrant/singlenode-vagrant/ansible.cfg
new file mode 100644
index 0000000..7a41ec8
--- /dev/null
+++ b/metron-deployment/vagrant/singlenode-vagrant/ansible.cfg
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+[defaults]
+host_key_checking = false
+library = ../../extra_modules
+roles_path = ../../roles
+pipelining = True
+log_path = ./ansible.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/README.md b/metron-platform/README.md
new file mode 100644
index 0000000..c37d90c
--- /dev/null
+++ b/metron-platform/README.md
@@ -0,0 +1,30 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+ -->
+
+# Current Build
+
+The latest build of metron-platform is 0.1BETA.
+
+We are still in the process of merging/porting additional features from our production code base into this open source release. This release will be followed by a number of additional beta releases until the port is complete. We will also work on getting additional documentation and user/developer guides to the community as soon as we can. At this time we offer no support for the beta software, but will try to respond to requests as promptly as we can.
+
+# metron-platform
+
+Extensible set of Storm topologies and topology attributes for streaming, enriching, indexing, and storing telemetry in Hadoop.  General information on Metron is available at https://metron.incubator.apache.org/
+
+# Documentation
+
+Please see the documentation within each individual module for descriptions and usage instructions. Sample topologies are provided under Metron_Topologies to get you started with the framework. Prior knowledge of Hadoop, Storm, Kafka, and HBase is assumed.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/README.txt
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/README.txt b/metron-platform/metron-api/README.txt
new file mode 100644
index 0000000..9d545ab
--- /dev/null
+++ b/metron-platform/metron-api/README.txt
@@ -0,0 +1,16 @@
+The 'hbase' module of the 'metron' project contains the code to communicate with HBase. This module has several APIs (refer to the IPcapGetter.java and IPcapScanner.java files)
+to fetch pcaps from HBase. The following APIs have been created under this module's implementation.
+
+APIs ( in IPcapGetter.java) to get pcaps using keys :
+ 1. public PcapsResponse getPcaps(List<String> keys, String lastRowKey, long startTime, long endTime, boolean includeReverseTraffic, boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
+ 2. public PcapsResponse getPcaps(String key, long startTime, long endTime, boolean includeReverseTraffic) throws IOException;
+ 3. public PcapsResponse getPcaps(List<String> keys) throws IOException;
+ 4. public PcapsResponse getPcaps(String key) throws IOException;
+
+APIs ( in IPcapScanner.java) to get pcaps using key range :
+ 1. public byte[] getPcaps(String startKey, String endKey, long maxResponseSize, long startTime, long endTime) throws IOException;
+ 2. public byte[] getPcaps(String startKey, String endKey) throws IOException;
+ 
+ 
+Refer to the wiki documentation for further details: https://hwcsco.atlassian.net/wiki/pages/viewpage.action?pageId=5242892
+ 	

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/pom.xml b/metron-platform/metron-api/pom.xml
new file mode 100644
index 0000000..0decfa2
--- /dev/null
+++ b/metron-platform/metron-api/pom.xml
@@ -0,0 +1,282 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+  WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+  License for the specific language governing permissions and limitations under the License. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.apache.metron</groupId>
+		<artifactId>metron-platform</artifactId>
+		<version>0.1BETA</version>
+	</parent>
+	<artifactId>metron-api</artifactId>
+	<description>Metron API</description>
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<flume.version>${global_flume_version}</flume.version>
+		<hadoop.version>${global_hadoop_version}</hadoop.version>
+		<maven.compiler.target>1.7</maven.compiler.target>
+		<maven.compiler.source>1.7</maven.compiler.source>
+		<slf4j.version>${global_slf4j_version}</slf4j.version>
+		<zookeeper.version>3.4.5.2.0.6.0-76</zookeeper.version>
+		<logger.version>1.2.15</logger.version>
+
+		<storm-kafka.version>0.9.2-incubating</storm-kafka.version>
+		<storm-hdfs.version>0.0.7-SNAPSHOT</storm-hdfs.version>
+		<storm-hbase.version>0.0.5-SNAPSHOT</storm-hbase.version>
+
+		<spring.integration.version>3.0.0.RELEASE</spring.integration.version>
+		<spring.version>3.2.6.RELEASE</spring.version>
+		<commons-fileupload.version>1.2.2</commons-fileupload.version>
+		<commons-io.version>2.4</commons-io.version>
+		<commons-configuration.version>1.10</commons-configuration.version>
+		<commons-lang.version>2.6</commons-lang.version>
+		<commons-collections.version>3.2.1</commons-collections.version>
+		<commons-beanutils.version>1.8.3</commons-beanutils.version>
+		<commons-jexl.version>2.1.1</commons-jexl.version>
+
+
+		<junit.version>${global_junit_version}</junit.version>
+		<hamcrest.version>1.3</hamcrest.version>
+		<mockito.version>1.9.5</mockito.version>
+		<elastic-search.version>1.3.0</elastic-search.version>
+	</properties>
+	<dependencies>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>jaxrs-api</artifactId>
+			<version>3.0.4.Final</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.metron</groupId>
+			<artifactId>metron-common</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.metron</groupId>
+			<artifactId>metron-pcap</artifactId>
+			<version>${project.parent.version}</version>
+		</dependency>
+
+		<dependency>
+			<groupId>commons-beanutils</groupId>
+			<artifactId>commons-beanutils</artifactId>
+			<version>${commons-beanutils.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.commons</groupId>
+			<artifactId>commons-jexl</artifactId>
+			<version>${commons-jexl.version}</version>
+		</dependency>
+
+		<dependency>
+			<artifactId>commons-configuration</artifactId>
+			<groupId>commons-configuration</groupId>
+			<version>${commons-configuration.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-api</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>${junit.version}</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-api-mockito</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-core</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.powermock</groupId>
+			<artifactId>powermock-module-junit4</artifactId>
+			<version>1.5</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>joda-time</groupId>
+			<artifactId>joda-time</artifactId>
+			<version>2.3</version>
+		</dependency>
+		<dependency>
+			<groupId>com.google.guava</groupId>
+			<artifactId>guava</artifactId>
+			<version>${global_hbase_guava_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-client</artifactId>
+			<version>${global_hbase_version}</version>
+			<!--scope>provided</scope-->
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+				<!--exclusion>
+					<groupId>com.google.guava</groupId>
+					<artifactId>guava</artifactId>
+				</exclusion-->
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hbase</groupId>
+			<artifactId>hbase-testing-util</artifactId>
+			<version>${global_hbase_version}</version>
+			<scope>provided</scope>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-common</artifactId>
+			<version>${global_hadoop_version}</version>
+			<!--scope>provided</scope-->
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-hdfs</artifactId>
+			<version>${global_hadoop_version}</version>
+			<!--scope>provided</scope-->
+		</dependency>
+		<dependency>
+			<groupId>org.apache.hadoop</groupId>
+			<artifactId>hadoop-client</artifactId>
+			<version>${global_hadoop_version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-log4j12</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework.integration</groupId>
+			<artifactId>spring-integration-http</artifactId>
+			<version>${spring.integration.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.springframework</groupId>
+			<artifactId>spring-webmvc</artifactId>
+			<version>${spring.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>log4j</groupId>
+			<artifactId>log4j</artifactId>
+			<version>${logger.version}</version>
+			<exclusions>
+				<exclusion>
+					<groupId>com.sun.jmx</groupId>
+					<artifactId>jmxri</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.sun.jdmk</groupId>
+					<artifactId>jmxtools</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>javax.jms</groupId>
+					<artifactId>jms</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxrs</artifactId>
+			<version>3.0.1.Final</version>
+			<exclusions>
+				<exclusion>
+					<groupId>org.slf4j</groupId>
+					<artifactId>slf4j-simple</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>resteasy-jaxb-provider</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.jboss.resteasy</groupId>
+			<artifactId>async-http-servlet-3.0</artifactId>
+			<version>3.0.1.Final</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-server</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.eclipse.jetty</groupId>
+			<artifactId>jetty-servlet</artifactId>
+			<version>9.3.0.M0</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-simple</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-api</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+		<dependency>
+			<groupId>org.slf4j</groupId>
+			<artifactId>slf4j-log4j12</artifactId>
+			<version>${global_slf4j_version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-assembly-plugin</artifactId>
+				<configuration>
+					<archive>
+						<manifest>
+							<mainClass>org.apache.metron.pcapservice.rest.PcapService</mainClass>
+						</manifest>
+					</archive>
+					<descriptorRefs>
+						<descriptorRef>jar-with-dependencies</descriptorRef>
+					</descriptorRefs>
+				</configuration>
+				<executions>
+					<execution>
+						<id>make-assembly</id> <!-- this is used for inheritance merges -->
+						<phase>package</phase> <!-- bind to the packaging phase -->
+						<goals>
+							<goal>single</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java b/metron-platform/metron-api/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
new file mode 100644
index 0000000..f0d9f5e
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+public class OnlyDeleteExpiredFilesCompactionPolicy extends RatioBasedCompactionPolicy {
+  private static final Log LOG = LogFactory.getLog(OnlyDeleteExpiredFilesCompactionPolicy.class);
+
+  /**
+   * Constructor.
+   * 
+   * @param conf
+   *          The Conf.
+   * @param storeConfigInfo
+   *          Info about the store.
+   */
+  public OnlyDeleteExpiredFilesCompactionPolicy(final Configuration conf, final StoreConfigInformation storeConfigInfo) {
+    super(conf, storeConfigInfo);
+  }
+
+  @Override
+  final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates, final boolean mayUseOffPeak,
+      final boolean mayBeStuck) throws IOException {
+    LOG.info("Sending empty list for compaction to avoid compaction and do only deletes of files older than TTL");
+
+    return new ArrayList<StoreFile>();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/api/ConfigurationManager.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/api/ConfigurationManager.java b/metron-platform/metron-api/src/main/java/org/apache/metron/api/ConfigurationManager.java
new file mode 100644
index 0000000..5b3e4ae
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/api/ConfigurationManager.java
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.api;
+
+
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.configuration.CombinedConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.DefaultConfigurationBuilder;
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+/**
+ * Configuration manager class which loads all 'config-definition.xml' files and
+ * creates a Configuration object which holds all properties from the underlying
+ * configuration resource
+ */
+public class ConfigurationManager {
+
+  /** configuration definition file name. */
+  private static String DEFAULT_CONFIG_DEFINITION_FILE_NAME = "config-definition.xml";
+
+  /** Stores a map with the configuration for each path specified. */
+  private static Map<String, Configuration> configurationsCache = new HashMap<String, Configuration>();
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(ConfigurationManager.class);
+
+  /**
+   * Common method to load content of all configuration resources defined in
+   * 'config-definition.xml'.
+   * 
+   * @param configDefFilePath
+   *          the config def file path
+   * @return Configuration
+   */
+  public static Configuration getConfiguration(String configDefFilePath) {
+    if (configurationsCache.containsKey(configDefFilePath)) {
+      return configurationsCache.get(configDefFilePath);
+    }
+    CombinedConfiguration configuration = null;
+    synchronized (configurationsCache) {
+      if (configurationsCache.containsKey(configDefFilePath)) {
+        return configurationsCache.get(configDefFilePath);
+      }
+      DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
+      String fielPath = getConfigDefFilePath(configDefFilePath);
+      LOGGER.info("loading from 'configDefFilePath' :" + fielPath);
+      builder.setFile(new File(fielPath));
+      try {
+        configuration = builder.getConfiguration(true);
+        configurationsCache.put(fielPath, configuration);
+      } catch (ConfigurationException e) {
+        LOGGER.info("Exception in loading property files.", e);
+      }
+    }
+    return configuration;
+  }
+
+  /**
+   * Removes the configuration created from a config definition file located at
+   * 'configDefFilePath'.
+   * 
+   * @param configDefFilePath
+   *          path to the config definition file
+   */
+  public static void clearConfiguration(String configDefFilePath) {
+    configurationsCache.remove(configDefFilePath);
+  }
+
+  /**
+   * Gets the configuration.
+   * 
+   * @return the configuration
+   */
+  public static Configuration getConfiguration() {
+    return getConfiguration(null);
+  }
+
+  /**
+   * Returns the 'config-definition.xml' file path. 1. If the param
+   * 'configDefFilePath' has a valid value, returns configDefFilePath 2. If the
+   * system property key 'configDefFilePath' has a valid value, returns the
+   * value 3. By default, it returns the file name 'config-definition.xml'
+   * 
+   * @param configDefFilePath
+   *          given input path to the config definition file
+   * @return the config def file path
+   */
+  private static String getConfigDefFilePath(String configDefFilePath) {
+    if (StringUtils.isNotEmpty(configDefFilePath)) {
+      return configDefFilePath;
+    }
+    return DEFAULT_CONFIG_DEFINITION_FILE_NAME;
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the args
+   * @throws InterruptedException
+   *           the interrupted exception
+   */
+  public static void main(String[] args) throws InterruptedException {
+    Configuration config = ConfigurationManager
+        .getConfiguration("/Users/Sayi/Documents/config/config-definition-dpi.xml");
+    System.out.println("elastic.search.cluster ="
+        + config.getString("elastic.search.cluster"));
+    Thread.sleep(10000);
+    System.out.println("storm.topology.dpi.bolt.es-index.index.name ="
+        + config.getString("storm.topology.dpi.bolt.es-index.index.name"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/api/helper/service/PcapServiceCli.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/api/helper/service/PcapServiceCli.java b/metron-platform/metron-api/src/main/java/org/apache/metron/api/helper/service/PcapServiceCli.java
new file mode 100644
index 0000000..9aa4714
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/api/helper/service/PcapServiceCli.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.api.helper.service;
+
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+
+public class PcapServiceCli {
+
+	private String[] args = null;
+	private Options options = new Options();
+
+	int port = 8081;
+	String uri = "/pcapGetter";
+
+	public int getPort() {
+		return port;
+	}
+
+	public void setPort(int port) {
+		this.port = port;
+	}
+
+	public String getUri() {
+		return uri;
+	}
+
+	public void setUri(String uri) {
+		this.uri = uri;
+	}
+
+	public PcapServiceCli(String[] args) {
+
+		this.args = args;
+
+		Option help = new Option("h", "Display help menue");
+		options.addOption(help);
+		options.addOption(
+				"port",
+				true,
+				"OPTIONAL ARGUMENT [portnumber] If this argument sets the port for starting the service.  If this argument is not set the port will start on defaut port 8081");
+		options.addOption(
+				"endpoint_uri",
+				true,
+				"OPTIONAL ARGUMENT [/uri/to/service] This sets the URI for the service to be hosted.  The default URI is /pcapGetter");
+	}
+
+	public void parse() {
+		CommandLineParser parser = new BasicParser();
+
+		CommandLine cmd = null;
+
+		try {
+			cmd = parser.parse(options, args);
+		} catch (ParseException e1) {
+
+			e1.printStackTrace();
+		}
+
+		if (cmd.hasOption("h"))
+			help();
+
+		if (cmd.hasOption("port")) {
+
+			try {
+				port = Integer.parseInt(cmd.getOptionValue("port").trim());
+			} catch (Exception e) {
+
+				System.out.println("[Metron] Invalid value for port entered");
+				help();
+			}
+		}
+		if (cmd.hasOption("endpoint_uri")) {
+
+			try {
+
+				if (uri == null || uri.equals(""))
+					throw new Exception("invalid uri");
+
+				uri = cmd.getOptionValue("uri").trim();
+
+				if (uri.charAt(0) != '/')
+					uri = "/" + uri;
+
+				if (uri.charAt(uri.length()) == '/')
+					uri = uri.substring(0, uri.length() - 1);
+
+			} catch (Exception e) {
+				System.out.println("[Metron] Invalid URI entered");
+				help();
+			}
+		}
+
+	}
+
+	private void help() {
+		// This prints out some help
+		HelpFormatter formater = new HelpFormatter();
+
+		formater.printHelp("Topology Options:", options);
+
+		// System.out
+		// .println("[Metron] Example usage: \n storm jar Metron-Topologies-0.3BETA-SNAPSHOT.jar org.apache.metron.topology.Bro -local_mode true -config_path Metron_Configs/ -generator_spout true");
+
+		System.exit(0);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/CellTimestampComparator.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/CellTimestampComparator.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/CellTimestampComparator.java
new file mode 100644
index 0000000..abea5fa
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/CellTimestampComparator.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.util.Comparator;
+
+import org.apache.hadoop.hbase.Cell;
+
+/**
+ * Comparator for sorting pcap cells by timestamp in ascending order.
+ * 
+ * @author Sayi
+ */
+public class CellTimestampComparator implements Comparator<Cell> {
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
+   */
+  
+  public int compare(Cell o1, Cell o2) {
+    return Long.valueOf(o1.getTimestamp()).compareTo(o2.getTimestamp());
+  }
+}


[03/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PacketInfo.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PacketInfo.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PacketInfo.java
deleted file mode 100644
index 76b8f9b..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PacketInfo.java
+++ /dev/null
@@ -1,471 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.text.MessageFormat;
-import org.apache.log4j.Logger;
-
-import org.krakenapps.pcap.decoder.ip.Ipv4Packet;
-import org.krakenapps.pcap.decoder.tcp.TcpPacket;
-import org.krakenapps.pcap.decoder.udp.UdpPacket;
-import org.krakenapps.pcap.file.GlobalHeader;
-import org.krakenapps.pcap.packet.PacketHeader;
-import org.krakenapps.pcap.packet.PcapPacket;
-
-import org.apache.metron.pcap.Constants;
-import org.apache.metron.pcap.PcapUtils;
-
-/**
- * The Class PacketInfo.
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public class PacketInfo {
-
-  /** The packetHeader. */
-  private PacketHeader packetHeader = null;
-
-  /** The packet. */
-  private PcapPacket packet = null;
-
-  /** The ipv4 packet. */
-  private Ipv4Packet ipv4Packet = null;
-
-  /** The tcp packet. */
-  private TcpPacket tcpPacket = null;
-
-  /** The udp packet. */
-  private UdpPacket udpPacket = null;
-
-  /** The global header. */
-  private GlobalHeader globalHeader = null;
-
-  /** The Constant globalHeaderJsonTemplateSB. */
-  private static final StringBuffer globalHeaderJsonTemplateSB = new StringBuffer();
-
-  /** The Constant ipv4HeaderJsonTemplateSB. */
-  private static final StringBuffer ipv4HeaderJsonTemplateSB = new StringBuffer();
-
-  /** The Constant tcpHeaderJsonTemplateSB. */
-  private static final StringBuffer tcpHeaderJsonTemplateSB = new StringBuffer();
-
-  /** The Constant udpHeaderJsonTemplateSB. */
-  private static final StringBuffer udpHeaderJsonTemplateSB = new StringBuffer();
-
-  /** The Constant LOG. */
-  private static final Logger LOG = Logger.getLogger(PacketInfo.class);
-  
-  static {
-    globalHeaderJsonTemplateSB.append("<\"global_header\":<\"pcap_id\":\"").append("{0}").append('"');
-    globalHeaderJsonTemplateSB.append(",\"inc_len\":").append("{1}");
-    globalHeaderJsonTemplateSB.append(",\"orig_len\":").append("{2}");
-    globalHeaderJsonTemplateSB.append(",\"ts_sec\":").append("{3}");
-    globalHeaderJsonTemplateSB.append(",\"ts_usec\":").append("{4}");
-    globalHeaderJsonTemplateSB.append(">,"); // NOPMD by sheetal on 1/29/14 2:37
-    // PM
-
-    // ipv4 header
-
-    ipv4HeaderJsonTemplateSB.append("\"ipv4_header\":");
-
-    ipv4HeaderJsonTemplateSB.append("\"ip_dst\":").append("{0}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_dst_addr\":\"").append("{1}");
-    ipv4HeaderJsonTemplateSB.append("\",\"ip_flags\":").append("{2}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_fragment_offset\":").append("{3}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_header_checksum\":").append("{4}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_id\":").append("{5}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_header_length\":").append("{6}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_protocol\":").append("{7}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_src\":").append("{8}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_src_addr\":\"").append("{9}");
-    ipv4HeaderJsonTemplateSB.append("\",\"ip_tos\":").append("{10}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_total_length\":").append("{11}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_ttl\":").append("{12}");
-    ipv4HeaderJsonTemplateSB.append(",\"ip_version\":").append("{13}");
-    ipv4HeaderJsonTemplateSB.append('>');
-
-    // tcp header
-    tcpHeaderJsonTemplateSB.append(",\"tcp_header\":<\"ack\":").append("{0}");
-    tcpHeaderJsonTemplateSB.append(",\"checksum\":").append("{1}");
-    tcpHeaderJsonTemplateSB.append(",\"data_length\":").append("{2}");
-    tcpHeaderJsonTemplateSB.append(",\"data_offset\":").append("{3}");
-    tcpHeaderJsonTemplateSB.append(",\"dst_addr\":\"").append("{4}");
-    tcpHeaderJsonTemplateSB.append("\",\"dst_port\":").append("{5}");
-    tcpHeaderJsonTemplateSB.append(",\"direction\":").append("{6}");
-    tcpHeaderJsonTemplateSB.append(",\"flags\":").append("{7}");
-    tcpHeaderJsonTemplateSB.append(",\"reassembled_length \":").append("{8}");
-    tcpHeaderJsonTemplateSB.append(",\"relative_ack\":").append("{9}");
-    tcpHeaderJsonTemplateSB.append(",\"relative_seq\":").append("{10}");
-    tcpHeaderJsonTemplateSB.append(",\"seq\":").append("{11}");
-    tcpHeaderJsonTemplateSB.append(",\"session_key\":\"").append("{12}");
-    tcpHeaderJsonTemplateSB.append("\",\"src_addr\":\"").append("{13}");
-    tcpHeaderJsonTemplateSB.append("\",\"src_port\":").append("{14}");
-    tcpHeaderJsonTemplateSB.append(",\"total_length\":").append("{15}");
-    tcpHeaderJsonTemplateSB.append(",\"urgent_pointer\":").append("{16}");
-    tcpHeaderJsonTemplateSB.append(",\"window\":").append("{17}");
-    tcpHeaderJsonTemplateSB.append(">>");
-
-    // udp headers
-    udpHeaderJsonTemplateSB.append(",\"udp_header\":<\"checksum\":").append("{0}");
-    udpHeaderJsonTemplateSB.append(",\"dst_port\":").append("{1}");
-    udpHeaderJsonTemplateSB.append(",\"length\":").append("{2}");
-    udpHeaderJsonTemplateSB.append(",\"src_port\":").append("{3}");
-    udpHeaderJsonTemplateSB.append(",\"dst_addr\":\"").append("{4}");
-    udpHeaderJsonTemplateSB.append("\",\"src_addr\":\"").append("{5}").append('"');
-    tcpHeaderJsonTemplateSB.append(">>");
-
-  }
-
-  /** The Constant globalHeaderJsonTemplateString. */
-  private static final String globalHeaderJsonTemplateString = globalHeaderJsonTemplateSB.toString();
-
-  /** The Constant ipv4HeaderJsonTemplateString. */
-  private static final String ipv4HeaderJsonTemplateString = ipv4HeaderJsonTemplateSB.toString();
-
-  /** The Constant tcpHeaderJsonTemplateString. */
-  private static final String tcpHeaderJsonTemplateString = tcpHeaderJsonTemplateSB.toString();
-
-  /** The Constant udpHeaderJsonTemplateString. */
-  private static final String udpHeaderJsonTemplateString = udpHeaderJsonTemplateSB.toString();
-
-  /**
-   * Instantiates a new packet info.
-   * 
-   * @param globalHeader
-   *          the global header
-   * @param packetHeader
-   *          the packet header
-   * @param packet
-   *          the packet
-   * @param ipv4Packet
-   *          the ipv4 packet
-   * @param tcpPacket
-   *          the tcp packet
-   * @param udpPacket
-   *          the udp packet
-   */
-  public PacketInfo(GlobalHeader globalHeader, PacketHeader packetHeader, PcapPacket packet, Ipv4Packet ipv4Packet, TcpPacket tcpPacket,
-      UdpPacket udpPacket) {
-    this.packetHeader = packetHeader;
-    this.packet = packet;
-    this.ipv4Packet = ipv4Packet;
-    this.tcpPacket = tcpPacket;
-    this.udpPacket = udpPacket;
-    this.globalHeader = globalHeader;
-  }
-
-  /**
-   * Gets the global header.
-   * 
-   * @return the global header
-   */
-  public GlobalHeader getGlobalHeader() {
-    return globalHeader;
-  }
-
-  /**
-   * Gets the packet header.
-   * 
-   * 
-   * @return the packet header
-   */
-  public PacketHeader getPacketHeader() {
-    return packetHeader;
-  }
-
-  /**
-   * Gets the packet.
-   * 
-   * 
-   * @return the packet
-   */
-  public PcapPacket getPacket() {
-    return packet;
-  }
-
-  /**
-   * Gets the ipv4 packet.
-   * 
-   * 
-   * @return the ipv4 packet
-   */
-  public Ipv4Packet getIpv4Packet() {
-    return ipv4Packet;
-  }
-
-  /**
-   * Gets the tcp packet.
-   * 
-   * 
-   * @return the tcp packet
-   */
-  public TcpPacket getTcpPacket() {
-    return tcpPacket;
-  }
-
-  /**
-   * Gets the udp packet.
-   * 
-   * 
-   * @return the udp packet
-   */
-  public UdpPacket getUdpPacket() {
-    return udpPacket;
-  }
-
-  /**
-   * Gets the key.
-   * 
-   * 
-   * @return the key
-   */
-  public String getKey() {
-    int sourcePort = 0;
-    int destinationPort = 0;
-    if (Constants.PROTOCOL_UDP == ipv4Packet.getProtocol()) {
-      sourcePort = udpPacket.getSourcePort();
-
-      destinationPort = udpPacket.getDestinationPort();
-
-    } else if (Constants.PROTOCOL_TCP == ipv4Packet.getProtocol()) {
-      sourcePort = tcpPacket.getSourcePort();
-
-      destinationPort = tcpPacket.getDestinationPort();
-
-    }
-
-    return PcapUtils.getSessionKey(ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet.getDestinationAddress().getHostAddress(),
-        ipv4Packet.getProtocol(), sourcePort, destinationPort, ipv4Packet.getId(), ipv4Packet.getFragmentOffset());
-
-  }
-
-  /**
-   * Gets the short key
-   * 
-   * 
-   * @return the short key
-   */
-  public String getShortKey() {
-	int sourcePort = 0;
-	int destinationPort = 0;
-	if(Constants.PROTOCOL_UDP == ipv4Packet.getProtocol()) {
-		sourcePort = udpPacket.getSourcePort();
-		destinationPort = udpPacket.getDestinationPort();
-	} else if (Constants.PROTOCOL_TCP == ipv4Packet.getProtocol()) {
-		sourcePort = tcpPacket.getSourcePort();
-		destinationPort = tcpPacket.getDestinationPort();
-	}
-	  
-	return PcapUtils.getShortSessionKey(ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet.getDestinationAddress().getHostAddress(),
-	    ipv4Packet.getProtocol(), sourcePort, destinationPort);
-			 
-  }
-  
-  /**
-   * Gets the json doc.
-   * 
-   * 
-   * @return the json doc
-   */
-  public String getJsonDoc() {
-
-    return getJsonDocUsingSBAppend();
-  }
-
-  /**
-   * Gets the json doc.
-   * 
-   * 
-   * @return the json doc
-   */
-  public String getJsonIndexDoc() {
-
-    return getJsonIndexDocUsingSBAppend();
-  }
-
-  /**
-   * Gets the json doc using sb append.
-   * 
-   * @return the json doc using sb append
-   */
-  private String getJsonDocUsingSBAppend() {
-
-	
-    StringBuffer jsonSb = new StringBuffer(1024);
-
-    // global header
-    jsonSb.append("{\"global_header\":{\"pcap_id\":\"").append(getKey());
-    jsonSb.append("\",\"inc_len\":").append(packetHeader.getInclLen());
-    jsonSb.append(",\"orig_len\":").append(packetHeader.getOrigLen());
-    jsonSb.append(",\"ts_sec\":").append(packetHeader.getTsSec());
-    jsonSb.append(",\"ts_usec\":").append(packetHeader.getTsUsec());
-    jsonSb.append("},"); // NOPMD by sheetal on 1/29/14 2:37 PM
-
-    // ipv4 header
-
-    jsonSb.append("\"ipv4_header\":{");
-
-    jsonSb.append("\"ip_dst\":").append(ipv4Packet.getDestination());
-    jsonSb.append(",\"ip_dst_addr\":\"").append(ipv4Packet.getDestinationAddress().getHostAddress());
-    jsonSb.append("\",\"ip_flags\":").append(ipv4Packet.getFlags());
-    jsonSb.append(",\"ip_fragment_offset\":").append(ipv4Packet.getFragmentOffset());
-    jsonSb.append(",\"ip_header_checksum\":").append(ipv4Packet.getHeaderChecksum());
-    jsonSb.append(",\"ip_id\":").append(ipv4Packet.getId());
-    jsonSb.append(",\"ip_header_length\":").append(ipv4Packet.getIhl());
-    jsonSb.append(",\"ip_protocol\":").append(ipv4Packet.getProtocol());
-    jsonSb.append(",\"ip_src\":").append(ipv4Packet.getSource());
-    jsonSb.append(",\"ip_src_addr\":\"").append(ipv4Packet.getSourceAddress().getHostAddress());
-    jsonSb.append("\",\"ip_tos\":").append(ipv4Packet.getTos());
-    jsonSb.append(",\"ip_total_length\":").append(ipv4Packet.getTotalLength());
-    jsonSb.append(",\"ip_ttl\":").append(ipv4Packet.getTtl());
-    jsonSb.append(",\"ip_version\":").append(ipv4Packet.getVersion());
-    jsonSb.append('}');
-
-    // tcp header
-    if (tcpPacket != null) {
-      jsonSb.append(",\"tcp_header\":{\"ack\":").append(tcpPacket.getAck());
-      jsonSb.append(",\"checksum\":").append(tcpPacket.getChecksum());
-      jsonSb.append(",\"data_length\":").append(tcpPacket.getDataLength());
-      jsonSb.append(",\"data_offset\":").append(tcpPacket.getDataOffset());
-      jsonSb.append(",\"dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
-      jsonSb.append("\",\"dst_port\":").append(tcpPacket.getDestinationPort());
-      jsonSb.append(",\"direction\":").append(tcpPacket.getDirection());
-      jsonSb.append(",\"flags\":").append(tcpPacket.getFlags());
-      jsonSb.append(",\"reassembled_length \":").append(tcpPacket.getReassembledLength());
-      jsonSb.append(",\"relative_ack\":").append(tcpPacket.getRelativeAck());
-      jsonSb.append(",\"relative_seq\":").append(tcpPacket.getRelativeSeq());
-      jsonSb.append(",\"seq\":").append(tcpPacket.getSeq());
-      jsonSb.append(",\"session_key\":\"").append(tcpPacket.getSessionKey());
-      jsonSb.append("\",\"src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
-      jsonSb.append("\",\"src_port\":").append(tcpPacket.getSourcePort());
-      jsonSb.append(",\"total_length\":").append(tcpPacket.getTotalLength());
-      jsonSb.append(",\"urgent_pointer\":").append(tcpPacket.getUrgentPointer());
-      jsonSb.append(",\"window\":").append(tcpPacket.getWindow());
-      jsonSb.append('}');
-    }
-
-    // udp headers
-    if (udpPacket != null) {
-      jsonSb.append(",\"udp_header\":{\"checksum\":").append(udpPacket.getChecksum());
-      jsonSb.append(",\"dst_port\":").append(udpPacket.getDestinationPort());
-      jsonSb.append(",\"length\":").append(udpPacket.getLength());
-      jsonSb.append(",\"src_port\":").append(udpPacket.getSourcePort());
-      jsonSb.append(",\"dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
-      jsonSb.append("\",\"src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
-      jsonSb.append("\"}");
-    }
-
-    jsonSb.append('}');
-
-    return jsonSb.toString();
-  }
-
-  /**
-   * Gets the json doc using message format.
-   * 
-   * @return the json doc using message format
-   */
-  private String getJsonDocUsingMessageFormat() {
-
-    StringBuffer jsonSb = new StringBuffer(600);
-
-    jsonSb.append(MessageFormat.format(globalHeaderJsonTemplateString, getKey(), packetHeader.getInclLen(), packetHeader.getOrigLen(),
-        packetHeader.getTsSec(), packetHeader.getTsUsec()));
-
-    jsonSb.append(MessageFormat.format(ipv4HeaderJsonTemplateString, ipv4Packet.getDestination(), ipv4Packet.getDestinationAddress()
-        .getHostAddress(), ipv4Packet.getFlags(), ipv4Packet.getFragmentOffset(), ipv4Packet.getHeaderChecksum(), ipv4Packet.getId(),
-        ipv4Packet.getIhl(), ipv4Packet.getProtocol(), ipv4Packet.getSource(), ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet
-            .getTos(), ipv4Packet.getTotalLength(), ipv4Packet.getTtl(), ipv4Packet.getVersion()));
-
-    // tcp header
-    if (tcpPacket != null) {
-      jsonSb.append(MessageFormat.format(tcpHeaderJsonTemplateString, tcpPacket.getAck(), tcpPacket.getChecksum(), tcpPacket
-          .getDataLength(), tcpPacket.getDataOffset(), tcpPacket.getDestinationAddress().getHostAddress(), tcpPacket.getDestinationPort(),
-          tcpPacket.getDirection(), tcpPacket.getFlags(), tcpPacket.getReassembledLength(), tcpPacket.getRelativeAck(), tcpPacket
-              .getRelativeSeq(), tcpPacket.getSeq(), tcpPacket.getSessionKey(), tcpPacket.getSourceAddress().getHostAddress(), tcpPacket
-              .getSourcePort(), tcpPacket.getTotalLength(), tcpPacket.getUrgentPointer(), tcpPacket.getWindow()));
-    } else
-    // udp headers
-    if (udpPacket != null) {
-      jsonSb.append(MessageFormat.format(udpHeaderJsonTemplateString, udpPacket.getChecksum(), udpPacket.getDestinationPort(),
-          udpPacket.getLength(), udpPacket.getSourcePort(), udpPacket.getDestination().getAddress().getHostAddress(), udpPacket.getSource()
-              .getAddress().getHostAddress()));
-
-    } else {
-      jsonSb.append('}');
-    }
-    return jsonSb.toString().replace('<', '{').replace('>', '}');
-  }
-
-  /**
-   * Gets the json index doc using sb append.
-   * 
-   * @return the json index doc using sb append
-   */
-  private String getJsonIndexDocUsingSBAppend() {
-
-	Long ts_micro = getPacketTimeInNanos() / 1000L;
-	StringBuffer jsonSb = new StringBuffer(175);
-
-	jsonSb.append("{\"pcap_id\":\"").append(getShortKey());
-    jsonSb.append("\",\"ip_protocol\":").append(ipv4Packet.getProtocol());
-    jsonSb.append(",\"ip_id\":").append(ipv4Packet.getId());
-    jsonSb.append(",\"frag_offset\":").append(ipv4Packet.getFragmentOffset());
-    jsonSb.append(",\"ts_micro\":").append(ts_micro);
-
-
-    // tcp header
-    if (tcpPacket != null) {
-      jsonSb.append(",\"ip_src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
-      jsonSb.append("\",\"ip_src_port\":").append(tcpPacket.getSourcePort());
-      jsonSb.append(",\"ip_dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
-      jsonSb.append("\",\"ip_dst_port\":").append(tcpPacket.getDestinationPort());
-    }
-
-    // udp headers
-    if (udpPacket != null) {
-      jsonSb.append(",\"ip_src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
-      jsonSb.append("\",\"ip_src_port\":").append(udpPacket.getSourcePort());
-      jsonSb.append(",\"ip_dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
-      jsonSb.append("\",\"ip_dst_port\":").append(udpPacket.getDestinationPort());
-    }
-
-    jsonSb.append('}');
-
-    return jsonSb.toString();
-  }
-  
-  public long getPacketTimeInNanos()
-  {
-	  if ( getGlobalHeader().getMagicNumber() == 0xa1b2c3d4 || getGlobalHeader().getMagicNumber() == 0xd4c3b2a1 )
-	  {
-		  //Time is in micro assemble as nano
-		  LOG.info("Times are in micro according to the magic number");
-		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ; 
-	  }
-	  else if ( getGlobalHeader().getMagicNumber() == 0xa1b23c4d || getGlobalHeader().getMagicNumber() == 0x4d3cb2a1 ) {
-		//Time is in nano assemble as nano
-		  LOG.info("Times are in nano according to the magic number");
-		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() ; 
-	  }
-	  //Default assume time is in micro assemble as nano
-	  LOG.warn("Unknown magic number. Defaulting to micro");
-	  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ;  
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
deleted file mode 100644
index e2d56c8..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-
-import org.krakenapps.pcap.PcapInputStream;
-import org.krakenapps.pcap.file.GlobalHeader;
-import org.krakenapps.pcap.packet.PacketHeader;
-import org.krakenapps.pcap.packet.PcapPacket;
-import org.krakenapps.pcap.util.Buffer;
-import org.krakenapps.pcap.util.ByteOrderConverter;
-import org.krakenapps.pcap.util.ChainBuffer;
-
-/**
- * The Class PcapByteInputStream.
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public class PcapByteInputStream implements PcapInputStream {
-
-  /** The is. */
-  private DataInputStream is;
-
-  /** The global header. */
-  private GlobalHeader globalHeader;
-
-  /**
-   * Opens pcap file input stream.
-   * 
-   * @param pcap
-   *          the byte array to be read
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public PcapByteInputStream(byte[] pcap) throws IOException {
-    is = new DataInputStream(new ByteArrayInputStream(pcap)); // $codepro.audit.disable
-                                                              // closeWhereCreated
-    readGlobalHeader();
-  }
-
-  /**
-   * Reads a packet from pcap byte array.
-   * 
-   * @return the packet throws IOException the stream has been closed and the
-   *         contained input stream does not support reading after close, or
-   *         another I/O error occurs. * @throws IOException Signals that an I/O
-   *         exception has occurred. * @see
-   *         org.krakenapps.pcap.PcapInputStream#getPacket()
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-
-  public PcapPacket getPacket() throws IOException {
-    return readPacket(globalHeader.getMagicNumber());
-  }
-
-  /**
-   * Gets the global header.
-   * 
-   * 
-   * @return the global header
-   */
-  public GlobalHeader getGlobalHeader() {
-    return globalHeader;
-  }
-
-  /**
-   * Read global header.
-   * 
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private void readGlobalHeader() throws IOException {
-    int magic = is.readInt();
-    short major = is.readShort();
-    short minor = is.readShort();
-    int tz = is.readInt();
-    int sigfigs = is.readInt();
-    int snaplen = is.readInt();
-    int network = is.readInt();
-
-    globalHeader = new GlobalHeader(magic, major, minor, tz, sigfigs, snaplen,
-        network);
-
-    if (globalHeader.getMagicNumber() == 0xD4C3B2A1) {
-      globalHeader.swapByteOrder();
-    }
-  }
-
-  /**
-   * Read packet.
-   * 
-   * @param magicNumber
-   *          the magic number
-   * @return the pcap packet * @throws IOException Signals that an I/O exception
-   *         has occurred. * @throws EOFException the EOF exception
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private PcapPacket readPacket(int magicNumber) throws IOException {
-    PacketHeader packetHeader = readPacketHeader(magicNumber);
-    Buffer packetData = readPacketData(packetHeader.getInclLen());
-    return new PcapPacket(packetHeader, packetData);
-  }
-
-  /**
-   * Read packet header.
-   * 
-   * @param magicNumber
-   *          the magic number
-   * @return the packet header * @throws IOException Signals that an I/O
-   *         exception has occurred. * @throws EOFException the EOF exception
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private PacketHeader readPacketHeader(int magicNumber) throws IOException {
-    int tsSec = is.readInt();
-    int tsUsec = is.readInt();
-    int inclLen = is.readInt();
-    int origLen = is.readInt();
-
-    if (magicNumber == 0xD4C3B2A1) {
-      tsSec = ByteOrderConverter.swap(tsSec);
-      tsUsec = ByteOrderConverter.swap(tsUsec);
-      inclLen = ByteOrderConverter.swap(inclLen);
-      origLen = ByteOrderConverter.swap(origLen);
-    }
-
-    return new PacketHeader(tsSec, tsUsec, inclLen, origLen);
-  }
-
-  /**
-   * Read packet data.
-   * 
-   * @param packetLength
-   *          the packet length
-   * @return the buffer * @throws IOException Signals that an I/O exception has
-   *         occurred.
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private Buffer readPacketData(int packetLength) throws IOException {
-    byte[] packets = new byte[packetLength];
-    is.read(packets);
-
-    Buffer payload = new ChainBuffer();
-    payload.addLast(packets);
-    return payload;
-    // return new PacketPayload(packets);
-  }
-
-  /**
-   * Closes pcap stream handle.
-   * 
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred. * @see
-   *           org.krakenapps.pcap.PcapInputStream#close()
-   */
-
-  public void close() throws IOException {
-    is.close(); // $codepro.audit.disable closeInFinally
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
deleted file mode 100644
index 06d6af6..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
+++ /dev/null
@@ -1,305 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// $codepro.audit.disable explicitThisUsage, lossOfPrecisionInCast
-package org.apache.metron.pcap;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.BufferUnderflowException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-import org.krakenapps.pcap.PcapOutputStream;
-import org.krakenapps.pcap.file.GlobalHeader;
-import org.krakenapps.pcap.packet.PacketHeader;
-import org.krakenapps.pcap.packet.PcapPacket;
-import org.krakenapps.pcap.util.Buffer;
-
-// TODO: Auto-generated Javadoc
-/**
- * The Class PcapByteOutputStream.
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public class PcapByteOutputStream implements PcapOutputStream {
-
-  /** The Constant LOG. */
-  private static final Logger LOG = Logger
-      .getLogger(PcapByteOutputStream.class);
-
-  /** The Constant MAX_CACHED_PACKET_NUMBER. */
-  private static final int MAX_CACHED_PACKET_NUMBER = 1000;
-
-  /** The cached packet num. */
-  private int cachedPacketNum = 0; // NOPMD by sheetal on 1/29/14 2:34 PM
-
-  /** The baos. */
-  private ByteArrayOutputStream baos; // NOPMD by sheetal on 1/29/14 2:34 PM
-
-  /** The list. */
-  private List<Byte> list; // NOPMD by sheetal on 1/29/14 2:34 PM
-
-  /**
-   * Instantiates a new pcap byte output stream.
-   * 
-   * @param baos
-   *          the baos
-   */
-  public PcapByteOutputStream(ByteArrayOutputStream baos) {
-    this.baos = baos;
-    list = new ArrayList<Byte>();
-    createGlobalHeader();
-  }
-
-  /**
-   * Instantiates a new pcap byte output stream.
-   * 
-   * @param baos
-   *          the baos
-   * @param header
-   *          the header
-   */
-  public PcapByteOutputStream(ByteArrayOutputStream baos, GlobalHeader header) {
-    this.baos = baos;
-    list = new ArrayList<Byte>();
-    copyGlobalHeader(header);
-  }
-
-  /**
-   * Creates the global header.
-   */
-  private void createGlobalHeader() {
-    /* magic number(swapped) */
-    list.add((byte) 0xd4);
-    list.add((byte) 0xc3);
-    list.add((byte) 0xb2);
-    list.add((byte) 0xa1);
-
-    /* major version number */
-    list.add((byte) 0x02);
-    list.add((byte) 0x00);
-
-    /* minor version number */
-    list.add((byte) 0x04);
-    list.add((byte) 0x00);
-
-    /* GMT to local correction */
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-
-    /* accuracy of timestamps */
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-
-    /* max length of captured packets, in octets */
-    list.add((byte) 0xff);
-    list.add((byte) 0xff);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-
-    /* data link type(ethernet) */
-    list.add((byte) 0x01);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-    list.add((byte) 0x00);
-  }
-
-  /**
-   * Copy global header.
-   * 
-   * @param header
-   *          the header
-   */
-  private void copyGlobalHeader(GlobalHeader header) {
-    final byte[] magicNumber = intToByteArray(header.getMagicNumber());
-    final byte[] majorVersion = shortToByteArray(header.getMajorVersion());
-    final byte[] minorVersion = shortToByteArray(header.getMinorVersion());
-    final byte[] zone = intToByteArray(header.getThiszone());
-    final byte[] sigFigs = intToByteArray(header.getSigfigs());
-    final byte[] snapLen = intToByteArray(header.getSnaplen());
-    final byte[] network = intToByteArray(header.getNetwork());
-
-    list.add(magicNumber[0]);
-    list.add(magicNumber[1]);
-    list.add(magicNumber[2]);
-    list.add(magicNumber[3]);
-
-    list.add(majorVersion[1]);
-    list.add(majorVersion[0]);
-
-    list.add(minorVersion[1]);
-    list.add(minorVersion[0]);
-
-    list.add(zone[3]);
-    list.add(zone[2]);
-    list.add(zone[1]);
-    list.add(zone[0]);
-
-    list.add(sigFigs[3]);
-    list.add(sigFigs[2]);
-    list.add(sigFigs[1]);
-    list.add(sigFigs[0]);
-
-    list.add(snapLen[3]);
-    list.add(snapLen[2]);
-    list.add(snapLen[1]);
-    list.add(snapLen[0]);
-
-    list.add(network[3]);
-    list.add(network[2]);
-    list.add(network[1]);
-    list.add(network[0]);
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.krakenapps.pcap.PcapOutputStream#write(org.krakenapps.pcap.packet
-   * .PcapPacket)
-   */
-  /**
-   * Method write.
-   * 
-   * @param packet
-   *          PcapPacket
-   * 
-   * 
-   * @throws IOException
-   *           * @see org.krakenapps.pcap.PcapOutputStream#write(PcapPacket) * @see
-   *           org.krakenapps.pcap.PcapOutputStream#write(PcapPacket)
-   */
- 
-  public void write(PcapPacket packet) throws IOException {
-    PacketHeader packetHeader = packet.getPacketHeader();
-
-    int tsSec = packetHeader.getTsSec();
-    int tsUsec = packetHeader.getTsUsec();
-    int inclLen = packetHeader.getInclLen();
-    int origLen = packetHeader.getOrigLen();
-
-    addInt(tsSec);
-    addInt(tsUsec);
-    addInt(inclLen);
-    addInt(origLen);
-
-    Buffer payload = packet.getPacketData();
-
-    try {
-      payload.mark();
-      while (true) {
-        list.add(payload.get());
-      }
-    } catch (BufferUnderflowException e) {
-      //LOG.debug("Ignorable exception while writing packet", e);
-      payload.reset();
-    }
-
-    cachedPacketNum++;
-    if (cachedPacketNum == MAX_CACHED_PACKET_NUMBER) {
-      flush();
-    }
-  }
-
-  /**
-   * Adds the int.
-   * 
-   * @param number
-   *          the number
-   */
-  private void addInt(int number) {
-    list.add((byte) (number & 0xff));
-    list.add((byte) ((number & 0xff00) >> 8));
-    list.add((byte) ((number & 0xff0000) >> 16));
-    list.add((byte) ((number & 0xff000000) >> 24));
-  }
-
-  /**
-   * Int to byte array.
-   * 
-   * @param number
-   *          the number
-   * 
-   * @return the byte[]
-   */
-  private byte[] intToByteArray(int number) {
-    return new byte[] { (byte) (number >>> 24), (byte) (number >>> 16),
-        (byte) (number >>> 8), (byte) number };
-  }
-
-  /**
-   * Short to byte array.
-   * 
-   * @param number
-   *          the number
-   * 
-   * @return the byte[]
-   */
-  private byte[] shortToByteArray(short number) {
-    return new byte[] { (byte) (number >>> 8), (byte) number };
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.krakenapps.pcap.PcapOutputStream#flush()
-   */
-  /**
-   * Method flush.
-   * 
-   * 
-   * @throws IOException
-   *           * @see org.krakenapps.pcap.PcapOutputStream#flush() * @see
-   *           org.krakenapps.pcap.PcapOutputStream#flush()
-   */
- 
-  public void flush() throws IOException {
-    byte[] fileBinary = new byte[list.size()];
-    for (int i = 0; i < fileBinary.length; i++) {
-      fileBinary[i] = list.get(i);
-    }
-
-    list.clear();
-    baos.write(fileBinary);
-    cachedPacketNum = 0;
-  }
-
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.krakenapps.pcap.PcapOutputStream#close()
-   */
-  /**
-   * Method close.
-   * 
-   * 
-   * @throws IOException
-   *           * @see org.krakenapps.pcap.PcapOutputStream#close() * @see
-   *           org.krakenapps.pcap.PcapOutputStream#close()
-   */
- 
-  public void close() throws IOException {
-    flush();
-    baos.close(); // $codepro.audit.disable closeInFinally
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapMerger.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapMerger.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapMerger.java
deleted file mode 100644
index 48d25c7..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapMerger.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.io.ByteArrayOutputStream;
-import java.io.EOFException;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
-import org.krakenapps.pcap.packet.PcapPacket;
-import org.krakenapps.pcap.file.GlobalHeader;
-
-// TODO: Auto-generated Javadoc
-/**
- * The Class PcapMerger.
- * 
- * @author sheetal
- * @version $Revision: 1.0 $
- */
-public final class PcapMerger {
-
-  /** The Constant LOG. */
-  private static final Logger LOG = Logger.getLogger(PcapMerger.class);
-  
-  /** The comparator for PcapPackets */
-  private static PcapPacketComparator PCAP_PACKET_COMPARATOR = new PcapPacketComparator();
-
-  /**
-   * Instantiates a new pcap merger.
-   */
-  private PcapMerger() { // $codepro.audit.disable emptyMethod
-  }
-
-  /**
-   * Merge two pcap byte arrays.
-   * 
-   * @param baos
-   *          the baos
-   * @param pcaps
-   *          the pcaps
-   * 
-   * @throws IOException
-   *           if there is no byte array, no access permission, or other io
-   *           related problems.
-   */
-  // public static void merge(byte[] to, byte[] from) throws IOException {
-  // PcapByteInputStream is = null;
-  // PcapByteOutputStream os = null;
-  // ByteArrayOutputStream baos = null;
-  // try {
-  // is = new PcapByteInputStream(from);
-  // baos = new ByteArrayOutputStream();
-  // os = new PcapByteOutputStream(baos, is.getGlobalHeader());
-  //
-  // writePacket(is, os);
-  // } finally {
-  // closeInput(is);
-  // if (baos != null) {
-  // baos.close();
-  // }
-  // closeOutput(os);
-  // }
-  // }
-
-  public static void merge(ByteArrayOutputStream baos, List<byte[]> pcaps)
-      throws IOException {
-    PcapByteInputStream is = null;
-    PcapByteOutputStream os = null;
-    ByteArrayOutputStream unsortedBaos = new ByteArrayOutputStream();
-    
-    try {
-      int i = 1;
-      for (byte[] pcap : pcaps) {
-        is = new PcapByteInputStream(pcap);
-        if (i == 1) {
-          os = new PcapByteOutputStream(unsortedBaos, is.getGlobalHeader());
-        }
-
-        writePacket(is, os);
-        i++;
-        closeInput(is);
-      }
-    } finally {
-      if (unsortedBaos != null) {
-        unsortedBaos.close();
-      }
-      closeOutput(os);
-      sort(baos, unsortedBaos.toByteArray());
-    }
-  }
-
-  /**
-   * Merge byte array1 with byte array2, and write to output byte array. It
-   * doesn't hurt original pcap dump byte arrays.
-   * 
-   * @param baos
-   *          the baos
-   * @param pcaps
-   *          the pcaps
-   * 
-   * @throws IOException
-   *           if there are no source byte arrays, have no read and/or write
-   *           permissions, or anything else.
-   */
-  public static void merge(ByteArrayOutputStream baos, byte[]... pcaps) // $codepro.audit.disable
-                                                                        // overloadedMethods
-      throws IOException {
-    merge(baos, Arrays.asList(pcaps));
-
-  }
-  
-  /**
-   * Sort the potentially unsorted byte array according to the timestamp
-   * in the packet header
-   * 
-   * @param unsortedBytes
-   * 	a byte array of a pcap file
-   * 
-   * @return byte array of a pcap file with packets in cronological order
-   * 
-   * @throws IOException
-   * 	if there are no source byte arrays, have no read and or write 
-   * 	permission, or anything else.
-   */
-  private static void sort(ByteArrayOutputStream baos, byte[] unsortedBytes) throws IOException {
-	  PcapByteInputStream pcapIs = new PcapByteInputStream(unsortedBytes);
-	  PcapByteOutputStream pcapOs = new PcapByteOutputStream(baos, pcapIs.getGlobalHeader());
-	  PcapPacket packet;
-	  ArrayList<PcapPacket> packetList = new ArrayList<PcapPacket>();
-	  
-	  try {
-		  while (true) {
-			  packet = pcapIs.getPacket();
-			  if (packet == null)
-				  break;
-			  packetList.add(packet);
-			  LOG.debug("Presort packet: " + packet.getPacketHeader().toString());
-		  }
-	  } catch (EOFException e) {
-		  //LOG.debug("Ignoreable exception in sort", e);
-	  }
-	  
-	  Collections.sort(packetList, PCAP_PACKET_COMPARATOR);
-	  for (PcapPacket p : packetList) {
-		  pcapOs.write(p);
-		  LOG.debug("Postsort packet: " + p.getPacketHeader().toString());
-	  }
-	  pcapOs.close();  
-  }
-  
-  /**
-   * Write packet.
-   * 
-   * @param is
-   *          the is
-   * @param os
-   *          the os
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  private static void writePacket(PcapByteInputStream is,
-      PcapByteOutputStream os) throws IOException {
-    PcapPacket packet = null;
-    try {
-      while (true) {
-        packet = is.getPacket();
-        if (packet == null) {
-          break;
-        }
-        os.write(packet);
-      }
-    } catch (EOFException e) {
-      //LOG.debug("Ignorable exception in writePacket", e);
-    }
-
-  }
-
-  /**
-   * Close input.
-   * 
-   * @param is
-   *          the is
-   */
-  private static void closeInput(PcapByteInputStream is) {
-    if (is == null) {
-      return;
-    }
-    try {
-      is.close(); // $codepro.audit.disable closeInFinally
-    } catch (IOException e) {
-      LOG.error("Failed to close input stream", e);
-    }
-  }
-
-  /**
-   * Close output.
-   * 
-   * @param os
-   *          the os
-   */
-  private static void closeOutput(PcapByteOutputStream os) {
-    if (os == null) {
-      return;
-    }
-    try {
-      os.close();
-    } catch (IOException e) {
-      LOG.error("Failed to close output stream", e);
-
-    }
-  }
-
-  /**
-   * The main method.
-   * 
-   * @param args
-   *          the arguments
-   * 
-   * @throws IOException
-   *           Signals that an I/O exception has occurred.
-   */
-  public static void main(String[] args) throws IOException {
-    byte[] b1 = FileUtils.readFileToByteArray(new File(
-        "/Users/sheetal/Downloads/constructedTcpDump.1.pcap"));
-    byte[] b2 = FileUtils.readFileToByteArray(new File(
-        "/Users/sheetal/Downloads/constructedTcpDump.2.pcap"));
-    byte[] b3 = FileUtils.readFileToByteArray(new File(
-        "/Users/sheetal/Downloads/constructedTcpDump.3.pcap"));
-
-    ByteArrayOutputStream boas = new ByteArrayOutputStream(); // $codepro.audit.disable
-                                                              // closeWhereCreated
-    PcapMerger.merge(boas, b1, b2, b3);
-
-    FileUtils.writeByteArrayToFile(new File(
-        "/Users/sheetal/Downloads/constructedTcpDump.automerged.1.2.pcap"),
-        boas.toByteArray(), false);
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
deleted file mode 100644
index 96f64a0..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.util.Comparator;
-
-import org.apache.log4j.Logger;
-
-import org.krakenapps.pcap.packet.PcapPacket;
-
-public class PcapPacketComparator implements Comparator<PcapPacket> {
-
-	/** The Constant LOG. */
-	private static final Logger LOG = Logger.getLogger(PcapMerger.class);
-	
-	public int compare(PcapPacket p1, PcapPacket p2) {
-
-		Long p1time = new Long(p1.getPacketHeader().getTsSec()) * 1000000L + new Long(p1.getPacketHeader().getTsUsec());
-		Long p2time = new Long(p2.getPacketHeader().getTsSec()) * 1000000L + new Long(p2.getPacketHeader().getTsUsec());
-		Long delta = p1time - p2time;
-		LOG.debug("p1time: " + p1time.toString() + " p2time: " + p2time.toString() + " delta: " + delta.toString());
-		return delta.intValue();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapUtils.java
deleted file mode 100644
index 10ab03d..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/PcapUtils.java
+++ /dev/null
@@ -1,475 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import com.google.common.base.Joiner;
-import org.apache.commons.lang.StringUtils;
-
-import com.google.common.collect.BiMap;
-import com.google.common.collect.HashBiMap;
-import org.json.simple.JSONObject;
-
-/**
- * The Class PcapUtils.
- */
-public class PcapUtils {
-
-  /** The Constant SESSION_KEY_SEPERATOR. */
-  private static final char SESSION_KEY_SEPERATOR = '-';
-
-  /** The Constant protocolIdToNameMap. */
-  private static final BiMap<Integer, String> protocolIdToNameMap = HashBiMap
-      .create();
-
-  // private static final Map<Integer, String> protocolIdToNameMap = new
-  // HashMap();
-
-  static {
-
-    protocolIdToNameMap.put(0, "HOPOPT");
-    protocolIdToNameMap.put(1, "ICMP");
-    protocolIdToNameMap.put(2, "IGMP");
-    protocolIdToNameMap.put(3, "GGP");
-    protocolIdToNameMap.put(4, "IPV4");
-    protocolIdToNameMap.put(5, "ST");
-    protocolIdToNameMap.put(6, "TCP");
-    protocolIdToNameMap.put(7, "CBT");
-    protocolIdToNameMap.put(8, "EGP");
-    protocolIdToNameMap.put(9, "IGP");
-    protocolIdToNameMap.put(10, "BBN-RCC-MON");
-    protocolIdToNameMap.put(11, "NVP-II");
-    protocolIdToNameMap.put(12, "PUP");
-    protocolIdToNameMap.put(13, "ARGUS");
-    protocolIdToNameMap.put(14, "EMCON");
-    protocolIdToNameMap.put(15, "XNET");
-    protocolIdToNameMap.put(16, "CHAOS");
-    protocolIdToNameMap.put(17, "UDP");
-    protocolIdToNameMap.put(18, "MUX");
-    protocolIdToNameMap.put(19, "DCN-MEAS");
-    protocolIdToNameMap.put(20, "HMP");
-    protocolIdToNameMap.put(21, "PRM");
-    protocolIdToNameMap.put(22, "XNS-IDP");
-    protocolIdToNameMap.put(23, "TRUNK-1");
-    protocolIdToNameMap.put(24, "TRUNK-2");
-    protocolIdToNameMap.put(25, "LEAF-1");
-    protocolIdToNameMap.put(26, "LEAF-2");
-    protocolIdToNameMap.put(27, "RDP");
-    protocolIdToNameMap.put(28, "IRTP");
-    protocolIdToNameMap.put(29, "ISO-TP4");
-    protocolIdToNameMap.put(30, "NETBLT");
-    protocolIdToNameMap.put(31, "MFE-NSP");
-    protocolIdToNameMap.put(32, "MERIT-INP");
-    protocolIdToNameMap.put(33, "DCCP");
-    protocolIdToNameMap.put(34, "3PC");
-    protocolIdToNameMap.put(35, "IDPR");
-    protocolIdToNameMap.put(36, "XTP");
-    protocolIdToNameMap.put(37, "DDP");
-    protocolIdToNameMap.put(38, "IDPR-CMTP");
-    protocolIdToNameMap.put(39, "TP++");
-    protocolIdToNameMap.put(40, "IL");
-    protocolIdToNameMap.put(41, "IPV6");
-    protocolIdToNameMap.put(42, "SDRP");
-    protocolIdToNameMap.put(43, "IPV6-ROUTE");
-    protocolIdToNameMap.put(44, "IPV6-FRAG");
-    protocolIdToNameMap.put(45, "IDRP");
-    protocolIdToNameMap.put(46, "RSVP");
-    protocolIdToNameMap.put(47, "GRE");
-    protocolIdToNameMap.put(48, "DSR");
-    protocolIdToNameMap.put(49, "BNA");
-    protocolIdToNameMap.put(50, "ESP");
-    protocolIdToNameMap.put(51, "AH");
-    protocolIdToNameMap.put(52, "I-NLSP");
-    protocolIdToNameMap.put(53, "SWIPE");
-    protocolIdToNameMap.put(54, "NARP");
-    protocolIdToNameMap.put(55, "MOBILE");
-    protocolIdToNameMap.put(56, "TLSP");
-    protocolIdToNameMap.put(57, "SKIP");
-    protocolIdToNameMap.put(58, "IPV6-ICMP");
-    protocolIdToNameMap.put(59, "IPV6-NONXT");
-    protocolIdToNameMap.put(60, "IPV6-OPTS");
-    protocolIdToNameMap.put(62, "CFTP");
-    protocolIdToNameMap.put(64, "SAT-EXPAK");
-    protocolIdToNameMap.put(65, "KRYPTOLAN");
-    protocolIdToNameMap.put(66, "RVD");
-    protocolIdToNameMap.put(67, "IPPC");
-    protocolIdToNameMap.put(69, "SAT-MON");
-    protocolIdToNameMap.put(70, "VISA");
-    protocolIdToNameMap.put(71, "IPCV");
-    protocolIdToNameMap.put(72, "CPNX");
-    protocolIdToNameMap.put(73, "CPHB");
-    protocolIdToNameMap.put(74, "WSN");
-    protocolIdToNameMap.put(75, "PVP");
-    protocolIdToNameMap.put(76, "BR-SAT-MON");
-    protocolIdToNameMap.put(77, "SUN-ND");
-    protocolIdToNameMap.put(78, "WB-MON");
-    protocolIdToNameMap.put(79, "WB-EXPAK");
-    protocolIdToNameMap.put(80, "ISO-IP");
-    protocolIdToNameMap.put(81, "VMTP");
-    protocolIdToNameMap.put(82, "SECURE-VMTP");
-    protocolIdToNameMap.put(83, "VINES");
-    protocolIdToNameMap.put(84, "TTP");
-    protocolIdToNameMap.put(85, "NSFNET-IGP");
-    protocolIdToNameMap.put(86, "DGP");
-    protocolIdToNameMap.put(87, "TCF");
-    protocolIdToNameMap.put(88, "EIGRP");
-    protocolIdToNameMap.put(89, "OSPFIGP");
-    protocolIdToNameMap.put(90, "SPRITE-RPC");
-    protocolIdToNameMap.put(91, "LARP");
-    protocolIdToNameMap.put(92, "MTP");
-    protocolIdToNameMap.put(93, "AX.25");
-    protocolIdToNameMap.put(94, "IPIP");
-    protocolIdToNameMap.put(95, "MICP");
-    protocolIdToNameMap.put(96, "SCC-SP");
-    protocolIdToNameMap.put(97, "ETHERIP");
-    protocolIdToNameMap.put(98, "ENCAP");
-    protocolIdToNameMap.put(100, "GMTP");
-    protocolIdToNameMap.put(101, "IFMP");
-    protocolIdToNameMap.put(102, "PNNI");
-    protocolIdToNameMap.put(103, "PIM");
-    protocolIdToNameMap.put(104, "ARIS");
-    protocolIdToNameMap.put(105, "SCPS");
-    protocolIdToNameMap.put(106, "QNX");
-    protocolIdToNameMap.put(107, "A/N");
-    protocolIdToNameMap.put(108, "IPCOMP");
-    protocolIdToNameMap.put(109, "SNP");
-    protocolIdToNameMap.put(110, "COMPAQ-PEER");
-    protocolIdToNameMap.put(111, "IPX-IN-IP");
-    protocolIdToNameMap.put(112, "VRRP");
-    protocolIdToNameMap.put(113, "PGM");
-    protocolIdToNameMap.put(115, "L2TP");
-    protocolIdToNameMap.put(116, "DDX");
-    protocolIdToNameMap.put(117, "IATP");
-    protocolIdToNameMap.put(118, "STP");
-    protocolIdToNameMap.put(119, "SRP");
-    protocolIdToNameMap.put(120, "UTI");
-    protocolIdToNameMap.put(121, "SMP");
-    protocolIdToNameMap.put(122, "SM");
-    protocolIdToNameMap.put(123, "PTP");
-    protocolIdToNameMap.put(124, "ISIS OVER IPV4");
-    protocolIdToNameMap.put(125, "FIRE");
-    protocolIdToNameMap.put(126, "CRTP");
-    protocolIdToNameMap.put(127, "CRUDP");
-    protocolIdToNameMap.put(128, "SSCOPMCE");
-    protocolIdToNameMap.put(129, "IPLT");
-    protocolIdToNameMap.put(130, "SPS");
-    protocolIdToNameMap.put(131, "PIPE");
-    protocolIdToNameMap.put(132, "SCTP");
-    protocolIdToNameMap.put(133, "FC");
-    protocolIdToNameMap.put(134, "RSVP-E2E-IGNORE");
-    protocolIdToNameMap.put(135, "MOBILITY HEADER");
-    protocolIdToNameMap.put(136, "UDPLITE");
-    protocolIdToNameMap.put(137, "MPLS-IN-IP");
-    protocolIdToNameMap.put(138, "MANET");
-    protocolIdToNameMap.put(139, "HIP");
-    protocolIdToNameMap.put(140, "SHIM6");
-    protocolIdToNameMap.put(141, "WESP");
-    protocolIdToNameMap.put(142, "ROHC");
-  }
-
-  /** The Constant protocolNameToIdMap. */
-  private static final BiMap<String, Integer> protocolNameToIdMap = protocolIdToNameMap
-      .inverse();
-
-  // private static final Map<String, Integer> protocolNameToIdMap =
-  // invertMap(protocolIdToNameMap);
-
-  /**
-   * Convert ipv4 ip to hex.
-   * 
-   * @param ipAddress
-   *          the ip address
-   * @return the string
-   */
-  public static String convertIpv4IpToHex(String ipAddress) {
-    StringBuffer hexIp = new StringBuffer(64);
-    String[] ipSegments = ipAddress.split("\\.");
-
-    for (String ipSegment : ipSegments) {
-      hexIp.append(convertIpSegmentToHex(ipSegment));
-    }
-
-    return hexIp.toString();
-
-  }
-
-  public static String convertHexToIpv4Ip(String hex) {
-    List<Integer> ipSegments = new ArrayList<>();
-    for(int i = 0; i < hex.length(); i += 2) {
-      String segment = hex.substring(i, i + 2);
-      ipSegments.add(Integer.parseInt(segment, 16));
-    }
-    return Joiner.on(".").join(ipSegments);
-  }
-
-  /**
-   * Gets the session key.
-   * 
-   * @param srcIp
-   *          the src ip
-   * @param dstIp
-   *          the dst ip
-   * @param protocol
-   *          the protocol
-   * @param srcPort
-   *          the src port
-   * @param dstPort
-   *          the dst port
-   * @return the session key
-   */
-  public static String getSessionKey(String srcIp, String dstIp,
-      String protocol, String srcPort, String dstPort) {
-    return getSessionKey(srcIp, dstIp, protocol, srcPort, dstPort, null, null);
-  }
-
-  /**
-   * Gets the session key.
-   * 
-   * @param srcIp
-   *          the src ip
-   * @param dstIp
-   *          the dst ip
-   * @param protocol
-   *          the protocol
-   * @param srcPort
-   *          the src port
-   * @param dstPort
-   *          the dst port
-   * @param ipId
-   *          the ip id
-   * @param fragmentOffset
-   *          the fragment offset
-   * @return the session key
-   */
-  public static String getSessionKey(String srcIp, String dstIp,
-      String protocol, String srcPort, String dstPort, String ipId,
-      String fragmentOffset) {
-
-    StringBuffer sb = new StringBuffer(40);
-    sb.append(convertIpv4IpToHex(srcIp)).append(SESSION_KEY_SEPERATOR)
-        .append(convertIpv4IpToHex(dstIp)).append(SESSION_KEY_SEPERATOR)
-        .append(protocol == null ? "0" : protocol)
-        .append(SESSION_KEY_SEPERATOR).append(srcPort == null ? "0" : srcPort)
-        .append(SESSION_KEY_SEPERATOR).append(dstPort == null ? "0" : dstPort)
-        .append(SESSION_KEY_SEPERATOR).append(ipId == null ? "0" : ipId)
-        .append(SESSION_KEY_SEPERATOR)
-        .append(fragmentOffset == null ? "0" : fragmentOffset);
-
-    return sb.toString();
-  }
-
-  public static String getSessionKey(JSONObject message) {
-    String srcIp = (String) message.get("ip_src_addr");
-    String dstIp = (String) message.get("ip_dst_addr");
-    Long protocol = (Long) message.get("ip_protocol");
-    Long srcPort = (Long) message.get("ip_src_port");
-    Long dstPort = (Long) message.get("ip_dst_port");
-    Long ipId = (Long) message.get("ip_id");
-    String ipIdString = ipId == null ? null : ipId.toString();
-    Long fragmentOffset = (Long) message.get("frag_offset");
-    String fragmentOffsetString = fragmentOffset == null ? null : fragmentOffset.toString();
-    return PcapUtils.getSessionKey(srcIp, dstIp, protocol.toString(), srcPort.toString(), dstPort.toString(), ipIdString, fragmentOffsetString);
-  }
-
-  public static String getPartialSessionKey(String srcIp, String dstIp,
-                                            String protocol, String srcPort, String dstPort) {
-    StringBuffer sb = new StringBuffer(40);
-    sb.append(convertIpv4IpToHex(srcIp)).append(SESSION_KEY_SEPERATOR)
-            .append(convertIpv4IpToHex(dstIp)).append(SESSION_KEY_SEPERATOR)
-            .append(protocol == null ? "0" : protocol)
-            .append(SESSION_KEY_SEPERATOR).append(srcPort == null ? "0" : srcPort)
-            .append(SESSION_KEY_SEPERATOR).append(dstPort == null ? "0" : dstPort);
-    return sb.toString();
-  }
-
-  /**
-   * Gets the session key.
-   * 
-   * @param srcIp
-   *          the src ip
-   * @param dstIp
-   *          the dst ip
-   * @param protocol
-   *          the protocol
-   * @param srcPort
-   *          the src port
-   * @param dstPort
-   *          the dst port
-   * @param ipId
-   *          the ip id
-   * @param fragmentOffset
-   *          the fragment offset
-   * @return the session key
-   */
-  public static String getSessionKey(String srcIp, String dstIp, int protocol,
-      int srcPort, int dstPort, int ipId, int fragmentOffset) {
-    String keySeperator = "-";
-    StringBuffer sb = new StringBuffer(40);
-    sb.append(convertIpv4IpToHex(srcIp)).append(keySeperator)
-        .append(convertIpv4IpToHex(dstIp)).append(keySeperator)
-        .append(protocol).append(keySeperator).append(srcPort)
-        .append(keySeperator).append(dstPort).append(keySeperator).append(ipId)
-        .append(keySeperator).append(fragmentOffset);
-
-    return sb.toString();
-  }
-
-  /**
-   * Gets the short session key. (5-tuple only)
-   * 
-   * @param srcIp
-   *          the src ip
-   * @param dstIp
-   *          the dst ip
-   * @param protocol
-   *          the protocol
-   * @param srcPort
-   *          the src port
-   * @param dstPort
-   *          the dst port
-   * @return the session key
-   */
-  public static String getShortSessionKey(String srcIp, String dstIp, int protocol,
-      int srcPort, int dstPort) {
-    String keySeperator = "-";
-    StringBuffer sb = new StringBuffer(40);
-    sb.append(convertIpv4IpToHex(srcIp)).append(keySeperator)
-        .append(convertIpv4IpToHex(dstIp)).append(keySeperator)
-        .append(protocol).append(keySeperator).append(srcPort)
-        .append(keySeperator).append(dstPort);
-
-    return sb.toString();
-  }
-  
-  // public static String convertPortToHex(String portNumber) {
-  // return convertPortToHex(Integer.valueOf(portNumber));
-  //
-  // }
-  //
-  // public static String convertPortToHex(int portNumber) {
-  // return convertToHex(portNumber, 4);
-  //
-  // }
-  //
-  // public static String convertProtocolToHex(String protocol) {
-  // return convertProtocolToHex(Integer.valueOf(protocol));
-  //
-  // }
-  //
-  // public static String convertProtocolToHex(int protocol) {
-  // return convertToHex(protocol, 2);
-  // }
-
-  /**
-   * Convert ip segment to hex.
-   * 
-   * @param ipSegment
-   *          the ip segment
-   * @return the string
-   */
-  public static String convertIpSegmentToHex(String ipSegment) {
-    return convertIpSegmentToHex(Integer.valueOf(ipSegment));
-
-  }
-
-  /**
-   * Convert ip segment to hex.
-   * 
-   * @param ipSegment
-   *          the ip segment
-   * @return the string
-   */
-  public static String convertIpSegmentToHex(int ipSegment) {
-    return convertToHex(ipSegment, 2);
-
-  }
-
-  /**
-   * Convert to hex.
-   * 
-   * @param number
-   *          the number
-   * @param length
-   *          the length
-   * @return the string
-   */
-  public static String convertToHex(int number, int length) {
-    return StringUtils.leftPad(Integer.toHexString(number), length, '0');
-
-  }
-
-  /**
-   * Gets the protocol name.
-   * 
-   * @param protocolNumber
-   *          the protocol number
-   * 
-   * @return the protocol name
-   */
-  public static String getProtocolNameFromId(int protocolNumber) {
-    String protocolName = protocolIdToNameMap.get(protocolNumber);
-
-    if (protocolName == null) {
-      protocolName = String.valueOf(protocolNumber);
-    }
-    return protocolName;
-  }
-
-  /**
-   * Gets the protocol id from name.
-   * 
-   * @param protocolName
-   *          the protocol name
-   * @return the protocol id from name
-   */
-  public static int getProtocolIdFromName(String protocolName) {
-    Integer protocolNumber = protocolNameToIdMap
-        .get(protocolName.toUpperCase());
-
-    if (protocolNumber == null) {
-      protocolNumber = -1;
-    }
-    return protocolNumber;
-  }
-
-  /**
-   * Invert map.
-   * 
-   * @param <V>
-   *          the value type
-   * @param <K>
-   *          the key type
-   * @param map
-   *          the map
-   * @return the map
-   */
-  private static <V, K> Map<V, K> invertMap(Map<K, V> map) {
-
-    Map<V, K> inv = new HashMap<V, K>();
-
-    for (Entry<K, V> entry : map.entrySet())
-      inv.put(entry.getValue(), entry.getKey());
-
-    return inv;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/Lookup.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/Lookup.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/Lookup.java
deleted file mode 100644
index bf871d1..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/Lookup.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup;
-
-import org.apache.metron.reference.lookup.accesstracker.AccessTracker;
-import org.apache.metron.reference.lookup.handler.Handler;
-
-import java.io.IOException;
-
-public class Lookup<CONTEXT_T, KEY_T extends LookupKey, RESULT_T> implements Handler<CONTEXT_T, KEY_T, RESULT_T> {
-  private String name;
-  private AccessTracker accessTracker;
-  private Handler<CONTEXT_T, KEY_T, RESULT_T> lookupHandler;
-
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  public AccessTracker getAccessTracker() {
-    return accessTracker;
-  }
-
-  public void setAccessTracker(AccessTracker accessTracker) {
-    this.accessTracker = accessTracker;
-  }
-
-  public Handler< CONTEXT_T, KEY_T, RESULT_T > getLookupHandler() {
-    return lookupHandler;
-  }
-
-  public void setLookupHandler(Handler< CONTEXT_T, KEY_T, RESULT_T > lookupHandler) {
-    this.lookupHandler = lookupHandler;
-  }
-
-  @Override
-  public boolean exists(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException {
-    if(logAccess) {
-      accessTracker.logAccess(key);
-    }
-    return lookupHandler.exists(key, context, logAccess);
-  }
-
-  @Override
-  public RESULT_T get(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException {
-    if(logAccess) {
-      accessTracker.logAccess(key);
-    }
-    return lookupHandler.get(key, context, logAccess);
-  }
-
-  @Override
-  public Iterable<Boolean> exists(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException {
-    if(logAccess) {
-      for (KEY_T k : key) {
-        accessTracker.logAccess(k);
-      }
-    }
-    return lookupHandler.exists(key, context, logAccess);
-  }
-
-
-  @Override
-  public Iterable<RESULT_T> get(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException {
-    if(logAccess) {
-      for (KEY_T k : key) {
-        accessTracker.logAccess(k);
-      }
-    }
-    return lookupHandler.get(key, context, logAccess);
-  }
-
-  @Override
-  public void close() throws Exception {
-    accessTracker.cleanup();
-    lookupHandler.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKV.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKV.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKV.java
deleted file mode 100644
index eb2b552..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKV.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.reference.lookup;
-
-import java.io.Serializable;
-
-public class LookupKV<KEY_T extends LookupKey, VALUE_T extends LookupValue> implements Serializable {
-    private KEY_T key;
-    private VALUE_T value;
-    public LookupKV(KEY_T key, VALUE_T value) {
-        this.key = key;
-        this.value = value;
-    }
-
-    public KEY_T getKey() {
-        return key;
-    }
-
-    public VALUE_T getValue() {
-        return value;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        LookupKV<?, ?> lookupKV = (LookupKV<?, ?>) o;
-
-        if (key != null ? !key.equals(lookupKV.key) : lookupKV.key != null) return false;
-        return value != null ? value.equals(lookupKV.value) : lookupKV.value == null;
-
-    }
-
-    @Override
-    public int hashCode() {
-        int result = key != null ? key.hashCode() : 0;
-        result = 31 * result + (value != null ? value.hashCode() : 0);
-        return result;
-    }
-
-    @Override
-    public String toString() {
-        return "LookupKV{" +
-                "key=" + key +
-                ", value=" + value +
-                '}';
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKey.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKey.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKey.java
deleted file mode 100644
index c51ed9f..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupKey.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup;
-
-public interface LookupKey {
-    byte[] toBytes();
-    void fromBytes(byte[] in);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupValue.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupValue.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupValue.java
deleted file mode 100644
index 448f4c9..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/LookupValue.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.metron.reference.lookup;
-
-import java.util.Map;
-import java.util.NavigableMap;
-
-public interface LookupValue {
-    Iterable<Map.Entry<byte[], byte[]>> toColumns();
-    void fromColumns(Iterable<Map.Entry<byte[], byte[]>> values);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTracker.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTracker.java
deleted file mode 100644
index 960c1e0..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTracker.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.accesstracker;
-
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Map;
-
-public interface AccessTracker extends Serializable{
-    void logAccess(LookupKey key);
-    void configure(Map<String, Object> config);
-    boolean hasSeen(LookupKey key);
-    String getName();
-    AccessTracker union(AccessTracker tracker);
-    void reset();
-    boolean isFull();
-    void cleanup() throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTrackerUtil.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTrackerUtil.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTrackerUtil.java
deleted file mode 100644
index 511275d..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/AccessTrackerUtil.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.accesstracker;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import org.apache.hadoop.hbase.client.*;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import javax.annotation.Nullable;
-import java.io.*;
-
-public enum AccessTrackerUtil {
-    INSTANCE;
-
-    public static byte[] COLUMN = Bytes.toBytes("v");
-
-    public AccessTracker deserializeTracker(byte[] bytes) throws IOException, ClassNotFoundException {
-        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
-        return (AccessTracker) ois.readObject();
-    }
-    public byte[] serializeTracker(AccessTracker tracker) throws IOException {
-        ByteArrayOutputStream bos = new ByteArrayOutputStream();
-        ObjectOutputStream oos = new ObjectOutputStream(bos);
-        oos.writeObject(tracker);
-        oos.flush();
-        oos.close();
-        return bos.toByteArray();
-    }
-
-
-    public void persistTracker(HTableInterface accessTrackerTable, String columnFamily, PersistentAccessTracker.AccessTrackerKey key, AccessTracker underlyingTracker) throws IOException {
-        Put put = new Put(key.toRowKey());
-        put.add(Bytes.toBytes(columnFamily), COLUMN, serializeTracker(underlyingTracker));
-        accessTrackerTable.put(put);
-    }
-
-    public Iterable<AccessTracker> loadAll(HTableInterface accessTrackerTable, final String columnFamily, final String name, final long earliest) throws IOException {
-        Scan scan = new Scan(PersistentAccessTracker.AccessTrackerKey.getTimestampScanKey(name, earliest));
-        ResultScanner scanner = accessTrackerTable.getScanner(scan);
-        return Iterables.transform(scanner, new Function<Result, AccessTracker>() {
-
-            @Nullable
-            @Override
-            public AccessTracker apply(@Nullable Result result) {
-                try {
-                    return deserializeTracker(result.getValue(Bytes.toBytes(columnFamily), COLUMN));
-                } catch (Exception e) {
-                    throw new RuntimeException("Unable to deserialize " + name + " @ " + earliest);
-                }
-            }
-        });
-    }
-
-
-    public AccessTracker loadAll(Iterable<AccessTracker> trackers) throws IOException, ClassNotFoundException {
-        AccessTracker tracker = null;
-        for(AccessTracker t : trackers) {
-            if(tracker == null) {
-                tracker = t;
-            }
-            else {
-                tracker = tracker.union(t);
-            }
-        }
-        return tracker;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/BloomAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/BloomAccessTracker.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/BloomAccessTracker.java
deleted file mode 100644
index 2bb8414..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/BloomAccessTracker.java
+++ /dev/null
@@ -1,147 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.accesstracker;
-
-import com.google.common.hash.BloomFilter;
-import com.google.common.hash.Funnel;
-import com.google.common.hash.PrimitiveSink;
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.*;
-import java.util.HashMap;
-import java.util.Map;
-
-public class BloomAccessTracker implements AccessTracker {
-    private static final long serialVersionUID = 1L;
-    public static final String EXPECTED_INSERTIONS_KEY = "expected_insertions";
-    public static final String FALSE_POSITIVE_RATE_KEY = "false_positive_rate";
-    public static final String NAME_KEY = "name";
-
-    private static class LookupKeyFunnel implements Funnel<LookupKey> {
-        @Override
-        public void funnel(LookupKey lookupKey, PrimitiveSink primitiveSink) {
-            primitiveSink.putBytes(lookupKey.toBytes());
-        }
-
-
-        @Override
-        public boolean equals(Object obj) {
-            return this.getClass().equals(obj.getClass());
-        }
-
-    }
-
-    private static Funnel<LookupKey> LOOKUPKEY_FUNNEL = new LookupKeyFunnel();
-
-    BloomFilter<LookupKey> filter;
-    String name;
-    int expectedInsertions;
-    double falsePositiveRate;
-    int numInsertions = 0;
-
-    public BloomAccessTracker(String name, int expectedInsertions, double falsePositiveRate) {
-        this.name = name;
-        this.expectedInsertions = expectedInsertions;
-        this.falsePositiveRate = falsePositiveRate;
-        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
-    }
-    public BloomAccessTracker() {}
-    public BloomAccessTracker(Map<String, Object> config) {
-        configure(config);
-    }
-
-    protected BloomFilter<LookupKey> getFilter() {
-        return filter;
-    }
-    @Override
-    public void logAccess(LookupKey key) {
-        numInsertions++;
-        filter.put(key);
-    }
-
-    @Override
-    public void configure(Map<String, Object> config) {
-        expectedInsertions = toInt(config.get(EXPECTED_INSERTIONS_KEY));
-        falsePositiveRate = toDouble(config.get(FALSE_POSITIVE_RATE_KEY));
-        name = config.get(NAME_KEY).toString();
-        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
-    }
-
-    @Override
-    public boolean hasSeen(LookupKey key) {
-        return filter.mightContain(key);
-    }
-
-    @Override
-    public void reset() {
-        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
-    }
-
-    private static double toDouble(Object o) {
-        if(o instanceof String) {
-            return Double.parseDouble((String)o);
-        }
-        else if(o instanceof Number) {
-            return ((Number) o).doubleValue();
-        }
-        else {
-            throw new IllegalStateException("Unable to convert " + o + " to a double.");
-        }
-    }
-    private static int toInt(Object o) {
-        if(o instanceof String) {
-            return Integer.parseInt((String)o);
-        }
-        else if(o instanceof Number) {
-            return ((Number) o).intValue();
-        }
-        else {
-            throw new IllegalStateException("Unable to convert " + o + " to a double.");
-        }
-    }
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-
-    @Override
-    public AccessTracker union(AccessTracker tracker) {
-        if(filter == null) {
-            throw new IllegalStateException("Unable to union access tracker, because this tracker is not initialized.");
-        }
-        if(tracker instanceof BloomAccessTracker ) {
-            filter.putAll(((BloomAccessTracker)tracker).getFilter());
-            return this;
-        }
-        else {
-            throw new IllegalStateException("Unable to union access tracker, because it's not of the right type (BloomAccessTracker)");
-        }
-    }
-
-    @Override
-    public boolean isFull() {
-        return numInsertions >= expectedInsertions;
-    }
-
-    @Override
-    public void cleanup() throws IOException {
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/NoopAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/NoopAccessTracker.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/NoopAccessTracker.java
deleted file mode 100644
index fb6d3ba..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/NoopAccessTracker.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.accesstracker;
-
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class NoopAccessTracker implements AccessTracker {
-  @Override
-  public void logAccess(LookupKey key) {
-
-  }
-
-  @Override
-  public void configure(Map<String, Object> config) {
-
-  }
-
-  @Override
-  public boolean hasSeen(LookupKey key) {
-    return false;
-  }
-
-  @Override
-  public String getName() {
-    return "noop";
-  }
-
-  @Override
-  public AccessTracker union(AccessTracker tracker) {
-    return null;
-  }
-
-  @Override
-  public void reset() {
-
-  }
-
-  @Override
-  public boolean isFull() {
-    return false;
-  }
-
-  @Override
-  public void cleanup() throws IOException {
-
-  }
-}


[08/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/spouts/GenericInternalTestSpout.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/spouts/GenericInternalTestSpout.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/spouts/GenericInternalTestSpout.java
new file mode 100644
index 0000000..a36d99d
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/spouts/GenericInternalTestSpout.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.test.spouts;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.metron.test.converters.BinaryConverters;
+import org.apache.metron.test.converters.IConverter;
+import org.apache.metron.test.filereaders.FileReader;
+
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichSpout;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Values;
+import backtype.storm.utils.Utils;
+
+
+public class GenericInternalTestSpout extends BaseRichSpout {
+
+	
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = -2379344923143372543L;
+
+	List<String> jsons;
+	
+	private String _filename;
+	private int _delay = 100;
+	private boolean _repeating = true;
+	
+	private SpoutOutputCollector _collector;
+	private IConverter _converter;
+	private FileReader Reader;
+	private int cnt = 0;
+	
+	public GenericInternalTestSpout withFilename(String filename)
+	{
+		if(filename != null && filename.length() > 0 && filename.charAt(0) == '$') {
+			filename = Iterables.getLast(Splitter.on("}").split(filename));
+		}
+		_filename = filename;
+		return this;
+	}
+	public GenericInternalTestSpout withMillisecondDelay(Integer delay)
+	{
+		_delay = delay;
+		return this;
+	}
+	
+	public GenericInternalTestSpout withRepeating(Boolean repeating)
+	{
+		_repeating = repeating;
+		return this;
+	}
+
+	public GenericInternalTestSpout withBinaryConverter(String converter) {
+		if(converter == null) {
+			_converter = BinaryConverters.DEFAULT;
+		}
+		else {
+			_converter = BinaryConverters.valueOf(converter);
+		}
+		return this;
+	}
+
+
+	@SuppressWarnings("rawtypes") 
+	public void open(Map conf, TopologyContext context,
+			SpoutOutputCollector collector) {
+		
+		_collector = collector;
+		try {
+			Reader =  new FileReader();
+			jsons = Reader.readFromFile(_filename);
+
+		} catch (Throwable e)
+		{
+			System.out.println("Could not read sample JSONs");
+			e.printStackTrace();
+		}
+		
+	}
+
+	public void nextTuple() {
+		Utils.sleep(_delay);
+		
+		if(cnt < jsons.size())
+		{
+			byte[] value;
+			if (_converter != null) {
+			  value = _converter.convert(jsons.get(cnt));
+			} else {
+				value = jsons.get(cnt).getBytes();
+			}
+			_collector.emit(new Values(value));
+		}
+		cnt ++;
+		
+		if(_repeating && cnt == jsons.size() -1 )
+			cnt = 0;
+	}
+
+	@Override
+	public void ack(Object id) {
+	}
+
+	@Override
+	public void fail(Object id) {
+	}
+
+	public void declareOutputFields(OutputFieldsDeclarer declarer) {
+		declarer.declare(new Fields("message"));
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/KafkaLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/KafkaLoader.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/KafkaLoader.java
new file mode 100644
index 0000000..294f8fd
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/KafkaLoader.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.utils;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.util.HashMap;
+import java.util.Map;
+
+public class KafkaLoader {
+
+  private String brokerUrl;
+  private String topic;
+  private String samplePath;
+  private int delay = 1000;
+  private int iterations = -1;
+  private KafkaProducer kafkaProducer;
+
+  public KafkaLoader(String brokerUrl, String topic, String samplePath) {
+    this.brokerUrl = brokerUrl;
+    this.topic = topic;
+    this.samplePath = samplePath;
+  }
+
+  public KafkaLoader withDelay(int delay) {
+    this.delay = delay;
+    return this;
+  }
+
+  public KafkaLoader withIterations(int iterations) {
+    this.iterations = iterations;
+    return this;
+  }
+
+  public void start() {
+    Map<String, Object> producerConfig = new HashMap<>();
+    producerConfig.put("bootstrap.servers", brokerUrl);
+    producerConfig.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+    producerConfig.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+    kafkaProducer = new KafkaProducer<>(producerConfig);
+    try {
+      while (iterations == -1 || iterations-- > 0) {
+        BufferedReader reader = new BufferedReader(new FileReader(samplePath));
+        String line;
+        while((line = reader.readLine()) != null) {
+          kafkaProducer.send(new ProducerRecord<String, String>(topic, line));
+          Thread.sleep(delay);
+        }
+        reader.close();
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  public void stop() {
+    kafkaProducer.close();
+  }
+
+
+  public static void main(String[] args) {
+    KafkaLoader kafkaLoader = new KafkaLoader(args[0], args[1], args[2]);
+    if (args.length > 3) kafkaLoader.withDelay(Integer.parseInt(args[3]));
+    if (args.length > 4) kafkaLoader.withIterations(Integer.parseInt(args[4]));
+    kafkaLoader.start();
+    kafkaLoader.stop();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/UnitTestHelper.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/UnitTestHelper.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/UnitTestHelper.java
new file mode 100644
index 0000000..aff48aa
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/utils/UnitTestHelper.java
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.utils;
+
+import org.apache.log4j.ConsoleAppender;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
+import org.junit.Assert;
+
+import java.io.File;
+import java.util.Set;
+import java.util.Stack;
+
+public class UnitTestHelper {
+    public static String findDir(String name) {
+        return findDir(new File("."), name);
+    }
+
+    public static String findDir(File startDir, String name) {
+        Stack<File> s = new Stack<File>();
+        s.push(startDir);
+        while(!s.empty()) {
+            File parent = s.pop();
+            if(parent.getName().equalsIgnoreCase(name)) {
+                return parent.getAbsolutePath();
+            }
+            else {
+                File[] children = parent.listFiles();
+                if(children != null) {
+                    for (File child : children) {
+                        s.push(child);
+                    }
+                }
+            }
+        }
+        return null;
+    }
+
+    public static <T> void assertSetEqual(String type, Set<T> expectedPcapIds, Set<T> found) {
+        boolean mismatch = false;
+        for(T f : found) {
+            if(!expectedPcapIds.contains(f)) {
+                mismatch = true;
+                System.out.println("Found " + type + " that I did not expect: " + f);
+            }
+        }
+        for(T expectedId : expectedPcapIds) {
+            if(!found.contains(expectedId)) {
+                mismatch = true;
+                System.out.println("Expected " + type + " that I did not index: " + expectedId);
+            }
+        }
+        Assert.assertFalse(mismatch);
+    }
+
+    public static void verboseLogging() {
+        verboseLogging("%d [%p|%c|%C{1}] %m%n", Level.ALL);
+    }
+    public static void verboseLogging(String pattern, Level level) {
+        ConsoleAppender console = new ConsoleAppender(); //create appender
+        //configure the appender
+        console.setLayout(new PatternLayout(pattern));
+        console.setThreshold(level);
+        console.activateOptions();
+        //add appender to any Logger (here is root)
+        Logger.getRootLogger().addAppender(console);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/pom.xml b/metron-platform/pom.xml
new file mode 100644
index 0000000..12ef866
--- /dev/null
+++ b/metron-platform/pom.xml
@@ -0,0 +1,265 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+		 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>org.apache.metron</groupId>
+	<artifactId>metron-platform</artifactId>
+	<version>0.1BETA</version>
+	<packaging>pom</packaging>
+	<name>metron-platform</name>
+	<description>Stream analytics for Metron</description>
+	<url>https://metron.incubator.apache.org/</url>
+	<scm>
+		<connection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</connection>
+		<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</developerConnection>
+		<tag>HEAD</tag>
+		<url>https://git-wip-us.apache.org/repos/asf/incubator-metron</url>
+	</scm>
+	<properties>
+		<twitter>@ApacheMetron</twitter>
+		<global_opencsv_version>3.7</global_opencsv_version>
+		<global_storm_version>0.10.0</global_storm_version>
+		<global_flux_version>0.10.0</global_flux_version>
+		<global_pcap_version>1.7.1</global_pcap_version>
+		<global_kafka_version>0.8.2.2</global_kafka_version>
+		<global_hadoop_version>2.7.1</global_hadoop_version>
+		<global_hbase_version>1.1.1</global_hbase_version>
+		<global_flume_version>1.5.2</global_flume_version>
+		<global_elasticsearch_version>1.7.4</global_elasticsearch_version>
+		<global_json_simple_version>1.1.1</global_json_simple_version>
+		<global_metrics_version>3.0.2</global_metrics_version>
+		<global_junit_version>4.4</global_junit_version>
+		<global_guava_version>17.0</global_guava_version>
+		<global_hbase_guava_version>12.0</global_hbase_guava_version>
+		<global_json_schema_validator_version>2.2.5</global_json_schema_validator_version>
+		<global_slf4j_version>1.7.7</global_slf4j_version>
+		<global_opencsv_version>3.7</global_opencsv_version>
+		<global_solr_version>5.2.1</global_solr_version>
+		<global_mockito_version>1.10.19</global_mockito_version>
+	</properties>
+	<licenses>
+		<license>
+			<name>The Apache Software License, Version 2.0</name>
+			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+			<distribution>repo</distribution>
+		</license>
+	</licenses>
+	<modules>
+		<module>metron-common</module>
+		<module>metron-enrichment</module>
+		<module>metron-elasticsearch</module>
+		<module>metron-solr</module>
+		<module>metron-parsers</module>
+		<module>metron-data-management</module>
+		<module>metron-pcap</module>
+		<module>metron-integration-test</module>
+		<module>metron-test-utilities</module>
+		<module>metron-api</module>
+		<module>metron-hbase</module>
+	</modules>
+	<dependencyManagement>
+		<dependencies>
+			<dependency>
+				<groupId>org.mockito</groupId>
+				<artifactId>mockito-core</artifactId>
+				<version>${global_mockito_version}</version>
+			</dependency>
+		</dependencies>
+	</dependencyManagement>
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.12</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.adrianwalker</groupId>
+			<artifactId>multiline-string</artifactId>
+			<version>0.1.2</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+	<build>
+		<pluginManagement>
+			<plugins>
+				<plugin>
+					<!-- Separates the unit tests from the integration tests. -->
+					<groupId>org.apache.maven.plugins</groupId>
+					<artifactId>maven-surefire-plugin</artifactId>
+					<version>2.18</version>
+					<configuration>
+						<!-- Skip the default running of this plug-in (or everything is run twice...see below) -->
+						<argLine>-Xmx2048m -XX:MaxPermSize=256m</argLine>
+						<skip>true</skip>
+						<!-- Show 100% of the lines from the stack trace (doesn't work) -->
+						<trimStackTrace>false</trimStackTrace>
+					</configuration>
+					<executions>
+						<execution>
+							<id>unit-tests</id>
+							<phase>test</phase>
+							<goals>
+								<goal>test</goal>
+							</goals>
+							<configuration>
+								<!-- Never skip running the tests when the test phase is invoked -->
+								<skip>false</skip>
+								<includes>
+									<!-- Include unit tests within integration-test phase. -->
+									<include>**/*Test.java</include>
+								</includes>
+								<excludes>
+									<!-- Exclude integration tests within (unit) test phase. -->
+									<exclude>**/*IntegrationTest.java</exclude>
+								</excludes>
+							</configuration>
+						</execution>
+						<execution>
+							<id>integration-tests</id>
+							<phase>integration-test</phase>
+							<goals>
+								<goal>test</goal>
+							</goals>
+							<configuration>
+								<!-- Never skip running the tests when the integration-test phase is invoked -->
+								<skip>false</skip>
+								<includes>
+									<!-- Include integration tests within integration-test phase. -->
+									<include>**/*IntegrationTest.java</include>
+								</includes>
+							</configuration>
+						</execution>
+					</executions>
+				</plugin>
+			</plugins>
+		</pluginManagement>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-checkstyle-plugin</artifactId>
+				<version>2.17</version>
+				<executions>
+					<execution>
+						<id>checkstyle</id>
+						<phase>package</phase>
+						<goals>
+							<goal>check</goal>
+						</goals>
+					</execution>
+				</executions>
+				<configuration>
+					<configLocation>style/checkstyle.xml</configLocation>
+					<headerLocation>style/LICENSE</headerLocation>
+					<failOnViolation>true</failOnViolation>
+					<includeTestSourceDirectory>true</includeTestSourceDirectory>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>3.5.1</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>cobertura-maven-plugin</artifactId>
+				<version>2.7</version>
+				<configuration>
+					<check />
+					<formats>
+						<format>html</format>
+					</formats>
+					<aggregate>true</aggregate>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.rat</groupId>
+				<artifactId>apache-rat-plugin</artifactId>
+				<version>0.11</version>
+				<configuration>
+					<excludes>
+						<exclude>**/README.md</exclude>
+						<exclude>**/*.json</exclude>
+						<exclude>**/*.log</exclude>
+						<exclude>**/src/main/resources/patterns/**</exclude>
+						<exclude>**/src/main/resources/sample/data/SampleIndexed/**</exclude>
+						<exclude>**/src/main/resources/sample/data/SampleInput/**</exclude>
+						<exclude>**/src/main/resources/sample/data/SampleParsed/**</exclude>
+						<exclude>**/dependency-reduced-pom.xml</exclude>
+					</excludes>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<reporting>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.18</version>
+				<configuration>
+					<argLine>-Xmx2048m -XX:MaxPermSize=256m</argLine>
+					<systemProperties>
+						<property>
+							<name>mode</name>
+							<value>local</value>
+						</property>
+					</systemProperties>
+				</configuration>
+			</plugin>
+			<!-- Normally, dependency report takes time, skip it -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-project-info-reports-plugin</artifactId>
+				<version>2.7</version>
+				<configuration>
+					<dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-pmd-plugin</artifactId>
+				<version>3.3</version>
+				<configuration>
+					<targetJdk>1.7</targetJdk>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>emma-maven-plugin</artifactId>
+				<version>1.0-alpha-3</version>
+				<inherited>true</inherited>
+			</plugin>
+		</plugins>
+	</reporting>
+	<repositories>
+		<repository>
+			<id>clojars.org</id>
+			<url>http://clojars.org/repo</url>
+		</repository>
+		<repository>
+			<id>multiline-release-repo</id>
+			<url>https://raw.github.com/benelog/multiline/master/maven-repository</url>
+			<snapshots>
+				<enabled>false</enabled>
+			</snapshots>
+		</repository>
+    </repositories>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/style/LICENSE.config
----------------------------------------------------------------------
diff --git a/metron-platform/style/LICENSE.config b/metron-platform/style/LICENSE.config
new file mode 100644
index 0000000..826d578
--- /dev/null
+++ b/metron-platform/style/LICENSE.config
@@ -0,0 +1,16 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/style/LICENSE.java
----------------------------------------------------------------------
diff --git a/metron-platform/style/LICENSE.java b/metron-platform/style/LICENSE.java
new file mode 100644
index 0000000..5d5f1e3
--- /dev/null
+++ b/metron-platform/style/LICENSE.java
@@ -0,0 +1,17 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/style/LICENSE.xml
----------------------------------------------------------------------
diff --git a/metron-platform/style/LICENSE.xml b/metron-platform/style/LICENSE.xml
new file mode 100644
index 0000000..05af984
--- /dev/null
+++ b/metron-platform/style/LICENSE.xml
@@ -0,0 +1,16 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one
+   or more contributor license agreements.  See the NOTICE file
+   distributed with this work for additional information
+   regarding copyright ownership.  The ASF licenses this file
+   to you under the Apache License, Version 2.0 (the
+   "License"); you may not use this file except in compliance
+   with the License.  You may obtain a copy of the License at
+     http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing,
+   software distributed under the License is distributed on an
+   "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+   KIND, either express or implied.  See the License for the
+   specific language governing permissions and limitations
+   under the License.
+-->

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/style/checkstyle.xml
----------------------------------------------------------------------
diff --git a/metron-platform/style/checkstyle.xml b/metron-platform/style/checkstyle.xml
new file mode 100644
index 0000000..5f8b5c5
--- /dev/null
+++ b/metron-platform/style/checkstyle.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+  -->
+<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.1//EN" "http://www.puppycrawl.com/dtds/configuration_1_1.dtd">
+
+<module name="Checker">
+  <property name="localeLanguage" value="en"/>
+
+  <!-- Verify that EVERY source file has the appropriate license -->
+  <module name="Header">
+    <property name="headerFile" value="style/LICENSE.java"/>
+    <property name="fileExtensions" value="java"/>
+  </module>
+  <module name="Header">
+    <property name="headerFile" value="style/LICENSE.config"/>
+    <property name="fileExtensions" value="properties, yaml"/>
+  </module>
+
+</module>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/README.md
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/README.md b/metron-streaming/Metron-Alerts/README.md
deleted file mode 100644
index 2b87d50..0000000
--- a/metron-streaming/Metron-Alerts/README.md
+++ /dev/null
@@ -1,104 +0,0 @@
-#Metron-Alerts
-
-##Module Description
-
-This module enables telemetry alerts.  It splits the message stream into two streams.  The original message is emitted on the "message" stream.  The corresponding alert is emitted on the "alerts" stream.  The two are tied together through the alerts UUID.  
-
-##Message Format
-
-Assuming the original message (with enrichments enabled) has the following format:
-
-```json
-{
-"message": 
-{"ip_src_addr": xxxx, 
-"ip_dst_addr": xxxx, 
-"ip_src_port": xxxx, 
-"ip_dst_port": xxxx, 
-"protocol": xxxx, 
-"timestamp": xxxx.
-"original_string": xxxx,
-"additional-field 1": xxxx,
-},
-"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"}
-
-}
-```
-
-The telemetry message will be tagged with a UUID alert tag like so:
-
-```json
-{
-"message": 
-{"ip_src_addr": xxxx, 
-"ip_dst_addr": xxxx, 
-"ip_src_port": xxxx, 
-"ip_dst_port": xxxx, 
-"protocol": xxxx, 
-"timestamp": xxxx,
-"original_string": xxxx,
-"additional-field 1": xxxx,
-},
-"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"},
-"alerts": [UUID1, UUID2, UUID3, etc]
-
-}
-```
-
-The alert will be fired on the "alerts" stream and can be customized to have any format as long as it includes the required mandatory fields.  The mandatory fields are:
-
-* timestamp (epoch): The time from the message that triggered the alert
-* description: A human friendly string representation of the alert
-* alert_id: The UUID generated for the alert. This uniquely identifies an alert
-
-There are other standard but not mandatory fields that can be leveraged by metron-ui and other alert consumers:
-
-* designated_host: The IP address that corresponds to an asset. Ex. The IP address of the company device associated with the alert.
-* enrichment: A copy of the enrichment data from the message that triggered the alert
-* priority: The priority of the alert. Must be set to one of HIGH, MED or LOW
-
-An example of an alert with all mandatory and standard fields would look like so:
-
-```json
-{
-"timestamp": xxxx,
-"alert_id": UUID,
-"description": xxxx,
-"designated_host": xxxx,
-"enrichment": { "geo": xxxx, "whois": xxxx, "cif": xxxx },
-"priority": "MED"
-}
-```
-
-##Alerts Bolt
-
-The bolt can be extended with a variety of alerts adapters.  The ability to stack alerts is currently in beta, but is not currently advisable.  We advise having only one alerts bolt per topology.  The adapters are rules-based adapters which fire alerts when rules are a match.  Currently only Java adapters are provided, but there are future plans to provide Grok-Based adapters as well.
-
-The signature of the Alerts bolt is as follows:
-
-``` 
-TelemetryAlertsBolt alerts_bolt = new TelemetryAlertsBolt()
-.withIdentifier(alerts_identifier).withMaxCacheSize(1000)
-.withMaxTimeRetain(3600).withAlertsAdapter(alerts_adapter)
-.withMetricConfiguration(config);
-```
-Identifier - JSON key where the alert is attached
-TimeRetain & MaxCacheSize - Caching parameters for the bolt
-MetricConfiguration - export custom bolt metrics to graphite (if not null)
-AlertsAdapter - pick the appropriate adapter for generating the alerts
-
-### Java Adapters
-
-Java adapters are designed for high volume topologies, but are not easily extensible.  The adapters provided are:
-
-* org.apache.metron.alerts.adapters.AllAlertsAdapter - will tag every single message with the static alert (appropriate for topologies like Sourcefire, etc, where every single message is an alert)
-* org.apache.metron.alerts.adapters.HbaseWhiteAndBlacklistAdapter - will read white and blacklists from HBase and fire alerts if source or dest IP are not on the whitelist or if any IP is on the blacklist
-* org.apache.metron.alerts.adapters.CIFAlertsAdapter - will alert on messages that have results in enrichment.cif.
-* org.apache.metron.alerts.adpaters.KeywordsAlertAdapter - will alert on messages that contain any of a list of keywords
-###Grok Adapters
-
-Grok alerts adapters for Metron are still under development
-
-###Stacking Alert Adapters
-
-The functionality to stack alerts adapters is still under development

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/pom.xml
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/pom.xml b/metron-streaming/Metron-Alerts/pom.xml
deleted file mode 100644
index ec921ee..0000000
--- a/metron-streaming/Metron-Alerts/pom.xml
+++ /dev/null
@@ -1,169 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-  Licensed to the Apache Software 
-	Foundation (ASF) under one or more contributor license agreements. See the 
-	NOTICE file distributed with this work for additional information regarding 
-	copyright ownership. The ASF licenses this file to You under the Apache License, 
-	Version 2.0 (the "License"); you may not use this file except in compliance 
-	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
-	Unless required by applicable law or agreed to in writing, software distributed 
-	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
-	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
-  the specific language governing permissions and limitations under the License. 
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.metron</groupId>
-        <artifactId>Metron-Streaming</artifactId>
-        <version>0.1BETA</version>
-    </parent>
-    <artifactId>Metron-Alerts</artifactId>
-    <name>Metron-Alerts</name>
-    <description>Taggers for alerts</description>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-        <commons.validator.version>1.4.0</commons.validator.version>
-    </properties>
-    <dependencies>
-       <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-            <version>${global_hbase_guava_version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.metron</groupId>
-            <artifactId>Metron-Common</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.googlecode.json-simple</groupId>
-            <artifactId>json-simple</artifactId>
-            <version>${global_json_simple_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.storm</groupId>
-            <artifactId>storm-core</artifactId>
-            <version>${global_storm_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <artifactId>servlet-api</artifactId>
-                    <groupId>javax.servlet</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>log4j-over-slf4j</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kafka</groupId>
-            <artifactId>kafka_2.9.2</artifactId>
-            <version>${global_kafka_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <version>${global_hbase_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.google.guava</groupId>
-                    <artifactId>guava</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.codahale.metrics</groupId>
-            <artifactId>metrics-core</artifactId>
-            <version>${global_metrics_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-validator</groupId>
-            <artifactId>commons-validator</artifactId>
-            <version>${commons.validator.version}</version>
-            <exclusions>
-                <exclusion>
-
-                    <groupId>commons-beanutils</groupId>
-
-                    <artifactId>commons-beanutils</artifactId>
-
-                </exclusion>
-            </exclusions>
-        </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <version>2.18</version>
-                <configuration>
-                    <systemProperties>
-                        <property>
-                            <name>mode</name>
-                            <value>local</value>
-                        </property>
-                    </systemProperties>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.1</version>
-                <configuration>
-                    <source>1.7</source>
-                    <target>1.7</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-pmd-plugin</artifactId>
-                <version>3.3</version>
-                <configuration>
-                    <targetJdk>1.7</targetJdk>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>emma-maven-plugin</artifactId>
-                <version>1.0-alpha-3</version>
-                <inherited>true</inherited>
-            </plugin>
-        </plugins>
-        <resources>
-            <resource>
-                <directory>src/main/resources</directory>
-            </resource>
-        </resources>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/AbstractAlertBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/AbstractAlertBolt.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/AbstractAlertBolt.java
deleted file mode 100644
index f482c2a..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/AbstractAlertBolt.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.alerts;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.tuple.Fields;
-
-import com.codahale.metrics.Counter;
-import com.google.common.cache.Cache;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-import org.apache.metron.metrics.MetricReporter;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractAlertBolt extends BaseRichBolt {
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = -6710596708304282838L;
-
-	transient Cache<String, String> cache;
-
-	protected static final Logger LOG = LoggerFactory
-			.getLogger(AbstractAlertBolt.class);
-
-	protected OutputCollector _collector;
-	protected AlertsAdapter _adapter;
-
-	protected String OutputFieldName;
-	protected JSONObject _identifier;
-	protected MetricReporter _reporter;
-
-	protected int _MAX_CACHE_SIZE_OBJECTS_NUM = -1;
-	protected int _MAX_TIME_RETAIN_MINUTES = -1;
-
-	protected Counter ackCounter, emitCounter, failCounter;
-
-	protected void registerCounters() {
-
-		String ackString = _adapter.getClass().getSimpleName() + ".ack";
-
-		String emitString = _adapter.getClass().getSimpleName() + ".emit";
-
-		String failString = _adapter.getClass().getSimpleName() + ".fail";
-
-		ackCounter = _reporter.registerCounter(ackString);
-		emitCounter = _reporter.registerCounter(emitString);
-		failCounter = _reporter.registerCounter(failString);
-
-	}
-
-	public final void prepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) {
-		_collector = collector;
-
-		if (this._adapter == null)
-			throw new IllegalStateException("Alerts adapter must be specified");
-		if (this._identifier == null)
-			throw new IllegalStateException("Identifier must be specified");
-
-		if (this._MAX_CACHE_SIZE_OBJECTS_NUM == -1)
-			throw new IllegalStateException("MAX_CACHE_SIZE_OBJECTS_NUM must be specified");
-		if (this._MAX_TIME_RETAIN_MINUTES == -1)
-			throw new IllegalStateException("MAX_TIME_RETAIN_MINUTES must be specified");
-
-		try {
-			doPrepare(conf, topologyContext, collector);
-		} catch (IOException e) {
-			LOG.error("Counld not initialize...");
-			e.printStackTrace();
-		}
-
-		boolean success = _adapter.initialize();
-		
-		try {
-			if (!success)
-
-				throw new Exception("Could not initialize adapter");
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-	}
-
-	public void declareOutputFields(OutputFieldsDeclarer declearer) {
-		declearer.declareStream("message", new Fields("key", "message"));
-		declearer.declareStream("alert", new Fields( "message"));
-		declearer.declareStream("error", new Fields("message"));
-	}
-
-	abstract void doPrepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/TelemetryAlertsBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/TelemetryAlertsBolt.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/TelemetryAlertsBolt.java
deleted file mode 100644
index 663ae40..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/TelemetryAlertsBolt.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.alerts;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.configuration.Configuration;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-import org.apache.metron.helpers.topology.ErrorUtils;
-import org.apache.metron.json.serialization.JSONEncoderHelper;
-import org.apache.metron.metrics.MetricReporter;
-
-@SuppressWarnings("rawtypes")
-public class TelemetryAlertsBolt extends AbstractAlertBolt {
-
-	/**
-	 * Use an adapter to tag existing telemetry messages with alerts. The list
-	 * of available tagger adapters is located under
-	 * org.apache.metron.tagging.adapters. At the time of the release the following
-	 * adapters are available:
-	 * 
-	 * <p>
-	 * <ul>
-	 * <li>RegexTagger = read a list or regular expressions and tag a message if
-	 * they exist in a message
-	 * <li>StaticAllTagger = tag each message with a static alert
-	 * <ul>
-	 * <p>
-	 */
-	private static final long serialVersionUID = -2647123143398352020L;
-	private Properties metricProperties;
-	private JSONObject metricConfiguration;
-
-	// private AlertsCache suppressed_alerts;
-
-	/**
-	 * 
-	 * @param tagger
-	 *            - tagger adapter for generating alert tags
-	 * @return instance of bolt
-	 */
-	public TelemetryAlertsBolt withAlertsAdapter(AlertsAdapter tagger) {
-		_adapter = tagger;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param OutputFieldName
-	 *            - output name of the tuple coming out of this bolt
-	 * @return - instance of this bolt
-	 */
-	public TelemetryAlertsBolt withOutputFieldName(String OutputFieldName) {
-		this.OutputFieldName = OutputFieldName;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param metricProperties
-	 *            - metric output to graphite
-	 * @return - instance of this bolt
-	 */
-	public TelemetryAlertsBolt withMetricProperties(Properties metricProperties) {
-		this.metricProperties = metricProperties;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param identifier
-	 *            - the identifier tag for tagging telemetry messages with
-	 *            alerts out of this bolt
-	 * @return - instance of this bolt
-	 */
-
-	public TelemetryAlertsBolt withIdentifier(JSONObject identifier) {
-		this._identifier = identifier;
-		return this;
-	}
-
-	/**
-	 * @param config
-	 *            A class for generating custom metrics into graphite
-	 * @return Instance of this class
-	 */
-
-	public TelemetryAlertsBolt withMetricConfiguration(Configuration config) {
-		this.metricConfiguration = JSONEncoderHelper.getJSON(config
-				.subset("org.apache.metron.metrics"));
-		return this;
-	}
-
-	/**
-	 * @param MAX_CACHE_SIZE_OBJECTS_NUM
-	 *            Maximum size of cache before flushing
-	 * @return Instance of this class
-	 */
-
-	public TelemetryAlertsBolt withMaxCacheSize(int MAX_CACHE_SIZE_OBJECTS_NUM) {
-		_MAX_CACHE_SIZE_OBJECTS_NUM = MAX_CACHE_SIZE_OBJECTS_NUM;
-		return this;
-	}
-
-	/**
-	 * @param MAX_TIME_RETAIN_MINUTES
-	 *            Maximum time to retain cached entry before expiring
-	 * @return Instance of this class
-	 */
-
-	public TelemetryAlertsBolt withMaxTimeRetain(int MAX_TIME_RETAIN_MINUTES) {
-		_MAX_TIME_RETAIN_MINUTES = MAX_TIME_RETAIN_MINUTES;
-		return this;
-	}
-
-	@Override
-	void doPrepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) throws IOException {
-
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-				.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES).build();
-
-		LOG.info("[Metron] Preparing TelemetryAlert Bolt...");
-
-		try {
-			_reporter = new MetricReporter();
-			_reporter.initialize(metricProperties, TelemetryAlertsBolt.class);
-			LOG.info("[Metron] Initialized metrics");
-		} catch (Exception e) {
-			LOG.info("[Metron] Could not initialize metrics");
-		}
-	}
-
-	@SuppressWarnings("unchecked")
-	public void execute(Tuple tuple) {
-
-		LOG.trace("[Metron] Starting to process message for alerts");
-		JSONObject original_message = null;
-		String key = null;
-
-		try {
-
-			key = tuple.getStringByField("key");
-			original_message = (JSONObject) tuple.getValueByField("message");
-
-			if (original_message == null || original_message.isEmpty())
-				throw new Exception("Could not parse message from byte stream");
-			
-			if(key == null)
-				throw new Exception("Key is not valid");
-			
-			LOG.trace("[Metron] Received tuple: " + original_message);
-
-			JSONObject alerts_tag = new JSONObject();
-			Map<String, JSONObject> alerts_list = _adapter
-					.alert(original_message);
-			JSONArray uuid_list = new JSONArray();
-
-			if (alerts_list == null || alerts_list.isEmpty()) {
-				System.out.println("[Metron] No alerts detected in: "
-						+ original_message);
-				_collector.ack(tuple);
-				_collector.emit("message", new Values(key, original_message));
-			} else {
-				for (String alert : alerts_list.keySet()) {
-					uuid_list.add(alert);
-
-					LOG.trace("[Metron] Checking alerts cache: " + alert);
-
-					if (cache.getIfPresent(alert) == null) {
-						System.out.println("[Metron]: Alert not found in cache: " + alert);
-
-						JSONObject global_alert = new JSONObject();
-						global_alert.putAll(_identifier);
-						global_alert.putAll(alerts_list.get(alert));
-						global_alert.put("timestamp", System.currentTimeMillis());
-						_collector.emit("alert", new Values(global_alert));
-
-						cache.put(alert, "");
-
-					} else
-						LOG.trace("Alert located in cache: " + alert);
-
-					LOG.debug("[Metron] Alerts are: " + alerts_list);
-
-					if (original_message.containsKey("alerts")) {
-						JSONArray already_triggered = (JSONArray) original_message
-								.get("alerts");
-
-						uuid_list.addAll(already_triggered);
-						LOG.trace("[Metron] Messages already had alerts...tagging more");
-					}
-
-					original_message.put("alerts", uuid_list);
-
-					LOG.debug("[Metron] Detected alerts: " + alerts_tag);
-
-					_collector.ack(tuple);
-					_collector.emit("message", new Values(key, original_message));
-
-				}
-
-				/*
-				 * if (metricConfiguration != null) { emitCounter.inc();
-				 * ackCounter.inc(); }
-				 */
-			}
-
-		} catch (Exception e) {
-			e.printStackTrace();
-			LOG.error("Failed to tag message :" + original_message);
-			e.printStackTrace();
-			_collector.fail(tuple);
-
-			/*
-			 * if (metricConfiguration != null) { failCounter.inc(); }
-			 */
-
-
-			JSONObject error = ErrorUtils.generateErrorMessage(
-					"Alerts problem: " + original_message, e);
-			_collector.emit("error", new Values(error));
-		}
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AbstractAlertAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AbstractAlertAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AbstractAlertAdapter.java
deleted file mode 100644
index 6c1b8d1..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AbstractAlertAdapter.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.metron.alerts.adapters;
-
-import java.io.Serializable;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-@SuppressWarnings("serial")
-public abstract class AbstractAlertAdapter implements AlertsAdapter, Serializable{
-	
-	protected static final Logger _LOG = LoggerFactory
-			.getLogger(AbstractAlertAdapter.class);
-
-
-	protected Cache<String, String> cache;
-	
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
-
-		String key = makeKey(source_ip, dst_ip, alert_type);
-
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
-
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
-
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
-
-		return new_UUID;
-
-	}
-	
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-	
-	protected void generateCache(int _MAX_CACHE_SIZE_OBJECTS_NUM, int _MAX_TIME_RETAIN_MINUTES)
-	{
-		cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-				.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES).build();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AllAlertAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AllAlertAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AllAlertAdapter.java
deleted file mode 100644
index b527991..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/AllAlertAdapter.java
+++ /dev/null
@@ -1,292 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.alerts.adapters;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.validator.routines.InetAddressValidator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.simple.JSONObject;
-import org.apache.log4j.Logger;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-@SuppressWarnings("serial")
-public class AllAlertAdapter implements AlertsAdapter, Serializable {
-
-	HTableInterface blacklist_table;
-	HTableInterface whitelist_table;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
-	String _whitelist_table_name;
-	String _blacklist_table_name;
-	String _quorum;
-	String _port;
-	String _topologyname;
-	Configuration conf = null;
-
-	Cache<String, String> cache;
-	String _topology_name;
-
-	Set<String> loaded_whitelist = new HashSet<String>();
-	Set<String> loaded_blacklist = new HashSet<String>();
-
-	protected static final Logger LOG = Logger
-			.getLogger(AllAlertAdapter.class);
-
-	public AllAlertAdapter(Map<String, String> config) {
-		try {
-			if(!config.containsKey("whitelist_table_name"))
-				throw new Exception("Whitelist table name is missing");
-				
-			_whitelist_table_name = config.get("whitelist_table_name");
-			
-			if(!config.containsKey("blacklist_table_name"))
-				throw new Exception("Blacklist table name is missing");
-			
-			_blacklist_table_name = config.get("blacklist_table_name");
-			
-			if(!config.containsKey("quorum"))
-				throw new Exception("Quorum name is missing");
-			
-			_quorum = config.get("quorum");
-			
-			if(!config.containsKey("port"))
-				throw new Exception("port name is missing");
-			
-			_port = config.get("port");
-
-			if(!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
-				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
-			
-			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
-					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
-			
-			if(!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
-				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
-			
-			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
-					.get("_MAX_TIME_RETAIN_MINUTES"));
-
-			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
-					.build();
-		} catch (Exception e) {
-			System.out.println("Could not initialize Alerts Adapter");
-			e.printStackTrace();
-			System.exit(0);
-		}
-	}
-
-	@SuppressWarnings("resource")
-    @Override
-	public boolean initialize() {
-
-		conf = HBaseConfiguration.create();
-		//conf.set("hbase.zookeeper.quorum", _quorum);
-		//conf.set("hbase.zookeeper.property.clientPort", _port);
-
-		LOG.trace("[Metron] Connecting to hbase with conf:" + conf);
-		LOG.trace("[Metron] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[Metron] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[Metron] ZK Client/port: "
-				+ conf.get("hbase.zookeeper.quorum") + " -> "
-				+ conf.get("hbase.zookeeper.property.clientPort"));
-
-		try {
-
-			LOG.trace("[Metron] Attempting to connect to hbase");
-
-			HConnection connection = HConnectionManager.createConnection(conf);
-
-			LOG.trace("[Metron] CONNECTED TO HBASE");
-
-			HBaseAdmin hba = new HBaseAdmin(conf);
-
-			if (!hba.tableExists(_whitelist_table_name))
-				throw new Exception("Whitelist table doesn't exist");
-
-			if (!hba.tableExists(_blacklist_table_name))
-				throw new Exception("Blacklist table doesn't exist");
-
-			whitelist_table = new HTable(conf, _whitelist_table_name);
-
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _whitelist_table_name);
-			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _blacklist_table_name);
-
-			if (connection == null || whitelist_table == null
-					|| blacklist_table == null)
-				throw new Exception("Unable to initialize hbase connection");
-
-			Scan scan = new Scan();
-
-			ResultScanner rs = whitelist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_whitelist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			whitelist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-			
-			System.out.println("LOADED WHITELIST IS: ");
-			
-			for(String str: loaded_whitelist)
-				System.out.println("WHITELIST: " + str);
-
-			scan = new Scan();
-
-			rs = blacklist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_blacklist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			blacklist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			rs.close(); // always close the ResultScanner!
-			hba.close();
-
-			return true;
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-
-		return false;
-
-	}
-
-	@Override
-	public boolean refresh() throws Exception {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	@SuppressWarnings("unchecked")
-    @Override
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-		JSONObject content = (JSONObject) raw_message.get("message");
-
-		JSONObject enrichment = null;
-
-		if (raw_message.containsKey("enrichment"))
-			enrichment = (JSONObject) raw_message.get("enrichment");
-
-		JSONObject alert = new JSONObject();
-
-
-
-		String source = "unknown";
-		String dest = "unknown";
-		String host = "unknown";
-
-		if (content.containsKey("ip_src_addr"))
-		{
-			source = content.get("ip_src_addr").toString();
-			
-			if(RangeChecker.checkRange(loaded_whitelist, source))
-				host = source;				
-		}
-
-		if (content.containsKey("ip_dst_addr"))
-		{
-			dest = content.get("ip_dst_addr").toString();
-			
-			if(RangeChecker.checkRange(loaded_whitelist, dest))
-				host = dest;	
-		}
-
-		alert.put("designated_host", host);
-		alert.put("description", content.get("original_string").toString());
-		alert.put("priority", "MED");	
-
-		String alert_id = generateAlertId(source, dest, 0);
-
-		alert.put("alert_id", alert_id);
-		alerts.put(alert_id, alert);
-
-		alert.put("enrichment", enrichment);
-
-		return alerts;
-
-	}
-
-	@Override
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
-
-		String key = makeKey(source_ip, dst_ip, alert_type);
-
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
-
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
-
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
-
-		return new_UUID;
-
-	}
-
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/CIFAlertsAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/CIFAlertsAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/CIFAlertsAdapter.java
deleted file mode 100644
index 4e8e025..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/CIFAlertsAdapter.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.adapters;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.validator.routines.InetAddressValidator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-@SuppressWarnings("serial")
-public class CIFAlertsAdapter implements AlertsAdapter, Serializable {
-
-	String enrichment_tag;
-
-	HTableInterface blacklist_table;
-	HTableInterface whitelist_table;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
-	String _whitelist_table_name;
-	String _blacklist_table_name;
-	String _quorum;
-	String _port;
-	String _topologyname;
-	Configuration conf = null;
-
-	Cache<String, String> cache;
-	String _topology_name;
-
-	Set<String> loaded_whitelist = new HashSet<String>();
-	Set<String> loaded_blacklist = new HashSet<String>();
-
-	protected static final Logger LOG = LoggerFactory
-			.getLogger(CIFAlertsAdapter.class);
-
-	public CIFAlertsAdapter(Map<String, String> config) {
-		try {
-
-			if (!config.containsKey("whitelist_table_name"))
-				throw new Exception("Whitelist table name is missing");
-
-			_whitelist_table_name = config.get("whitelist_table_name");
-
-			if (!config.containsKey("blacklist_table_name"))
-				throw new Exception("Blacklist table name is missing");
-
-			_blacklist_table_name = config.get("blacklist_table_name");
-
-			if (!config.containsKey("quorum"))
-				throw new Exception("Quorum name is missing");
-
-			_quorum = config.get("quorum");
-
-			if (!config.containsKey("port"))
-				throw new Exception("port name is missing");
-
-			_port = config.get("port");
-
-			if (!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
-				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
-
-			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
-					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
-
-			if (!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
-				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
-
-			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
-					.get("_MAX_TIME_RETAIN_MINUTES"));
-
-			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
-					.build();
-
-			enrichment_tag = config.get("enrichment_tag");
-
-		} catch (Exception e) {
-			System.out.println("Could not initialize alerts adapter");
-			e.printStackTrace();
-			System.exit(0);
-		}
-	}
-
-	@SuppressWarnings("resource")
-    @Override
-	public boolean initialize() {
-
-		conf = HBaseConfiguration.create();
-		// conf.set("hbase.zookeeper.quorum", _quorum);
-		// conf.set("hbase.zookeeper.property.clientPort", _port);
-
-		LOG.trace("[Metron] Connecting to hbase with conf:" + conf);
-		LOG.trace("[Metron] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[Metron] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[Metron] ZK Client/port: "
-				+ conf.get("hbase.zookeeper.quorum") + " -> "
-				+ conf.get("hbase.zookeeper.property.clientPort"));
-
-		try {
-
-			LOG.trace("[Metron] Attempting to connect to hbase");
-
-			HConnection connection = HConnectionManager.createConnection(conf);
-
-			LOG.trace("[Metron] CONNECTED TO HBASE");
-
-			HBaseAdmin hba = new HBaseAdmin(conf);
-
-			if (!hba.tableExists(_whitelist_table_name))
-				throw new Exception("Whitelist table doesn't exist");
-
-			if (!hba.tableExists(_blacklist_table_name))
-				throw new Exception("Blacklist table doesn't exist");
-
-			whitelist_table = new HTable(conf, _whitelist_table_name);
-
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _whitelist_table_name);
-			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _blacklist_table_name);
-
-			if (connection == null || whitelist_table == null
-					|| blacklist_table == null)
-				throw new Exception("Unable to initialize hbase connection");
-
-			Scan scan = new Scan();
-
-			ResultScanner rs = whitelist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_whitelist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			whitelist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			scan = new Scan();
-
-			rs = blacklist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_blacklist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			blacklist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			rs.close(); // always close the ResultScanner!
-			hba.close();
-
-			return true;
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-
-		return false;
-
-	}
-
-	@Override
-	public boolean refresh() throws Exception {
-		return true;
-	}
-
-	@SuppressWarnings("unchecked")
-    @Override
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-
-		System.out.println("LOOKING FOR ENRICHMENT TAG: " + enrichment_tag);
-
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-		JSONObject content = (JSONObject) raw_message.get("message");
-
-		JSONObject enrichment = null;
-
-		if (raw_message.containsKey("enrichment"))
-			enrichment = (JSONObject) raw_message.get("enrichment");
-		else
-			return null;
-
-		if (enrichment.containsKey(enrichment_tag)) {
-
-			System.out.println("FOUND TAG: " + enrichment_tag);
-
-			JSONObject cif = (JSONObject) enrichment.get(enrichment_tag);
-
-			int cnt = 0;
-			Object enriched_key = null;
-			
-			for (Object key : cif.keySet()) {
-				JSONObject tmp = (JSONObject) cif.get(key);
-				cnt = cnt + tmp.size();
-				if (tmp.size() > 0)
-					enriched_key = key;
-			}
-
-			if (cnt == 0) {
-				System.out.println("TAG HAS NO ELEMENTS");
-				return null;
-			}
-
-			JSONObject alert = new JSONObject();
-
-			String source = "unknown";
-			String dest = "unknown";
-			String host = "unknown";
-
-			if (content.containsKey("ip_src_addr")) {
-				source = content.get("ip_src_addr").toString();
-
-				if (RangeChecker.checkRange(loaded_whitelist, source))
-					host = source;
-			}
-
-			if (content.containsKey("ip_dst_addr")) {
-				dest = content.get("ip_dst_addr").toString();
-
-				if (RangeChecker.checkRange(loaded_whitelist, dest))
-					host = dest;
-			}
-			
-			JSONObject cifQualifier = (JSONObject) cif.get(enriched_key);
-			
-			alert.put("designated_host", host);
-			String description = new StringBuilder()
-					.append(host)
-					.append(" communicated with a host (")
-					.append(content.get(enriched_key).toString())
-					.append(") identified as ")
-					.append(cifQualifier.keySet().iterator().next().toString())
-					.append(" by CIF")
-					.toString();	
-			alert.put("description", description);
-			alert.put("priority", "MED");
-
-			String alert_id = generateAlertId(source, dest, 0);
-
-			alert.put("alert_id", alert_id);
-			alerts.put(alert_id, alert);
-
-			alert.put("enrichment", enrichment);
-
-			return alerts;
-		} else {
-			System.out.println("DID NOT FIND TAG: " + enrichment_tag);
-			return null;
-		}
-
-	}
-
-	@Override
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
-
-		String key = makeKey(source_ip, dst_ip, alert_type);
-
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
-
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
-
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
-
-		return new_UUID;
-
-	}
-
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
deleted file mode 100644
index 3673138..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/HbaseWhiteAndBlacklistAdapter.java
+++ /dev/null
@@ -1,483 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.adapters;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.validator.routines.InetAddressValidator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-public class HbaseWhiteAndBlacklistAdapter implements AlertsAdapter,
-		Serializable {
-
-	HTableInterface blacklist_table;
-	HTableInterface whitelist_table;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
-	String _whitelist_table_name;
-	String _blacklist_table_name;
-	String _quorum;
-	String _port;
-	String _topologyname;
-	Configuration conf = null;
-
-	Cache<String, String> cache;
-	String _topology_name;
-
-	Set<String> loaded_whitelist = new HashSet<String>();
-	Set<String> loaded_blacklist = new HashSet<String>();
-
-	protected static final Logger LOG = LoggerFactory
-			.getLogger(HbaseWhiteAndBlacklistAdapter.class);
-
-	public HbaseWhiteAndBlacklistAdapter(Map<String, String> config) {
-
-		try {
-			if(!config.containsKey("whitelist_table_name"))
-				throw new Exception("Whitelist table name is missing");
-				
-			_whitelist_table_name = config.get("whitelist_table_name");
-			
-			if(!config.containsKey("blacklist_table_name"))
-				throw new Exception("Blacklist table name is missing");
-			
-			_blacklist_table_name = config.get("blacklist_table_name");
-			
-			if(!config.containsKey("quorum"))
-				throw new Exception("Quorum name is missing");
-			
-			_quorum = config.get("quorum");
-			
-			if(!config.containsKey("port"))
-				throw new Exception("port name is missing");
-			
-			_port = config.get("port");
-
-			if(!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
-				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
-			
-			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
-					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
-			
-			if(!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
-				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
-			
-			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
-					.get("_MAX_TIME_RETAIN_MINUTES"));
-
-			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
-					.build();
-		} catch (Exception e) {
-			System.out.println("Could not initialize Alerts Adapter");
-			e.printStackTrace();
-			System.exit(0);
-		}
-
-	}
-
-	public boolean initialize() {
-
-		conf = HBaseConfiguration.create();
-		//conf.set("hbase.zookeeper.quorum", _quorum);
-		//conf.set("hbase.zookeeper.property.clientPort", _port);
-
-		LOG.trace("[Metron] Connecting to hbase with conf:" + conf);
-		LOG.trace("[Metron] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[Metron] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[Metron] ZK Client/port: "
-				+ conf.get("hbase.zookeeper.quorum") + " -> "
-				+ conf.get("hbase.zookeeper.property.clientPort"));
-
-		try {
-
-			LOG.trace("[Metron] Attempting to connect to hbase");
-
-			HConnection connection = HConnectionManager.createConnection(conf);
-
-			LOG.trace("[Metron] CONNECTED TO HBASE");
-
-			HBaseAdmin hba = new HBaseAdmin(conf);
-
-			if (!hba.tableExists(_whitelist_table_name))
-				throw new Exception("Whitelist table doesn't exist");
-
-			if (!hba.tableExists(_blacklist_table_name))
-				throw new Exception("Blacklist table doesn't exist");
-
-			whitelist_table = new HTable(conf, _whitelist_table_name);
-
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _whitelist_table_name);
-			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _blacklist_table_name);
-
-			if (connection == null || whitelist_table == null
-					|| blacklist_table == null)
-				throw new Exception("Unable to initialize hbase connection");
-
-			Scan scan = new Scan();
-
-			ResultScanner rs = whitelist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_whitelist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			whitelist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			scan = new Scan();
-
-			rs = blacklist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_blacklist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			blacklist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			rs.close(); // always close the ResultScanner!
-			hba.close();
-
-			return true;
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-
-		return false;
-
-	}
-
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
-
-		String key = makeKey(source_ip, dst_ip, alert_type);
-
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
-
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
-
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
-
-		return new_UUID;
-
-	}
-
-	public boolean refresh() throws Exception {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-
-	@SuppressWarnings("unchecked")
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-
-		JSONObject content = (JSONObject) raw_message.get("message");
-		JSONObject enrichment = null;
-
-		if (raw_message.containsKey("enrichment"))
-			enrichment = (JSONObject) raw_message.get("enrichment");
-
-		if (!content.containsKey("ip_src_addr")
-				|| !content.containsKey("ip_dst_addr")) {
-
-			int alert_type = 0;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", "Uknown");
-			alert.put("source", "NA");
-			alert.put("dest", "NA");
-			alert.put("body", "Source or destination IP is missing");
-
-			String alert_id = UUID.randomUUID().toString();
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-			LOG.trace("[Metron] Returning alert: " + alerts);
-
-			return alerts;
-
-		}
-
-		String source_ip = content.get("ip_src_addr").toString();
-		String dst_ip = content.get("ip_dst_addr").toString();
-
-		if (source_ip == null && dst_ip == null) {
-
-			int alert_type = 1;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", "Uknown");
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"body",
-					"This communication does not contain a source or destination IP string. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-			LOG.trace("[Metron] Returning alert: " + alerts);
-
-			return alerts;
-
-		}
-
-		if (!ipvalidator.isValidInet4Address(source_ip)
-				&& !ipvalidator.isValidInet4Address(dst_ip)) {
-			int alert_type = 2;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", "Uknown");
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication contains souce and destination IP strings, but these strings are not valid. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-			LOG.trace("[Metron] Returning alert: " + alerts);
-
-			return alerts;
-
-		}
-
-		String designated_host = null;
-
-		if (loaded_whitelist.contains(source_ip))
-			designated_host = source_ip;
-		else if (loaded_whitelist.contains(dst_ip))
-			designated_host = dst_ip;
-
-		if (designated_host == null) {
-			int alert_type = 3;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", "Uknown");
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication does not contain a source or a destination IP that is in the white list. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-			LOG.trace("[Metron] Returning alert: " + alerts);
-
-			return alerts;
-
-		}
-
-		if (source_ip.equals(designated_host)
-				&& !ipvalidator.isValidInet4Address(dst_ip)) {
-			int alert_type = 4;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", designated_host);
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication contains an IP that is not valid. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-		}
-
-		if (dst_ip.equals(designated_host)
-				&& !ipvalidator.isValidInet4Address(source_ip)) {
-			int alert_type = 5;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", designated_host);
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication contains IP that is not valid. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-		}
-
-		if (loaded_blacklist.contains(source_ip)) {
-			int alert_type = 6;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", designated_host);
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication contains IP that is black listed. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-		}
-
-		if (loaded_blacklist.contains(dst_ip)) {
-			int alert_type = 7;
-
-			JSONObject alert = new JSONObject();
-
-			alert.put("title", "IP Check Error Type: : " + alert_type);
-			alert.put("priority", "1");
-			alert.put("type", "error");
-			alert.put("designated_host", designated_host);
-			alert.put("source", source_ip);
-			alert.put("dest", dst_ip);
-			alert.put(
-					"content",
-					"This communication contains IP that is black listed. Communication between two IPs: "
-							+ source_ip + " -> " + dst_ip);
-
-			String alert_id = generateAlertId(source_ip, dst_ip, alert_type);
-
-			alert.put("reference_id", alert_id);
-			alerts.put(alert_id, alert);
-			if (enrichment != null)
-				alert.put("enrichment", enrichment);
-
-		}
-
-		if (alerts.isEmpty())
-			return null;
-		else
-			return alerts;
-	}
-
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-}



[19/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/ISESampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/ISESampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/ISESampleOutput
new file mode 100644
index 0000000..1cb0678
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/ISESampleOutput
@@ -0,0 +1,308 @@
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024855 1 0 2014-08-07 00:45:43.741 -07:00 0000288542 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,BYODRegistration=Unknown\,FeedService=false\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,TimeToProfile=19\,StaticGroupAssignment=false\,NmapSubnetScanID=0\,DeviceRegistrationStatus=NotRegistered\,PortalUser=, EndpointSourceEvent=SNMPQuery Probe, EndpointIdentityGroup=Profile
 d, ProfilerServer=stage-pdp01.cisco.com, 
+Aug  6 17:26:31 10.34.84.145 Aug  7 00:45:43 stage-pdp01 CISE_Profiler 0000024856 1 0 2014-08-07 00:45:43.786 -07:00 0000288543 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=113, EndpointCertainityMetric=10, EndpointIPAddress=10.56.111.14, EndpointMacAddress=3C:97:0E:C3:F8:F1, EndpointMatchedPolicy=Nortel-Device, EndpointNADAddress=10.56.72.127, EndpointOUI=Wistron InfoComm(Kunshan)Co.\,Ltd., EndpointPolicy=Nortel-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,BYODRegistration=Unknown\,EndPointPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407397543718\,TimeToProfile=19\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,UpdateTime=0\,PolicyVersion=402\,IdentityGroupID=0c1d9270-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=10\,FeedService=false\,MatchedPolicyID=49054ed0-68a6-11e1-bc72-0050568e013c\,NmapScanCount=0\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceE
 vent=SNMPQuery Probe, EndpointIdentityGroup=Profiled, ProfilerServer=stage-pdp01.cisco.com, 
+Aug  6 20:00:52 10.42.7.64 Aug  7 03:20:05 npf-sjca-pdp02 CISE_Profiler 0000373185 1 0 2014-08-07 03:20:05.549 -07:00 0011310202 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407394245820\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 21:00:48 10.42.7.64 Aug  7 04:20:00 npf-sjca-pdp02 CISE_Profiler 0000373902 1 0 2014-08-07 04:20:00.983 -07:00 0011322557 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407406806572\,PolicyVersion=403\,I
 dentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 22:22:50 10.42.7.64 Aug  7 05:42:03 npf-sjca-pdp02 CISE_Profiler 0000374846 1 0 2014-08-07 05:42:03.617 -07:00 0011340138 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=10, EndpointMacAddress=68:A8:6D:4E:0D:86, EndpointMatchedPolicy=Apple-Device, EndpointOUI=Apple, EndpointPolicy=Apple-Device, EndpointProperty=StaticAssignment=false\,PostureApplicable=Yes\,host-name=PGIANG-M-306R\,BYODRegistration=Unknown\,EndPointPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1407415322895\,TimeToProfile=717\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,PolicyVersion=403\,IdentityGroupID=abbbcac0-89e6-11e1-bf14-005056aa4dd7\,Total Certainty Factor=10\,ciaddr=0.0.0.0\,FeedService=false\,dhcp-parameter-request-list=1\, 3\, 6\, 15\, 119\, 95\, 252\, 44\, 46\,MatchedPolicyID=377d8ba0-68a6-11e1-bc72-0050568e013c\,NmapSubnetScanID=0\,PortalUser=, EndpointSourceEvent=DHCP Probe, EndpointIdentity
 Group=Apple-Device, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:30:10 10.42.7.64 Aug  7 06:49:23 npf-sjca-pdp02 CISE_Profiler 0000375603 1 0 2014-08-07 06:49:23.920 -07:00 0011353768 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=90, EndpointIPAddress=10.56.129.142, EndpointMacAddress=3C:A9:F4:46:75:CC, EndpointMatchedPolicy=Windows7-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Windows7-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-46-75-cc\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=HASSI-WS03\,BYODRegistration=Unknown\,EndPointPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406112353750\,TimeToProfile=11\,Framed-IP-Address=10.56.129.142\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407410402099\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=90\,FeedService=false\,MatchedPolicyID=615ed410-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1394526689397\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=Mozilla/5.0 (Windows NT 6.1\\\; WOW64\\ rv:30.0) Gecko/20100101 Firefox/30.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:30:48 10.42.7.64 Aug  7 06:50:01 npf-sjca-pdp02 CISE_Profiler 0000375611 1 0 2014-08-07 06:50:01.377 -07:00 0011353875 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=50, EndpointIPAddress=10.34.92.103, EndpointMacAddress=3C:A9:F4:29:FC:3C, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.34.76.212, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=3c-a9-f4-29-fc-3c\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=AMIBASU-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406109860322\,L4_DST_PORT=50428\,TimeToProfile=7\,Framed-IP-Address=10.34.92.103\,LastNmapScanTime=1380758278898\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1406
 686034558\,PolicyVersion=403\,IdentityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=50\,operating-system=Microsoft Windows Vista SP0 - SP2\, Server 2008\, or Windows 7 Ultimate\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1373657280926\,NmapScanCount=3\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 23:32:52 10.42.7.64 Aug  7 06:52:05 npf-sjca-pdp02 CISE_Profiler 0000375636 1 0 2014-08-07 06:52:05.272 -07:00 0011354313 80002 INFO  Profiler: Profiler EndPoint profiling event occurred, ConfigVersionId=241, EndpointCertainityMetric=30, EndpointIPAddress=10.56.129.143, EndpointMacAddress=E8:2A:EA:23:5E:3D, EndpointMatchedPolicy=Microsoft-Workstation, EndpointNADAddress=10.56.129.4, EndpointOUI=Intel Corporate, EndpointPolicy=Microsoft-Workstation, EndpointProperty=StaticAssignment=false\,Calling-Station-ID=e8-2a-ea-23-5e-3d\,Device Identifier=\,PostureApplicable=Yes\,dhcp-class-identifier=MSFT 5.0\,host-name=ANOY-WS01\,BYODRegistration=Unknown\,EndPointPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,FirstCollection=1406114784910\,TimeToProfile=7\,Framed-IP-Address=10.56.129.143\,LastNmapScanTime=0\,StaticGroupAssignment=false\,DeviceRegistrationStatus=NotRegistered\,NAS-Port-Type=Wireless - IEEE 802.11\,RegistrationTimeStamp=0\,UpdateTime=1407395211208\,PolicyVersion=403\,Ide
 ntityGroupID=5cb39b80-68a6-11e1-bc72-0050568e013c\,Total Certainty Factor=30\,FeedService=false\,MatchedPolicyID=5f4a24e0-68a6-11e1-bc72-0050568e013c\,DestinationIPAddress=10.42.7.64\,CreateTime=1405408515121\,NmapScanCount=0\,NmapSubnetScanID=0\,AAA-Server=npf-sjca-pdp02\,PortalUser=, EndpointSourceEvent=RADIUS Probe, EndpointUserAgent=MS-WebServices/1.0, EndpointIdentityGroup=Workstation, ProfilerServer=npf-sjca-pdp02.cisco.com, 
+Aug  6 16:40:52 10.42.7.64 Aug  7 00:00:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370855 1 0 2014-08-07 00:00:04.527 -07:00 0011266584 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270932, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056EF53E323F4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:40:57 10.42.7.63 Aug  7 00:00:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001969834 1 0 2014-08-07 00:00:09.568 -07:00 0098648519 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084839, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4A53E323F9, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:41:24 10.34.84.145 Aug  7 00:00:36 stage-pdp01 CISE_Failed_Attempts 0000024616 1 0 2014-08-07 00:00:36.332 -07:00 0000287007 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19317, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:26 10.34.84.145 Aug  7 00:00:38 stage-pdp01 CISE_Failed_Attempts 0000024617 1 0 2014-08-07 00:00:38.336 -07:00 0000287011 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19318, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:28 10.34.84.145 Aug  7 00:00:40 stage-pdp01 CISE_Failed_Attempts 0000024618 1 0 2014-08-07 00:00:40.336 -07:00 0000287015 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19319, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:30 10.34.84.145 Aug  7 00:00:42 stage-pdp01 CISE_Failed_Attempts 0000024619 1 0 2014-08-07 00:00:42.340 -07:00 0000287019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19320, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:32 10.34.84.145 Aug  7 00:00:44 stage-pdp01 CISE_Failed_Attempts 0000024620 1 0 2014-08-07 00:00:44.340 -07:00 0000287023 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19321, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:41:34 10.34.84.145 Aug  7 00:00:46 stage-pdp01 CISE_Failed_Attempts 0000024621 1 0 2014-08-07 00:00:46.344 -07:00 0000287027 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19322, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:42:02 10.42.7.64 Aug  7 00:01:14 npf-sjca-pdp02 CISE_Failed_Attempts 0000370865 1 0 2014-08-07 00:01:14.610 -07:00 0011266810 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270940, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F053E3243A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:42:07 10.42.7.63 Aug  7 00:01:19 npf-sjca-pdp01 CISE_Failed_Attempts 0001969923 1 0 2014-08-07 00:01:19.665 -07:00 0098652715 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2084986, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4B53E3243F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:42:12 10.42.7.64 Aug  7 00:01:24 npf-sjca-pdp02 CISE_Failed_Attempts 0000370867 1 0 2014-08-07 00:01:24.701 -07:00 0011266815 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270941, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F153E32444, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:42:17 10.42.7.63 Aug  7 00:01:29 npf-sjca-pdp01 CISE_Failed_Attempts 0001969935 1 0 2014-08-07 00:01:29.746 -07:00 0098653362 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085007, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4C53E32449, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:43:22 10.42.7.64 Aug  7 00:02:34 npf-sjca-pdp02 CISE_Failed_Attempts 0000370885 1 0 2014-08-07 00:02:34.792 -07:00 0011267367 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270956, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F353E3248A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:43:27 10.42.7.63 Aug  7 00:02:39 npf-sjca-pdp01 CISE_Failed_Attempts 0001970043 1 0 2014-08-07 00:02:39.808 -07:00 0098657578 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085161, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4D53E3248F, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:43:56 10.42.7.64 Aug  7 00:03:08 npf-sjca-pdp02 CISE_Failed_Attempts 0000370897 1 0 2014-08-07 00:03:08.902 -07:00 0011267657 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=yshchory, Protocol=Radius, RequestLatency=49, NetworkDeviceName=NTN-WLC1, User-Name=yshchory, NAS-IP-Address=10.56.129.4, NAS-Port=1, Framed-IP-Address=10.56.129.141, Class=CACS:0a388104000045cd53e2be75:npf-sjca-pdp02/195481465/270958, Called-Station-ID=6c-41-6a-5f-6e-c0, Calling-Station-ID=90-18-7c-7b-59-01, NAS-Identifier=ntn01-11a-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=2359603, Acct-Output-Octets=26928466, Acct-Session-Id=53e2be78/90:18:7c:7b:59:01/13844, Acct-Authentic=RADIUS, Acct-Session-Time=1466, Acct-Input-Packets=14866, Acct-Output-Packets=23043, und
 efined-52=
+Aug  6 16:44:01 10.42.7.63 Aug  7 00:03:13 npf-sjca-pdp01 CISE_Failed_Attempts 0001970072 1 0 2014-08-07 00:03:13.112 -07:00 0098658804 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=133, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.63, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=10.56.111.14, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=4, Acct-Input-Octets=225395, Acct-Output-Octets=761436, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=43, Acct-Input-Packets=1163, Acct-Output-Packets=1080, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-pair
 =audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp01/195491152/2085221, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=42, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, 
+Aug  6 16:44:32 10.42.7.64 Aug  7 00:03:44 npf-sjca-pdp02 CISE_Failed_Attempts 0000370899 1 0 2014-08-07 00:03:44.851 -07:00 0011267663 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270963, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F453E324D0, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:44:36 10.34.84.145 Aug  7 00:03:48 stage-pdp01 CISE_Failed_Attempts 0000024632 1 0 2014-08-07 00:03:48.375 -07:00 0000287084 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19329, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:37 10.42.7.63 Aug  7 00:03:49 npf-sjca-pdp01 CISE_Failed_Attempts 0001970128 1 0 2014-08-07 00:03:49.893 -07:00 0098661643 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085307, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4E53E324D5, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:44:38 10.34.84.145 Aug  7 00:03:50 stage-pdp01 CISE_Failed_Attempts 0000024633 1 0 2014-08-07 00:03:50.379 -07:00 0000287088 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19330, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:40 10.34.84.145 Aug  7 00:03:52 stage-pdp01 CISE_Failed_Attempts 0000024634 1 0 2014-08-07 00:03:52.379 -07:00 0000287092 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19331, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:42 10.34.84.145 Aug  7 00:03:54 stage-pdp01 CISE_Failed_Attempts 0000024635 1 0 2014-08-07 00:03:54.387 -07:00 0000287096 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19332, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:42 10.42.7.64 Aug  7 00:03:54 npf-sjca-pdp02 CISE_Failed_Attempts 0000370903 1 0 2014-08-07 00:03:54.924 -07:00 0011267670 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270964, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F553E324DA, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:44:44 10.34.84.145 Aug  7 00:03:56 stage-pdp01 CISE_Failed_Attempts 0000024636 1 0 2014-08-07 00:03:56.386 -07:00 0000287100 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19333, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:46 10.34.84.145 Aug  7 00:03:58 stage-pdp01 CISE_Failed_Attempts 0000024637 1 0 2014-08-07 00:03:58.390 -07:00 0000287104 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19334, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:44:47 10.42.7.63 Aug  7 00:03:59 npf-sjca-pdp01 CISE_Failed_Attempts 0001970140 1 0 2014-08-07 00:03:59.951 -07:00 0098662310 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085331, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D4F53E324DF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:44:48 10.42.7.64 Aug  7 00:04:00 npf-sjca-pdp02 CISE_Failed_Attempts 0000370905 1 0 2014-08-07 00:04:00.526 -07:00 0011267674 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.56.72.127, Device Port=1646, DestinationIPAddress=10.42.7.64, DestinationPort=1813, Protocol=Radius, NetworkDeviceName=ntn01-11a-sw4, User-Name=host/salfi-pc.cisco.com, NAS-IP-Address=10.56.72.127, NAS-Port=50212, Service-Type=Framed, Framed-IP-Address=169.254.53.87, Class=CACS:0A38487F00000397BDA7BCAC:npf-sjca-pdp02/195481465/270957, Called-Station-ID=00-26-99-28-5E-BB, Calling-Station-ID=3C-97-0E-C3-F8-F1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=1458615, Acct-Output-Octets=3836368, Acct-Session-Id=00000560, Acct-Authentic=RADIUS, Acct-Session-Time=95, Acct-Input-Packets=4505, Acct-Output-Packets=5619, NAS-Port-Type=Ethernet, NAS-Port-Id=GigabitEthernet2/12, undefined-151=F54C88B0, cisco-av-p
 air=audit-session-id=0A38487F00000397BDA7BCAC, cisco-av-pair=connect-progress=Auth Open, AcsSessionID=npf-sjca-pdp02/195481465/270965, FailureReason=11038 RADIUS Accounting-Request header contains invalid Authenticator field, Step=11004, Step=11017, Step=11038, Step=5435, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0A38487F00000397BDA7BCAC, TotalFailedAttempts=2, TotalFailedTime=52, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, 
+Aug  6 16:45:52 10.42.7.64 Aug  7 00:05:04 npf-sjca-pdp02 CISE_Failed_Attempts 0000370920 1 0 2014-08-07 00:05:04.969 -07:00 0011267987 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270977, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F653E32520, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:45:58 10.42.7.63 Aug  7 00:05:09 npf-sjca-pdp01 CISE_Failed_Attempts 0001970212 1 0 2014-08-07 00:05:09.998 -07:00 0098665518 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085460, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5053E32525, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:03 10.42.7.64 Aug  7 00:06:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000370931 1 0 2014-08-07 00:06:15.016 -07:00 0011268196 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270985, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F753E32567, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:47:08 10.42.7.63 Aug  7 00:06:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970324 1 0 2014-08-07 00:06:20.055 -07:00 0098669942 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085599, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5153E3256C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:13 10.42.7.64 Aug  7 00:06:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000370934 1 0 2014-08-07 00:06:25.097 -07:00 0011268209 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/270987, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F853E32571, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:47:18 10.42.7.63 Aug  7 00:06:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970335 1 0 2014-08-07 00:06:30.119 -07:00 0098670037 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085618, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5253E32576, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:47:48 10.34.84.145 Aug  7 00:07:00 stage-pdp01 CISE_Failed_Attempts 0000024649 1 0 2014-08-07 00:07:00.418 -07:00 0000287210 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19342, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:50 10.34.84.145 Aug  7 00:07:02 stage-pdp01 CISE_Failed_Attempts 0000024650 1 0 2014-08-07 00:07:02.421 -07:00 0000287214 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19343, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:52 10.34.84.145 Aug  7 00:07:04 stage-pdp01 CISE_Failed_Attempts 0000024651 1 0 2014-08-07 00:07:04.425 -07:00 0000287218 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19344, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:54 10.34.84.145 Aug  7 00:07:06 stage-pdp01 CISE_Failed_Attempts 0000024652 1 0 2014-08-07 00:07:06.429 -07:00 0000287222 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19345, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:56 10.34.84.145 Aug  7 00:07:08 stage-pdp01 CISE_Failed_Attempts 0000024653 1 0 2014-08-07 00:07:08.429 -07:00 0000287226 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19346, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:47:58 10.34.84.145 Aug  7 00:07:10 stage-pdp01 CISE_Failed_Attempts 0000024654 1 0 2014-08-07 00:07:10.433 -07:00 0000287230 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19347, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:48:23 10.42.7.64 Aug  7 00:07:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000370955 1 0 2014-08-07 00:07:35.138 -07:00 0011268472 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271001, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056F953E325B7, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:48:28 10.42.7.63 Aug  7 00:07:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970420 1 0 2014-08-07 00:07:40.178 -07:00 0098673462 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085757, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5353E325BC, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:49:33 10.42.7.64 Aug  7 00:08:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000370984 1 0 2014-08-07 00:08:45.219 -07:00 0011269071 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271016, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FB53E325FD, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:49:38 10.42.7.63 Aug  7 00:08:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970519 1 0 2014-08-07 00:08:50.259 -07:00 0098677825 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085892, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5453E32602, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:49:43 10.42.7.64 Aug  7 00:08:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000370986 1 0 2014-08-07 00:08:55.298 -07:00 0011269076 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271017, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FC53E32607, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:49:48 10.42.7.63 Aug  7 00:09:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970524 1 0 2014-08-07 00:09:00.330 -07:00 0098678019 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2085909, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5553E3260C, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:50:53 10.42.7.64 Aug  7 00:10:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000370999 1 0 2014-08-07 00:10:05.339 -07:00 0011269371 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271027, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FD53E3264D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:50:58 10.42.7.63 Aug  7 00:10:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970625 1 0 2014-08-07 00:10:10.388 -07:00 0098682297 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086061, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5653E32652, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:51:00 10.34.84.145 Aug  7 00:10:12 stage-pdp01 CISE_Failed_Attempts 0000024661 1 0 2014-08-07 00:10:12.492 -07:00 0000287258 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19354, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:02 10.34.84.145 Aug  7 00:10:14 stage-pdp01 CISE_Failed_Attempts 0000024662 1 0 2014-08-07 00:10:14.496 -07:00 0000287262 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19355, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:04 10.34.84.145 Aug  7 00:10:16 stage-pdp01 CISE_Failed_Attempts 0000024663 1 0 2014-08-07 00:10:16.496 -07:00 0000287266 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19356, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:06 10.34.84.145 Aug  7 00:10:18 stage-pdp01 CISE_Failed_Attempts 0000024664 1 0 2014-08-07 00:10:18.500 -07:00 0000287270 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19357, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:08 10.34.84.145 Aug  7 00:10:20 stage-pdp01 CISE_Failed_Attempts 0000024665 1 0 2014-08-07 00:10:20.504 -07:00 0000287274 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19358, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:51:10 10.34.84.145 Aug  7 00:10:22 stage-pdp01 CISE_Failed_Attempts 0000024667 1 0 2014-08-07 00:10:22.507 -07:00 0000287279 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19359, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:52:03 10.42.7.64 Aug  7 00:11:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371005 1 0 2014-08-07 00:11:15.432 -07:00 0011269421 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271031, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FE53E32693, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:52:08 10.42.7.63 Aug  7 00:11:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001970691 1 0 2014-08-07 00:11:20.468 -07:00 0098685176 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086181, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5753E32698, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:52:13 10.42.7.64 Aug  7 00:11:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371007 1 0 2014-08-07 00:11:25.515 -07:00 0011269426 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271032, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a0740000056FF53E3269D, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:52:18 10.42.7.63 Aug  7 00:11:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001970708 1 0 2014-08-07 00:11:30.551 -07:00 0098685669 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=8, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086202, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5853E326A2, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:53:23 10.42.7.64 Aug  7 00:12:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371016 1 0 2014-08-07 00:12:35.547 -07:00 0011269586 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271040, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570053E326E3, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:53:28 10.42.7.63 Aug  7 00:12:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001970802 1 0 2014-08-07 00:12:40.596 -07:00 0098689883 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086334, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5953E326E8, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:54:12 10.34.84.145 Aug  7 00:13:24 stage-pdp01 CISE_Failed_Attempts 0000024680 1 0 2014-08-07 00:13:24.527 -07:00 0000287388 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19368, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:14 10.34.84.145 Aug  7 00:13:26 stage-pdp01 CISE_Failed_Attempts 0000024681 1 0 2014-08-07 00:13:26.531 -07:00 0000287392 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19369, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:16 10.34.84.145 Aug  7 00:13:28 stage-pdp01 CISE_Failed_Attempts 0000024682 1 0 2014-08-07 00:13:28.534 -07:00 0000287396 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19370, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:18 10.34.84.145 Aug  7 00:13:30 stage-pdp01 CISE_Failed_Attempts 0000024683 1 0 2014-08-07 00:13:30.538 -07:00 0000287400 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19371, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:20 10.34.84.145 Aug  7 00:13:32 stage-pdp01 CISE_Failed_Attempts 0000024684 1 0 2014-08-07 00:13:32.538 -07:00 0000287404 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19372, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:22 10.34.84.145 Aug  7 00:13:34 stage-pdp01 CISE_Failed_Attempts 0000024685 1 0 2014-08-07 00:13:34.542 -07:00 0000287408 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19373, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:54:33 10.42.7.64 Aug  7 00:13:45 npf-sjca-pdp02 CISE_Failed_Attempts 0000371020 1 0 2014-08-07 00:13:45.628 -07:00 0011269631 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271044, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570153E32729, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:54:38 10.42.7.63 Aug  7 00:13:50 npf-sjca-pdp01 CISE_Failed_Attempts 0001970913 1 0 2014-08-07 00:13:50.668 -07:00 0098695334 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086486, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5A53E3272E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:54:43 10.42.7.64 Aug  7 00:13:55 npf-sjca-pdp02 CISE_Failed_Attempts 0000371025 1 0 2014-08-07 00:13:55.694 -07:00 0011269740 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=7, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271048, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570253E32733, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:54:48 10.42.7.63 Aug  7 00:14:00 npf-sjca-pdp01 CISE_Failed_Attempts 0001970924 1 0 2014-08-07 00:14:00.705 -07:00 0098695591 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086505, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5B53E32738, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:55:53 10.42.7.64 Aug  7 00:15:05 npf-sjca-pdp02 CISE_Failed_Attempts 0000371036 1 0 2014-08-07 00:15:05.742 -07:00 0011270054 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271057, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570353E32779, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:55:58 10.42.7.63 Aug  7 00:15:10 npf-sjca-pdp01 CISE_Failed_Attempts 0001970997 1 0 2014-08-07 00:15:10.772 -07:00 0098698954 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086621, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5C53E3277E, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:57:03 10.42.7.64 Aug  7 00:16:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371051 1 0 2014-08-07 00:16:15.827 -07:00 0011270497 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=6, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271067, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570453E327BF, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:57:08 10.42.7.63 Aug  7 00:16:20 npf-sjca-pdp01 CISE_Failed_Attempts 0001971096 1 0 2014-08-07 00:16:20.857 -07:00 0098703837 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086806, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5D53E327C4, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:57:24 10.34.84.145 Aug  7 00:16:36 stage-pdp01 CISE_Failed_Attempts 0000024697 1 0 2014-08-07 00:16:36.602 -07:00 0000287553 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19384, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:57:26 10.34.84.145 Aug  7 00:16:38 stage-pdp01 CISE_Failed_Attempts 0000024698 1 0 2014-08-07 00:16:38.605 -07:00 0000287557 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19385, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:57:28 10.34.84.145 Aug  7 00:16:40 stage-pdp01 CISE_Failed_Attempts 0000024699 1 0 2014-08-07 00:16:40.609 -07:00 0000287561 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19386, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:57:30 10.34.84.145 Aug  7 00:16:42 stage-pdp01 CISE_Failed_Attempts 0000024700 1 0 2014-08-07 00:16:42.613 -07:00 0000287565 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19387, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:57:32 10.34.84.145 Aug  7 00:16:44 stage-pdp01 CISE_Failed_Attempts 0000024701 1 0 2014-08-07 00:16:44.613 -07:00 0000287569 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19388, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:57:34 10.34.84.145 Aug  7 00:16:46 stage-pdp01 CISE_Failed_Attempts 0000024702 1 0 2014-08-07 00:16:46.617 -07:00 0000287573 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19389, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 16:58:03 10.42.7.64 Aug  7 00:17:15 npf-sjca-pdp02 CISE_Failed_Attempts 0000371063 1 0 2014-08-07 00:17:15.966 -07:00 0011270832 5435 NOTICE RADIUS: NAS conducted several failed authentications of the same scenario, ConfigVersionId=240, Device IP Address=10.34.76.212, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1813, RadiusPacketType=AccountingRequest, UserName=hslai, Protocol=Radius, RequestLatency=25, NetworkDeviceName=sjcm-00a-npf-wlc1, User-Name=hslai, NAS-IP-Address=10.34.76.212, NAS-Port=1, Framed-IP-Address=10.34.94.11, Class=CACS:0a224cd40002fdf953e327f2:npf-sjca-pdp02/195481465/271072, Called-Station-ID=88-43-e1-62-1d-20, Calling-Station-ID=24-a2-e1-3b-4b-cb, NAS-Identifier=sjcm-00a-npf-wlc1, Acct-Status-Type=Interim-Update, Acct-Delay-Time=0, Acct-Input-Octets=5198, Acct-Output-Octets=4093, Acct-Session-Id=53e327f2/24:a2:e1:3b:4b:cb/174403, Acct-Authentic=RADIUS, Acct-Session-Time=9, Acct-Input-Packets=37, Acct-Output-Packets=13, undefined-52
 =
+Aug  6 16:58:13 10.42.7.64 Aug  7 00:17:25 npf-sjca-pdp02 CISE_Failed_Attempts 0000371065 1 0 2014-08-07 00:17:25.902 -07:00 0011270838 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=4, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271076, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570553E32805, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:58:18 10.42.7.63 Aug  7 00:17:30 npf-sjca-pdp01 CISE_Failed_Attempts 0001971204 1 0 2014-08-07 00:17:30.916 -07:00 0098707928 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2086981, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5E53E3280A, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 16:59:23 10.42.7.64 Aug  7 00:18:35 npf-sjca-pdp02 CISE_Failed_Attempts 0000371070 1 0 2014-08-07 00:18:35.942 -07:00 0011271044 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271081, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570653E3284B, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 16:59:28 10.42.7.64 Aug  7 00:18:40 npf-sjca-pdp02 CISE_Failed_Attempts 0000371072 1 0 2014-08-07 00:18:40.669 -07:00 0011271053 5400 NOTICE Failed-Attempt: Authentication failed, ConfigVersionId=240, Device IP Address=10.56.129.4, Device Port=32770, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=istern, Protocol=Radius, RequestLatency=12, NetworkDeviceName=NTN-WLC1, User-Name=istern, NAS-IP-Address=10.56.129.4, NAS-Port=1, Service-Type=Framed, Framed-MTU=1300, State=37CPMSessionID=0a388104000045de53e2c750\;41SessionID=npf-sjca-pdp02/195481465/271077\;, Called-Station-ID=70-10-5c-f3-2f-80:alpha_byod, Calling-Station-ID=f0-27-65-48-8c-8f, NAS-Identifier=ntn01-11a-wlc1, NAS-Port-Type=Wireless - IEEE 802.11, Tunnel-Type=(tag=0) VLAN, Tunnel-Medium-Type=(tag=0) 802, Tunnel-Private-Group-ID=(tag=0) 604, undefined-89=
+Aug  6 16:59:28 10.42.7.63 Aug  7 00:18:40 npf-sjca-pdp01 CISE_Failed_Attempts 0001971282 1 0 2014-08-07 00:18:40.981 -07:00 0098711291 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=2, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087140, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D5F53E32850, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 17:00:33 10.42.7.64 Aug  7 00:19:46 npf-sjca-pdp02 CISE_Failed_Attempts 0000371080 1 0 2014-08-07 00:19:46.020 -07:00 0011271232 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=240, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.64, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=5, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp02/195481465/271087, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a07400000570753E32892, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Resp
 onse={RadiusPacketType=Drop; },
+Aug  6 17:00:36 10.34.84.145 Aug  7 00:19:48 stage-pdp01 CISE_Failed_Attempts 0000024712 1 0 2014-08-07 00:19:48.660 -07:00 0000287604 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19396, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 17:00:38 10.34.84.145 Aug  7 00:19:50 stage-pdp01 CISE_Failed_Attempts 0000024713 1 0 2014-08-07 00:19:50.664 -07:00 0000287608 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19397, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 17:00:39 10.42.7.63 Aug  7 00:19:51 npf-sjca-pdp01 CISE_Failed_Attempts 0001971393 1 0 2014-08-07 00:19:51.042 -07:00 0098716185 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=133, Device IP Address=10.56.72.126, Device Port=1645, DestinationIPAddress=10.42.7.63, DestinationPort=1812, RadiusPacketType=AccessRequest, UserName=#CTSREQUEST#, Protocol=Radius, RequestLatency=1, NetworkDeviceName=ntn01-11a-sw3, User-Name=#CTSREQUEST#, NAS-IP-Address=10.56.72.126, Service-Type=Outbound, AcsSessionID=npf-sjca-pdp01/195491152/2087311, SelectedAccessService=NDAC_SGT_Service, FailureReason=11302 Received Secure RADIUS request without a cts-pac-opaque cisco-av-pair attribute, Step=11001, Step=11017, Step=15012, Step=11302, NetworkDeviceGroups=Location#All Locations#NTN, NetworkDeviceGroups=Device Type#All Device Types#Wired, CPMSessionID=0a2a073f00005D6053E32897, Model Name=4503, Location=Location#All Locations#NTN, Device Type=Device Type#All Device Types#Wired, Res
 ponse={RadiusPacketType=Drop; },
+Aug  6 17:00:40 10.34.84.145 Aug  7 00:19:52 stage-pdp01 CISE_Failed_Attempts 0000024714 1 0 2014-08-07 00:19:52.664 -07:00 0000287612 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19398, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 17:00:42 10.34.84.145 Aug  7 00:19:54 stage-pdp01 CISE_Failed_Attempts 0000024715 1 0 2014-08-07 00:19:54.668 -07:00 0000287616 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19399, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 17:00:44 10.34.84.145 Aug  7 00:19:56 stage-pdp01 CISE_Failed_Attempts 0000024716 1 0 2014-08-07 00:19:56.672 -07:00 0000287620 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19400, FailureReason=11007 Could not locate Network Device or AAA Client, Step=11001, Step=11017, Step=11007, Step=5405, 
+Aug  6 17:00:46 10.34.84.145 Aug  7 00:19:58 stage-pdp01 CISE_Failed_Attempts 0000024717 1 0 2014-08-07 00:19:58.675 -07:00 0000287624 5405 NOTICE Failed-Attempt: RADIUS Request dropped, ConfigVersionId=113, Device IP Address=172.23.91.132, Device Port=32769, DestinationIPAddress=10.34.84.145, DestinationPort=1812, Protocol=Radius, User-Name=test, NAS-IP-Address=192.168.30.11, Service-Type=NAS Prompt, NAS-Identifier=Cisco_1b:e0:84, AcsSessionID=stage-pdp01/196593288/19401, FailureR

<TRUNCATED>


[18/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/LancopeExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/LancopeExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/LancopeExampleOutput
new file mode 100644
index 0000000..b1bccf9
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/LancopeExampleOutput
@@ -0,0 +1,40 @@
+{"message":"<131>Jul 17 15:27:27 smc-01 StealthWatch[12365]: 2014-06-24T14:37:58Z 192.168.200.9 199.237.198.232 Critical Bad Host The host has been observed doing something bad to another host. Source Host is http (80/tcp) client to target.host.name (199.237.198.232)","@version":"1","@timestamp":"2014-07-17T15:24:32.217Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:35:00 smc-01 StealthWatch[12365]: 2014-07-17T15:34:30Z 10.201.3.83 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 92.64M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:32:05.934Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:35:00 smc-01 StealthWatch[12365]: 2014-07-17T15:34:30Z 10.201.3.145 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 45.2M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:32:05.935Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:35:00 smc-01 StealthWatch[12365]: 2014-07-17T15:34:30Z 10.201.3.50 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 41.46M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:32:05.936Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:42:01 smc-01 StealthWatch[12365]: 2014-07-17T15:42:00Z 10.10.101.24 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 39.37M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:39:05.976Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:56:01 smc-01 StealthWatch[12365]: 2014-07-17T15:55:00Z 0.0.100.0 0.0.0.0 Major ICMP Flood The source IP has sent an excessive number of ICMP packets in the last 5 minutes. Observed 262.4k pp5m. Policy maximum allows up to 100k pp5m.","@version":"1","@timestamp":"2014-07-17T15:53:05.995Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:56:01 smc-01 StealthWatch[12365]: 2014-07-17T15:55:00Z 0.0.88.0 0.0.0.0 Major High Total Traffic The total traffic inbound + outbound exceeds the acceptable total traffic values. Observed 16.26G bytes. Expected 4.17G bytes, tolerance of 50 allows up to 15.06G bytes.","@version":"1","@timestamp":"2014-07-17T15:53:05.996Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:57:01 smc-01 StealthWatch[12365]: 2014-07-17T15:56:30Z 10.201.3.50 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 42.49M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:54:05.984Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.992Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.30.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.47M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.995Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.20.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 40.48M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.995Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.201.3.83 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 96.74M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.992Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.100.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 32.95M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.997Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.90.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.52M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:06.000Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.80.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.51M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:06.002Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.70.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.49M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:06.002Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.110.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 32.92M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.997Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.60.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.49M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:06.003Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.50.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.48M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:06.004Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:06:01 smc-01 StealthWatch[12365]: 2014-07-17T16:05:00Z 10.10.101.46 0.0.0.0 Major New Flows Initiated The host has exceeded the acceptable total number of new flows initiated in a 5-minute period. ","@version":"1","@timestamp":"2014-07-17T16:03:06.046Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:06:01 smc-01 StealthWatch[12365]: 2014-07-17T16:05:00Z 10.10.101.46 0.0.0.0 Major Max Flows Initiated The host has initiated more than an acceptable maximum number of flows. ","@version":"1","@timestamp":"2014-07-17T16:03:06.046Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.110.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 33.01M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.146Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.100.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 33.03M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.147Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.90.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.59M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.148Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.80.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.58M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.157Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.70.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.56M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.157Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.60.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.56M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.158Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.50.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.160Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.30.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.173Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.201.3.83 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 96.82M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.173Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.20.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 40.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.174Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.110.10.254 10.120.80.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.174Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.100.10.254 10.110.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.174Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.90.10.254 10.100.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.174Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.80.10.254 10.90.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.175Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.70.10.254 10.80.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.183Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.60.10.254 10.70.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.184Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.50.10.254 10.60.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.184Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:20:00Z 10.40.10.254 10.50.100.254 Minor Worm Propagation The host has scanned and connected on a particular port across more than one subnet, and the host was previously scanned and connected to by a host for which the Worm Activity alarm has been raised. Worm propagated from Source Host using smb (445/tcp)","@version":"1","@timestamp":"2014-07-17T16:17:05.184Z","type":"syslog","host":"10.122.196.201"}
+{"message":"<131>Jul 17 16:20:00 smc-01 StealthWatch[12365]: 2014-07-17T16:19:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.63M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T16:17:05.168Z","type":"syslog","host":"192.249.113.37"}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PCAPExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PCAPExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PCAPExampleOutput
new file mode 100644
index 0000000..e730181
Binary files /dev/null and b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PCAPExampleOutput differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PaloaltoOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PaloaltoOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PaloaltoOutput
new file mode 100644
index 0000000..16793a2
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/PaloaltoOutput
@@ -0,0 +1,100 @@
+<11>Jan  5 05:38:59 PAN1.exampleCustomer.com 1,2015/01/05 05:38:58,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:58,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:58,12031,1,54180,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=67AF705D60B1119C0F18BEA336F9",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368099,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109656,, 
+<11>Jan  5 05:38:59 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,9399,1,54185,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=8;tile=1;ord=F7315B6954238BE7FAE19D6EE0ECD",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368106,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109661,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,50636,1,54181,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=ECA531364D3B6522F9B89EE09381",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368111,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109663,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,19582,1,54177,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=9DB9E71EB91389C954E499B68203",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368112,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109664,, 
+<11>Jan  5 05:39:00 PAN1.exampleCustomer.com 1,2015/01/05 05:38:59,0006C110285,THREAT,vulnerability,1,2015/01/05 05:38:59,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:38:59,38426,1,54202,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=336x288&id=4;tile=1;ord=B1B8DA9446290140922C4F6E092D8",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368119,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109668,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:36,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:36,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:36,28124,1,56475,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=6;tile=1;ord=E526836F078EB22491799C6373ED3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431967,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109692,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:37,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:37,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:37,36574,1,56485,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=6;tile=1;ord=E526836F078EB22491799C6373ED3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431978,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109694,, 
+<11>Jan  5 07:11:37 PAN1.exampleCustomer.com 1,2015/01/05 07:11:37,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:37,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:37,3892,1,56486,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=E052042F211E553D6E1E44921E49",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431979,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109695,, 
+<11>Jan  5 07:15:23 PAN1.exampleCustomer.com 1,2015/01/05 07:15:23,0006C110285,THREAT,vulnerability,1,2015/01/05 07:15:23,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:15:23,15102,1,56706,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=EB863BEB8809A5598F62C4CEDED7",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347434790,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109701,, 
+<11>Jan  5 07:15:23 PAN1.exampleCustomer.com 1,2015/01/05 07:15:23,0006C110285,THREAT,vulnerability,1,2015/01/05 07:15:23,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:15:23,54920,1,56704,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=4FB22ED5B7A0C344DB28AB34C1B3",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347434799,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109706,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,59603,1,56051,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=6845CCF1045EE15B60F30B807684",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421830,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109684,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,24223,1,56042,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=256A9BBB8867977D118E2E511742",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421831,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109685,, 
+<11>Jan  5 06:57:50 PAN1.exampleCustomer.com 1,2015/01/05 06:57:50,0006C110285,THREAT,vulnerability,1,2015/01/05 06:57:50,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 06:57:50,61627,1,56043,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=6845CCF1045EE15B60F30B807684",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347421828,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109682,, 
+<11>Jan  5 07:11:36 PAN1.exampleCustomer.com 1,2015/01/05 07:11:36,0006C110285,THREAT,vulnerability,1,2015/01/05 07:11:36,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:11:36,37087,1,56307,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=E052042F211E553D6E1E44921E49",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347431965,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109691,, 
+<11>Jan  5 05:48:38 PAN1.exampleCustomer.com 1,2015/01/05 05:48:38,0006C110285,THREAT,vulnerability,1,2015/01/05 05:48:38,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:48:38,48136,1,54557,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=EDD821C39BC0A49777874E02F7FA",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347373997,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109676,, 
+<11>Jan  5 05:39:01 PAN1.exampleCustomer.com 1,2015/01/05 05:39:00,0006C110285,THREAT,vulnerability,1,2015/01/05 05:39:00,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 05:39:00,60649,1,54209,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=728x90&id=1;tile=1;ord=6510BF66C3B427ED44AC521752E695",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347368140,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109674,, 
+<12>Jan  5 06:41:35 PAN1.exampleCustomer.com 1,2015/01/05 06:41:34,0006C113118,THREAT,virus,1,2015/01/05 06:41:34,94.0.0.3,10.0.0.208,94.0.0.3,211.0.10.226,EX-Allow,,example\user.name,web-browsing,vsys1,untrust,trust,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 06:41:34,16864,2,80,60194,80,56595,0x404000,tcp,deny,"FreemakeVideoConverterSetup.exe",Virus/Win32.WGeneric.dyxeh(2367869),any,medium,server-to-client,40462931,0x0,GB,10.0.0.0-10.255.255.255,0,,0,, 
+<10>Jan  5 05:58:47 PAN1 1,2015/01/05 05:58:46,009401011564,THREAT,vulnerability,1,2015/01/05 05:58:46,10.0.0.38,10.3.0.31,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 05:58:46,44183,1,60510,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,724178,0x0,Unknown,Unknown,0,,1200515273392656547,, 
+<11>Jan  5 07:41:48 PAN1.exampleCustomer.com 1,2015/01/05 07:41:47,0006C110285,THREAT,vulnerability,1,2015/01/05 07:41:47,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:41:47,20240,1,65530,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=9944D12C8FB4EB798036CAD371C6",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347454781,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109719,, 
+<11>Jan  5 07:41:48 PAN1.exampleCustomer.com 1,2015/01/05 07:41:47,0006C110285,THREAT,vulnerability,1,2015/01/05 07:41:47,10.0.0.115,216.0.10.230,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 07:41:47,2518,1,65531,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=8;tile=1;ord=E0827A4B1C6179DF64205E13AECDF",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347454775,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109715,, 
+<12>Jan  5 09:08:53 PAN1.exampleCustomer.com 1,2015/01/05 09:08:52,0011C103117,THREAT,virus,1,2015/01/05 09:08:52,61.0.0.202,10.0.0.81,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,web-browsing,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 09:08:52,72686,1,80,60538,0,0,0x4000,tcp,deny,"haozip_v5.0_up6.exe",Virus/Win32.WGeneric.dpqqf(2516743),any,medium,server-to-client,3422073984,0x0,CN,10.0.0.0-10.255.255.255,0,,0,, 
+<12>Jan  5 09:10:14 PAN1.exampleCustomer.com 1,2015/01/05 09:10:13,001606003946,THREAT,virus,1,2015/01/05 09:10:13,8.30.222.22,10.0.0.109,8.30.222.22,172.13.0.21,EX-Allow,,example\user.name,web-browsing,vsys1,untrust,trust,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 09:10:13,17060,1,80,64672,80,21754,0x404000,tcp,deny,"youdaogouwu-3.13-dictupdate.exe",Virus/Win32.WGeneric.dyugt(2272380),any,medium,server-to-client,38698043,0x0,US,10.0.0.0-10.255.255.255,0,,0,, 
+<11>Jan  5 09:10:37 PAN1 1,2015/01/05 09:10:36,0003C105690,THREAT,vulnerability,1,2015/01/05 09:10:36,10.0.0.222,95.0.0.154,192.168.100.11,95.0.0.154,Guest_to_Internet,,,web-browsing,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 09:10:36,97395,1,59784,80,46548,80,0x80400000,tcp,reset-both,"8-134.0-87.0.zip",HTTP Unauthorized Brute-force Attack(40031),any,high,client-to-server,247195018,0x0,10.0.0.0-10.255.255.255,IT,0,,1200340530903386781,, 
+<11>Jan  5 09:02:24 PAN1 1,2015/01/05 09:02:24,0003C105690,THREAT,vulnerability,1,2015/01/05 09:02:24,10.0.0.222,95.0.0.154,192.168.100.11,95.0.0.154,Guest_to_Internet,,,web-browsing,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 09:02:24,137904,1,59762,80,7021,80,0x80400000,tcp,reset-both,"8-136.0-83.0.zip",HTTP Unauthorized Brute-force Attack(40031),any,high,client-to-server,247188168,0x0,10.0.0.0-10.255.255.255,IT,0,,1200340530903386777,, 
+<11>Jan  5 09:23:52 PAN1 1,2015/01/05 09:23:51,009401011564,THREAT,vulnerability,1,2015/01/05 09:23:51,10.0.0.135,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 09:23:51,15299,1,49643,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,732393,0x0,Unknown,Unknown,0,,1200515273392656561,, 
+<10>Jan  5 10:03:58 PAN1 1,2015/01/05 10:03:58,009401011564,THREAT,vulnerability,1,2015/01/05 10:03:58,10.0.0.38,10.3.0.37,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:03:58,57935,1,11648,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733522,0x0,Unknown,Unknown,0,,1200515273392656570,, 
+<11>Jan  5 07:19:09 PAN1 1,2015/01/05 07:19:08,009401011564,THREAT,vulnerability,1,2015/01/05 07:19:08,10.0.0.135,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 07:19:08,22557,1,49638,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,727520,0x0,Unknown,Unknown,0,,1200515273392656555,, 
+<10>Jan  5 10:04:00 PAN1 1,2015/01/05 10:04:00,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:00,10.0.0.38,10.2.0.40,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:00,37972,1,43861,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733536,0x0,Unknown,Unknown,0,,1200515273392656584,, 
+<10>Jan  5 10:04:01 PAN1 1,2015/01/05 10:04:01,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:01,10.0.0.38,172.13.0.68,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:01,49163,1,43869,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733543,0x0,Unknown,US,0,,1200515273392656591,, 
+<10>Jan  5 02:16:00 PAN1.exampleCustomer.com 1,2015/01/05 02:16:00,009401009421,THREAT,spyware,1,2015/01/05 02:16:00,10.0.0.67,54.0.0.140,68.1.100.154,54.0.0.140,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 02:16:00,2898,1,50429,80,13954,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3841944,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<10>Jan  5 02:16:17 PAN1.exampleCustomer.com 1,2015/01/05 02:16:17,009401009421,THREAT,spyware,1,2015/01/05 02:16:17,10.0.0.67,54.0.0.140,68.1.100.154,54.0.0.140,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 02:16:17,21959,1,50459,80,45933,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3842040,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<10>Jan  5 10:55:21 PAN1.exampleCustomer.com 1,2015/01/05 10:55:21,0011C103117,THREAT,vulnerability,1,2015/01/05 10:55:21,172.13.0.44,10.0.0.48,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 10:55:21,116502,1,55910,443,0,0,0x80004000,tcp,reset-both,"bar.exampleCustomer.com/",OpenSSL SSL/TLS MITM vulnerability(36485),any,critical,client-to-server,3422361316,0x0,NO,10.0.0.0-10.255.255.255,0,,1200269920802300348,, 
+<12>Jan  5 11:31:36 PAN1.exampleCustomer.com 1,2015/01/05 11:31:36,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:36,31.0.0.198,10.0.0.210,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:36,181928,1,55325,443,0,0,0x80004000,tcp,alert,"foo.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422463820,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300355,, 
+<12>Jan  5 11:31:17 PAN1.exampleCustomer.com 1,2015/01/05 11:31:17,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:17,31.0.0.198,10.0.0.56,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:17,33936654,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422463061,0x0,CH,10.0.0.0-10.255.255.255,0,,1344385108878191554,, 
+<12>Jan  5 11:07:20 PAN1.exampleCustomer.com 1,2015/01/05 11:07:20,0011C103117,THREAT,vulnerability,1,2015/01/05 11:07:20,31.0.0.198,10.0.0.70,0.0.0.0,0.0.0.0,EX-EasyAV,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:07:20,142520,1,55325,443,0,0,0x4000,tcp,alert,"fizzbuzz.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422395620,0x0,CH,10.0.0.0-10.255.255.255,0,,0,, 
+<10>Jan  5 10:04:06 PAN1 1,2015/01/05 10:04:05,009401011564,THREAT,vulnerability,1,2015/01/05 10:04:05,10.0.0.38,10.2.0.20,0.0.0.0,0.0.0.0,INT_out,,,ms-ds-smb,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 10:04:05,58977,1,43882,445,0,0,0x80004000,tcp,reset-both,"",Microsoft Windows SMBv2 Remote Code Execution Vulnerability(32541),any,critical,client-to-server,733556,0x0,Unknown,Unknown,0,,1200515273392656603,, 
+<11>Jan  5 11:20:02 PAN1 1,2015/01/05 11:20:02,009401011564,THREAT,vulnerability,1,2015/01/05 11:20:02,10.0.0.131,10.1.0.42,0.0.0.0,0.0.0.0,INT_out,,,sccp,vsys1,v_internal,v_external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 11:20:02,25219,1,49569,2000,0,0,0x80004000,tcp,reset-both,"",Digium Asterisk Skinny Channel NULL-Pointer Dereference Vulnerability(35378),any,high,client-to-server,735575,0x0,Unknown,Unknown,0,,1200515273392656605,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,12971,1,56879,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=339DEA400FDFBF9127DA196347F1",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631498,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109742,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,52846,1,56881,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=14;tile=1;ord=A501E1CAA93F3B256222F902C051",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631499,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109743,, 
+<11>Jan  5 12:31:01 PAN1.exampleCustomer.com 1,2015/01/05 12:31:01,0006C110285,THREAT,vulnerability,1,2015/01/05 12:31:01,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:31:01,132,1,56880,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=A01019D3E75E253C81B9DBE60AF0",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347631500,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109744,, 
+<11>Jan  5 11:39:28 PAN1.exampleCustomer.com 1,2015/01/05 11:39:28,0006C110285,THREAT,vulnerability,1,2015/01/05 11:39:28,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 11:39:28,55273,1,55241,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=160x600&id=13;tile=1;ord=F20325FB397BD62AFCE60C004651",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347599433,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109725,, 
+<11>Jan  5 12:09:04 PAN1.exampleCustomer.com 1,2015/01/05 12:09:03,0006C110285,THREAT,vulnerability,1,2015/01/05 12:09:03,10.0.0.115,216.0.10.198,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,web-browsing,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:09:03,40131,1,61994,80,0,0,0x80004000,tcp,reset-both,"ad.aspx?f=300x250&id=12;tile=1;ord=9C998477823511B311AA24EC53D6",HTTP: IIS Denial Of Service Attempt(40019),any,high,client-to-server,347617382,0x0,10.0.0.0-10.255.255.255,US,0,,1200568889751109736,, 
+<12>Jan  5 13:45:24 PAN1.exampleCustomer.com 1,2015/01/05 13:45:23,0011C103117,THREAT,vulnerability,1,2015/01/05 13:45:23,31.0.0.198,10.0.0.60,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 13:45:23,179279,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",Unknown(36397),any,medium,client-to-server,3423036992,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300367,, 
+<12>Jan  5 13:45:24 PAN1.exampleCustomer.com 1,2015/01/05 13:45:23,0011C103117,THREAT,vulnerability,1,2015/01/05 13:45:23,10.0.0.10,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,twitter-base,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 13:45:23,32298,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3423036994,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,1200269920802300369,, 
+<10>Jan  5 04:24:30 PAN1.exampleCustomer.com 1,2015/01/05 04:24:29,009401009421,THREAT,spyware,1,2015/01/05 04:24:29,10.0.0.67,54.0.0.133,68.1.100.154,54.0.0.133,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:24:29,18282,1,49800,80,13532,80,0x400000,tcp,reset-both,"install.ashx",WGeneric.Gen Command and Control Traffic(13600),any,critical,client-to-server,3875271,0x0,10.0.0.0-10.255.255.255,US,0,,0,, 
+<12>Jan  5 11:32:12 PAN1.exampleCustomer.com 1,2015/01/05 11:32:12,0011C103117,THREAT,vulnerability,1,2015/01/05 11:32:12,31.0.0.198,10.0.0.102,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:32:12,255259,1,55325,443,0,0,0x80004000,tcp,alert,"foo.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422465396,0x0,CH,10.0.0.0-10.255.255.255,0,,1200269920802300360,, 
+<12>Jan  5 11:31:46 PAN1.exampleCustomer.com 1,2015/01/05 11:31:46,0011C103117,THREAT,vulnerability,1,2015/01/05 11:31:46,31.0.0.198,10.0.0.50,0.0.0.0,0.0.0.0,EX-Allow,,,twitter-base,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 11:31:46,33699961,1,55325,443,0,0,0x80004000,tcp,alert,"*.exampleCustomer.com/",OpenSSL TLS Malformed Heartbeat Request Found - Heartbleed(36397),any,medium,client-to-server,3422464320,0x0,CH,10.0.0.0-10.255.255.255,0,,1344385108878191555,, 
+<12>Jan  5 11:36:03 PAN1.exampleCustomer.com 1,2015/01/05 11:36:02,0006C113555,THREAT,vulnerability,1,2015/01/05 11:36:02,10.0.0.62,10.1.0.11,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,msrpc,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 11:36:02,16469,1,51461,445,0,0,0x80004000,tcp,alert,"",Microsoft DCE RPC Big Endian Evasion Vulnerability(33510),any,medium,client-to-server,46375536,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,1200283142590569503,, 
+<11>Jan  5 13:26:50 PAN1.exampleCustomer.com 1,2015/01/05 13:26:49,0011C103117,THREAT,vulnerability,1,2015/01/05 13:26:49,10.0.0.167,10.1.0.41,0.0.0.0,0.0.0.0,EX-EasyAV,example\user.name.hernandez,,ssh,vsys1,v_internal,v_external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 13:26:49,121926,1,49754,9101,0,0,0x4000,tcp,reset-both,"",SSH User Authentication Brute-force Attempt(40015),any,high,client-to-server,3422922092,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,,0,, 
+<11>Jan  5 10:18:37 NTOR1FWPAN1 1,2015/01/05 10:18:37,009401008933,THREAT,vulnerability,1,2015/01/05 10:18:37,10.0.0.50,54.0.0.7,38.140.11.98,54.0.0.7,TOR-outbound,,,web-browsing,vsys1,Inside,Outside,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 10:18:37,7226,1,51724,80,58706,80,0x80400000,tcp,reset-both,"_PhotoXML.php",Microsoft Office Sharepoint Server Elevation of Privilege Vulnerability(32001),any,high,client-to-server,1252593,0x0,10.0.0.0-10.255.255.255,US,0,,1200584606076633093,, 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,34103936,1,54270,40004,0,0,0x401c,tcp,allow,5385,3299,2086,26,2015/01/05 12:51:01,30,any,0,17754932047,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.22,10.1.0.28,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,vmware,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33888863,1,62961,902,0,0,0x4019,udp,allow,108,108,0,1,2015/01/05 12:51:01,30,any,0,17754932051,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.13.0.2,10.0.0.32,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33841444,1,17294,53,0,0,0x4019,udp,allow,94,94,0,1,2015/01/05 12:51:01,30,any,0,17754932054,0x0,US,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,71.0.0.174,10.0.0.32,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33992062,1,57783,53,0,0,0x4019,udp,allow,247,86,161,2,2015/01/05 12:51:01,30,any,0,17754932055,0x0,US,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,58.0.0.196,10.0.0.17,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34310602,1,25678,443,0,0,0x4053,tcp,allow,21935,11456,10479,44,2015/01/05 12:48:44,167,EX-Allowed,0,17754932059,0x0,IN,10.0.0.0-10.255.255.255,0,20,24 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33760927,1,52688,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932062,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33595018,1,52689,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932064,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.7,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,,,netbios-ns,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34098107,1,137,137,0,0,0x4019,udp,allow,532,220,312,6,2015/01/05 12:51:01,30,any,0,17754932070,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,3,3 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.39,10.1.0.163,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,ms-ds-smb,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34326343,1,52690,445,0,0,0x401a,tcp,allow,2229,1287,942,10,2015/01/05 12:51:01,30,any,0,17754932071,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.13.0.15,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,eset-remote-admin,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33859365,1,23390,443,0,0,0x405e,tcp,allow,725,405,320,11,2015/01/05 12:51:01,30,any,0,17754932073,0x0,US,10.0.0.0-10.255.255.255,0,6,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,33621086,1,54266,40004,0,0,0x401c,tcp,allow,5325,3299,2026,25,2015/01/05 12:51:01,30,any,0,17754932075,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,14 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,96.0.0.102,10.0.0.57,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33924142,1,51230,443,0,0,0x4053,tcp,allow,18350,9280,9070,41,2015/01/05 12:51:01,30,EX-Allowed,0,17754932080,0x0,US,10.0.0.0-10.255.255.255,0,19,22 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,72.0.0.131,10.0.0.174,0.0.0.0,0.0.0.0,EX-Allow,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34186774,1,28203,443,0,0,0x4053,tcp,allow,4121,2209,1912,20,2015/01/05 12:51:01,30,EX-Allowed,0,17754932086,0x0,US,10.0.0.0-10.255.255.255,0,10,10 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,216.0.10.244,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,ssl,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33988765,1,45150,443,0,0,0x401c,tcp,allow,626,358,268,9,2015/01/05 12:50:41,50,any,0,17754932095,0x0,US,10.0.0.0-10.255.255.255,0,5,4 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,172.12.216.82,10.0.0.53,0.0.0.0,0.0.0.0,EX-EasyAV,,,eset-update,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,33577240,1,3882,80,0,0,0x401c,tcp,allow,94947,2570,92377,106,2015/01/05 12:50:47,44,EX-Allowed,0,17754932107,0x0,US,10.0.0.0-10.255.255.255,0,38,68 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.33,10.1.0.85,0.0.0.0,0.0.0.0,EX-Allow,,,zabbix,vsys1,v_dmz-external,v_dmz-internal,ethernet1/3,ethernet1/4,LOG-Default,2015/01/05 12:51:33,34078885,1,46056,10050,0,0,0x405e,tcp,allow,728,367,361,11,2015/01/05 12:51:01,30,any,0,17754932117,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,6,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.33,0.0.0.0,0.0.0.0,EX-Allow,,,incomplete,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,34222137,1,59966,443,0,0,0x401c,tcp,allow,404,198,206,7,2015/01/05 12:51:01,30,any,0,17754932131,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,4,3 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.12,172.13.0.23,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,33560784,1,52991,53,0,0,0x4019,udp,allow,815,96,719,2,2015/01/05 12:51:01,30,any,0,17754932142,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.52,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,109384,1,50721,40004,0,0,0x401c,tcp,allow,4211,2125,2086,25,2015/01/05 12:51:02,30,any,0,17754932194,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,10,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,134519,1,54273,40004,0,0,0x401c,tcp,allow,5375,3289,2086,26,2015/01/05 12:51:02,30,any,0,17754932204,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,15005,1,54268,40004,0,0,0x401c,tcp,allow,7084,3787,3297,26,2015/01/05 12:51:02,30,any,0,17754932228,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,11,15 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:33,0003C105690,TRAFFIC,drop,1,2015/01/05 12:51:33,10.0.0.219,10.3.0.21,0.0.0.0,0.0.0.0,catch all deny,,,not-applicable,vsys1,GuestAccess,trust,vlan.84,,LOG-Default,2015/01/05 12:51:33,0,1,62063,389,0,0,0x0,tcp,deny,70,70,0,1,2015/01/05 12:51:34,0,any,0,956329030,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.217,172.13.0.168,186.225.121.238,172.13.0.168,Guest WiFi to Internet,,,skype-probe,vsys1,Guest WiFi,Ext_Internet,ethernet1/3.109,ethernet1/2,LOG-Default,2015/01/05 12:51:33,46888,1,11566,40023,55962,40023,0x404050,udp,allow,1446,79,1367,2,2015/01/05 12:51:03,0,any,0,265102737,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.20,10.1.0.28,0.0.0.0,0.0.0.0,EX-Allow,,example\user.name,vmware,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 12:51:33,46821,1,61199,902,0,0,0x4019,udp,allow,108,108,0,1,2015/01/05 12:51:03,0,any,0,265102739,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:33,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:33,188024,1,57269,53,59952,53,0x400019,udp,allow,194,73,121,2,2015/01/05 12:50:49,0,any,0,956329037,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,172.13.0.110,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,v_internal,v_external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,51569,1,60390,53,0,0,0x4019,udp,allow,815,96,719,2,2015/01/05 12:51:02,30,any,0,17754932369,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.81,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_dmz-internal,v_dmz-external,ethernet1/4,ethernet1/3,LOG-Default,2015/01/05 12:51:33,185459,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932372,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.44,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,84730,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932379,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.73,10.1.0.12,0.0.0.0,0.0.0.0,EX-Allow,,,dns,vsys1,internal,external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,12561,1,57334,53,0,0,0x4019,udp,allow,206,95,111,2,2015/01/05 12:51:03,0,any,0,803406326,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,3286,1,57095,80,0,0,0x401c,tcp,allow,3506,899,2607,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406334,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,42426,1,57096,80,0,0,0x401c,tcp,allow,3386,1390,1996,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406335,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0006C110285,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.35,10.3.0.65,0.0.0.0,0.0.0.0,EX-Allow,,,web-browsing,vsys1,external,internal,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 12:51:33,15733,1,57130,80,0,0,0x401c,tcp,allow,1661,926,735,12,2015/01/05 12:51:03,0,private-ip-addresses,0,803406337,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,7,5 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:33,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:33,10.0.0.11,10.1.0.60,0.0.0.0,0.0.0.0,EX-Allow,,,ping,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:33,239420,1,0,0,0,0,0x4019,icmp,allow,120,60,60,2,2015/01/05 12:51:29,0,any,0,17754932383,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,63.0.0.78,68.1.100.154,63.0.0.78,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,13687,1,53152,80,64294,80,0x40001c,tcp,allow,1039,576,463,12,2015/01/05 04:51:03,1,search-engines,0,8195211,0x0,10.0.0.0-10.255.255.255,US,0,6,6 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,77.0.0.59,68.1.100.154,77.0.0.59,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,36193,1,53155,80,48756,80,0x40001c,tcp,allow,946,540,406,10,2015/01/05 04:51:04,0,computer-and-internet-security,0,8195212,0x0,10.0.0.0-10.255.255.255,CZ,0,5,5 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,63.0.0.78,68.1.100.154,63.0.0.78,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,8727,1,53154,80,6852,80,0x40001c,tcp,allow,1039,576,463,12,2015/01/05 04:51:04,0,search-engines,0,8195213,0x0,10.0.0.0-10.255.255.255,US,0,6,6 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.67,77.0.0.59,68.1.100.154,77.0.0.59,EX-Allow,,,web-browsing,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,16955,1,53153,80,19440,80,0x40001c,tcp,allow,946,540,406,10,2015/01/05 04:51:03,1,computer-and-internet-security,0,8195216,0x0,10.0.0.0-10.255.255.255,CZ,0,5,5 
+<14>Jan  5 04:51:34 PAN1.exampleCustomer.com 1,2015/01/05 04:51:33,009401009421,TRAFFIC,end,1,2015/01/05 04:51:33,10.0.0.101,23.200,10,217,68.0.0.154,23.200,10,217,EX-WebControlRestrict,,,itunes-base,vsys1,internal,external,ethernet1/1,ethernet1/2,LOG-Default,2015/01/05 04:51:33,14851,1,55137,443,29553,443,0x400019,tcp,allow,654,580,74,7,2015/01/05 04:50:34,0,shopping,0,8195217,0x0,10.0.0.0-10.255.255.255,US,0,6,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.46,172.13.0.2,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,incomplete,vsys1,Inside,Inside,ethernet1/4,tunnel.1,LOG-Default,2015/01/05 12:51:34,57850,1,65286,139,0,0,0x4019,tcp,allow,62,62,0,1,2015/01/05 12:51:29,0,any,0,265102746,0x0,10.0.0.0-10.255.255.255,192.168.0.0-192.168.255.255,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,216.0.10.194,192.168.100.11,0.0.0.0,0.0.0.0,Internet to Internet,,,insufficient-data,vsys1,untrust,untrust,vlan.200,vlan.200,LOG-Default,2015/01/05 12:51:34,259007,1,80,11347,0,0,0xc,udp,allow,90,90,0,1,2015/01/05 12:50:25,0,any,0,956329050,0x0,US,US,0,1,0 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:34,13024,1,56694,53,51398,53,0x400019,udp,allow,222,82,140,2,2015/01/05 12:50:49,0,any,0,956329055,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.147,4.2.2.2,192.168.100.11,4.2.2.2,Guest_to_Internet,,,dns,vsys1,GuestAccess,untrust,vlan.84,vlan.200,LOG-Default,2015/01/05 12:51:34,62999,1,58277,53,5576,53,0x400019,udp,allow,328,96,232,2,2015/01/05 12:50:49,0,any,0,956329056,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,001606007155,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.156,96.0.0.138,172.13.0.35,96.0.0.138,EX-Allow,example\user.name,,web-browsing,vsys1,trust,untrust,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,61348,1,65231,80,48623,80,0x40401a,tcp,allow,50316,4297,46019,67,2015/01/05 12:51:03,1,travel,0,179851307,0x0,10.0.0.0-10.255.255.255,US,0,28,39 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,001606007155,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.148,96.0.0.35,172.13.0.35,96.0.0.35,EX-Allow,example\user.name,,symantec-av-update,vsys1,trust,untrust,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,61220,1,60900,80,12964,80,0x40401a,tcp,allow,39350,3087,36263,56,2015/01/05 12:50:07,57,computer-and-internet-security,0,179851311,0x0,10.0.0.0-10.255.255.255,US,0,23,33 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,009401003136,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.138,213.0.10.101,172.13.0.142,213.0.10.101,Outbound,,,ssl,vsys1,internal,external,ethernet1/4,ethernet1/1,LOG-Default,2015/01/05 12:51:34,62600,1,55014,443,22537,443,0x40001c,tcp,allow,2956,1853,1103,20,2015/01/05 12:51:04,0,travel,0,54644537,0x0,10.0.0.0-10.255.255.255,CH,0,9,11 
+<14>Jan  5 12:51:34 PAN1 1,2015/01/05 12:51:34,009401003136,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.138,213.0.10.101,172.13.0.142,213.0.10.101,Outbound,,,ssl,vsys1,internal,external,ethernet1/4,ethernet1/1,LOG-Default,2015/01/05 12:51:34,45328,1,55025,443,48646,443,0x40001c,tcp,allow,2828,1845,983,18,2015/01/05 12:51:04,0,travel,0,54644544,0x0,10.0.0.0-10.255.255.255,CH,0,9,9 
+<14>Jan  5 12:51:34 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0004C103634,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.165,93.0.0.200,0.0.0.0,0.0.0.0,EX-Allow,example\user.name,,ssl,vsys1,v_internal,v_external,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,15787,1,53105,443,0,0,0x4053,tcp,allow,10222,1275,8947,22,2015/01/05 12:48:03,181,business-and-economy,0,307579464,0x0,10.0.0.0-10.255.255.255,EU,0,10,12 
+<14>Jan  5 12:51:35 PAN1 1,2015/01/05 12:51:34,0003C105690,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.11,10.3.0.26,0.0.0.0,0.0.0.0,ICMP DMZ to In,,,ping,vsys1,F5_DMZ_WAN,trust,vlan.81,vlan.399,LOG-Default,2015/01/05 12:51:34,33876,1,0,0,0,0,0x19,icmp,allow,128,64,64,2,2015/01/05 12:51:20,0,any,0,956329058,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,1,1 
+<14>Jan  5 12:51:35 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0006C113555,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.53,8.8.8.8,172.13.0.238,8.8.8.8,Guest WiFi to Internet,,,dns,vsys1,Guest WiFi,Ext_Internet,ethernet1/3.109,ethernet1/2,LOG-Default,2015/01/05 12:51:34,53079,1,59288,53,31746,53,0x404019,udp,allow,194,91,103,2,2015/01/05 12:51:04,0,any,0,265102750,0x0,10.0.0.0-10.255.255.255,US,0,1,1 
+<14>Jan  5 12:51:35 PAN1.exampleCustomer.com 1,2015/01/05 12:51:34,0011C103117,TRAFFIC,end,1,2015/01/05 12:51:34,10.0.0.53,10.1.0.174,0.0.0.0,0.0.0.0,EX-EasyAV2,,,mssql-db,vsys1,v_external,v_internal,ethernet1/2,ethernet1/1,LOG-Default,2015/01/05 12:51:34,141372,1,54279,40004,0,0,0x401c,tcp,allow,3783,1697,2086,25,2015/01/05 12:51:03,30,any,0,17754932394,0x0,10.0.0.0-10.255.255.255,10.0.0.0-10.255.255.255,0,10,15 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SnortOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SnortOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SnortOutput
new file mode 100644
index 0000000..0497b0f
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SnortOutput
@@ -0,0 +1,3 @@
+01/27-16:01:04.877970 ,129,12,1,"Consecutive TCP small segments exceeding threshold",TCP,10.0.2.2,56642,10.0.2.15,22,52:54:00:12:35:02,08:00:27:7F:93:2D,0x4E,***AP***,0x9AFF3D7,0xC8761D52,,0xFFFF,64,0,59677,64,65536,,,,
+02/22-15:56:48.612494 ,129,12,1,"Consecutive TCP small segments exceeding threshold",TCP,96.44.142.5,80,10.0.2.15,50895,52:54:00:12:35:02,08:00:27:7F:93:2D,0x6E,***AP***,0xDB45F7A,0x7701DD5B,,0xFFFF,64,0,16785,96,98304,,,,
+02/22-15:56:48.616775 ,129,12,1,"Consecutive TCP small segments exceeding threshold",TCP,96.44.142.5,80,10.0.2.15,50895,52:54:00:12:35:02,08:00:27:7F:93:2D,0xA6,***AP***,0xDB508F2,0x7701DD5B,,0xFFFF,64,0,16824,152,155648,,,,
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SourcefireExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SourcefireExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SourcefireExampleOutput
new file mode 100644
index 0000000..5f177df
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/SourcefireExampleOutput
@@ -0,0 +1,2 @@
+SFIMS: [Primary Detection Engine (a7213248-6423-11e3-8537-fac6a92b7d9d)][MTD Access Control] Connection Type: Start, User: Unknown, Client: Unknown, Application Protocol: Unknown, Web App: Unknown, Firewall Rule Name: MTD Access Control, Firewall Rule Action: Allow, Firewall Rule Reasons: Unknown, URL Category: Unknown, URL_Reputation: Risk unknown, URL: Unknown, Interface Ingress: s1p1, Interface Egress: N/A, Security Zone Ingress: Unknown, Security Zone Egress: N/A, Security Intelligence Matching IP: None, Security Intelligence Category: None, {TCP} 72.163.0.129:60517 -> 10.1.128.236:443
+SFIMS: [Primary Detection Engine (a7213248-6423-11e3-8537-fac6a92b7d9d)][MTD Access Control] Connection Type: Start, User: Unknown, Client: Unknown, Application Protocol: Unknown, Web App: Unknown, Firewall Rule Name: MTD Access Control, Firewall Rule Action: Allow, Firewall Rule Reasons: Unknown, URL Category: Unknown, URL_Reputation: Risk unknown, URL: Unknown, Interface Ingress: s1p1, Interface Egress: N/A, Security Zone Ingress: Unknown, Security Zone Egress: N/A, Security Intelligence Matching IP: None, Security Intelligence Category: None, {TCP} 10.5.200.245:45283 -> 72.163.0.129:21
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/YafExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/YafExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/YafExampleOutput
new file mode 100644
index 0000000..8f3ff44
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/YafExampleOutput
@@ -0,0 +1,10 @@
+2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AS|       0|       0|       0|22efa001|00000000|000|000|       1|      44|       0|       0|    0|idle
+2016-01-28 15:29:48.502|2016-01-28 15:29:48.502|   0.000|   0.000| 17|                               10.0.2.15|37299|                                10.0.2.3|   53|       A|       0|       0|       0|10000000|00000000|000|000|       1|      56|       0|       0|    0|idle
+2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|37299|       A|       0|       0|       0|00000000|00000000|000|000|       1|     312|       0|       0|    0|idle
+2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                               10.0.2.15|56303|                                10.0.2.3|   53|       A|       0|       0|       0|00000000|00000000|000|000|       1|      56|       0|       0|    0|idle
+2016-01-28 15:29:48.506|2016-01-28 15:29:48.506|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|56303|       A|       0|       0|       0|00000000|00000000|000|000|       1|      84|       0|       0|    0|idle
+2016-01-28 15:29:48.508|2016-01-28 15:29:48.508|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       S|       0|       0|       0|58c52fca|00000000|000|000|       1|      60|       0|       0|    0|idle
+2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       A|       0|       0|       0|58c52fcb|00000000|000|000|       1|      40|       0|       0|    0|idle 
+2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|      AP|       0|       0|       0|58c52fcb|00000000|000|000|       1|     148|       0|       0|    0|idle 
+2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|       A|       0|       0|       0|22efa002|00000000|000|000|       1|      40|       0|       0|    0|idle 
+2016-01-28 15:29:48.562|2016-01-28 15:29:48.562|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AP|       0|       0|       0|22efa002|00000000|000|000|       1|     604|       0|       0|    0|idle
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/SnortParsed
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/SnortParsed b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/SnortParsed
new file mode 100644
index 0000000..318b158
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/SnortParsed
@@ -0,0 +1,3 @@
+{"msg":"\"Consecutive TCP small segments exceeding threshold\"","sig_rev":"1","ip_dst_addr":"10.0.2.15","ip_dst_port":"22","ethsrc":"52:54:00:12:35:02","tcpseq":"0x9AFF3D7","dgmlen":"64","icmpid":"","tcplen":"","tcpwindow":"0xFFFF","icmpseq":"","tcpack":"0xC8761D52","original_string":"01\/27-16:01:04.877970 ,129,12,1,\"Consecutive TCP small segments exceeding threshold\",TCP,10.0.2.2,56642,10.0.2.15,22,52:54:00:12:35:02,08:00:27:7F:93:2D,0x4E,***AP***,0x9AFF3D7,0xC8761D52,,0xFFFF,64,0,59677,64,65536,,,,","icmpcode":"","tos":"0","id":"59677","timestamp":1453932941970,"ethdst":"08:00:27:7F:93:2D","ip_src_addr":"10.0.2.2","ttl":"64","source.type":"snort","ethlen":"0x4E","iplen":"65536","icmptype":"","protocol":"TCP","ip_src_port":"56642","tcpflags":"***AP***","sig_id":"12","sig_generator":"129", "is_alert" : "true"}
+{"msg":"\"Consecutive TCP small segments exceeding threshold\"","sig_rev":"1","ip_dst_addr":"10.0.2.15","ip_dst_port":"50895","ethsrc":"52:54:00:12:35:02","tcpseq":"0xDB45F7A","dgmlen":"96","icmpid":"","tcplen":"","tcpwindow":"0xFFFF","icmpseq":"","tcpack":"0x7701DD5B","original_string":"02\/22-15:56:48.612494 ,129,12,1,\"Consecutive TCP small segments exceeding threshold\",TCP,96.44.142.5,80,10.0.2.15,50895,52:54:00:12:35:02,08:00:27:7F:93:2D,0x6E,***AP***,0xDB45F7A,0x7701DD5B,,0xFFFF,64,0,16785,96,98304,,,,","icmpcode":"","tos":"0","id":"16785","timestamp":1456178820494,"ethdst":"08:00:27:7F:93:2D","ip_src_addr":"96.44.142.5","ttl":"64","source.type":"snort","ethlen":"0x6E","iplen":"98304","icmptype":"","protocol":"TCP","ip_src_port":"80","tcpflags":"***AP***","sig_id":"12","sig_generator":"129", "is_alert" : "true"}
+{"msg":"\"Consecutive TCP small segments exceeding threshold\"","sig_rev":"1","ip_dst_addr":"10.0.2.15","ip_dst_port":"50895","ethsrc":"52:54:00:12:35:02","tcpseq":"0xDB508F2","dgmlen":"152","icmpid":"","tcplen":"","tcpwindow":"0xFFFF","icmpseq":"","tcpack":"0x7701DD5B","original_string":"02\/22-15:56:48.616775 ,129,12,1,\"Consecutive TCP small segments exceeding threshold\",TCP,96.44.142.5,80,10.0.2.15,50895,52:54:00:12:35:02,08:00:27:7F:93:2D,0xA6,***AP***,0xDB508F2,0x7701DD5B,,0xFFFF,64,0,16824,152,155648,,,,","icmpcode":"","tos":"0","id":"16824","timestamp":1456178824775,"ethdst":"08:00:27:7F:93:2D","ip_src_addr":"96.44.142.5","ttl":"64","source.type":"snort","ethlen":"0xA6","iplen":"155648","icmptype":"","protocol":"TCP","ip_src_port":"80","tcpflags":"***AP***","sig_id":"12","sig_generator":"129", "is_alert" : "true"}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/YafExampleParsed
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/YafExampleParsed b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/YafExampleParsed
new file mode 100644
index 0000000..6155e98
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleParsed/YafExampleParsed
@@ -0,0 +1,10 @@
+{"iflags":"AS","uflags":0,"isn":"22efa001","ip_dst_addr":"10.0.2.15","ip_dst_port":39468,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AS|       0|       0|       0|22efa001|00000000|000|000|       1|      44|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"216.21.170.221","tag":0,"rtag":0,"ip_src_port":80,"timestamp":1453994988512,"app":0,"oct":44,"end_reason":"idle","risn":0,"end_time":1453994988512,"source.type":"yaf","start_time":1453994988512,"riflags":0,"rtt":"0.000","protocol":6}
+{"iflags":"A","uflags":0,"isn":10000000,"ip_dst_addr":"10.0.2.3","ip_dst_port":53,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.502|2016-01-28 15:29:48.502|   0.000|   0.000| 17|                               10.0.2.15|37299|                                10.0.2.3|   53|       A|       0|       0|       0|10000000|00000000|000|000|       1|      56|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.15","tag":0,"rtag":0,"ip_src_port":37299,"timestamp":1453994988502,"app":0,"oct":56,"end_reason":"idle","risn":0,"end_time":1453994988502,"source.type":"yaf","start_time":1453994988502,"riflags":0,"rtt":"0.000","protocol":17}
+{"iflags":"A","uflags":0,"isn":0,"ip_dst_addr":"10.0.2.15","ip_dst_port":37299,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|37299|       A|       0|       0|       0|00000000|00000000|000|000|       1|     312|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.3","tag":0,"rtag":0,"ip_src_port":53,"timestamp":1453994988504,"app":0,"oct":312,"end_reason":"idle","risn":0,"end_time":1453994988504,"source.type":"yaf","start_time":1453994988504,"riflags":0,"rtt":"0.000","protocol":17}
+{"iflags":"A","uflags":0,"isn":0,"ip_dst_addr":"10.0.2.3","ip_dst_port":53,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                               10.0.2.15|56303|                                10.0.2.3|   53|       A|       0|       0|       0|00000000|00000000|000|000|       1|      56|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.15","tag":0,"rtag":0,"ip_src_port":56303,"timestamp":1453994988504,"app":0,"oct":56,"end_reason":"idle","risn":0,"end_time":1453994988504,"source.type":"yaf","start_time":1453994988504,"riflags":0,"rtt":"0.000","protocol":17}
+{"iflags":"A","uflags":0,"isn":0,"ip_dst_addr":"10.0.2.15","ip_dst_port":56303,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.506|2016-01-28 15:29:48.506|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|56303|       A|       0|       0|       0|00000000|00000000|000|000|       1|      84|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.3","tag":0,"rtag":0,"ip_src_port":53,"timestamp":1453994988506,"app":0,"oct":84,"end_reason":"idle","risn":0,"end_time":1453994988506,"source.type":"yaf","start_time":1453994988506,"riflags":0,"rtt":"0.000","protocol":17}
+{"iflags":"S","uflags":0,"isn":"58c52fca","ip_dst_addr":"216.21.170.221","ip_dst_port":80,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.508|2016-01-28 15:29:48.508|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       S|       0|       0|       0|58c52fca|00000000|000|000|       1|      60|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.15","tag":0,"rtag":0,"ip_src_port":39468,"timestamp":1453994988508,"app":0,"oct":60,"end_reason":"idle","risn":0,"end_time":1453994988508,"source.type":"yaf","start_time":1453994988508,"riflags":0,"rtt":"0.000","protocol":6}
+{"iflags":"A","uflags":0,"isn":"58c52fcb","ip_dst_addr":"216.21.170.221","ip_dst_port":80,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       A|       0|       0|       0|58c52fcb|00000000|000|000|       1|      40|       0|       0|    0|idle ","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.15","tag":0,"rtag":0,"ip_src_port":39468,"timestamp":1453994988512,"app":0,"oct":40,"end_reason":"idle ","risn":0,"end_time":1453994988512,"source.type":"yaf","start_time":1453994988512,"riflags":0,"rtt":"0.000","protocol":6}
+{"iflags":"AP","uflags":0,"isn":"58c52fcb","ip_dst_addr":"216.21.170.221","ip_dst_port":80,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|      AP|       0|       0|       0|58c52fcb|00000000|000|000|       1|     148|       0|       0|    0|idle ","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"10.0.2.15","tag":0,"rtag":0,"ip_src_port":39468,"timestamp":1453994988512,"app":0,"oct":148,"end_reason":"idle ","risn":0,"end_time":1453994988512,"source.type":"yaf","start_time":1453994988512,"riflags":0,"rtt":"0.000","protocol":6}
+{"iflags":"A","uflags":0,"isn":"22efa002","ip_dst_addr":"10.0.2.15","ip_dst_port":39468,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|       A|       0|       0|       0|22efa002|00000000|000|000|       1|      40|       0|       0|    0|idle ","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"216.21.170.221","tag":0,"rtag":0,"ip_src_port":80,"timestamp":1453994988512,"app":0,"oct":40,"end_reason":"idle ","risn":0,"end_time":1453994988512,"source.type":"yaf","start_time":1453994988512,"riflags":0,"rtt":"0.000","protocol":6}
+{"iflags":"AP","uflags":0,"isn":"22efa002","ip_dst_addr":"10.0.2.15","ip_dst_port":39468,"duration":"0.000","rpkt":0,"original_string":"2016-01-28 15:29:48.562|2016-01-28 15:29:48.562|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AP|       0|       0|       0|22efa002|00000000|000|000|       1|     604|       0|       0|    0|idle","pkt":1,"ruflags":0,"roct":0,"ip_src_addr":"216.21.170.221","tag":0,"rtag":0,"ip_src_port":80,"timestamp":1453994988562,"app":0,"oct":604,"end_reason":"idle","risn":0,"end_time":1453994988562,"source.type":"yaf","start_time":1453994988562,"riflags":0,"rtt":"0.000","protocol":6}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/README.md b/metron-platform/metron-parsers/README.md
new file mode 100644
index 0000000..9fcb431
--- /dev/null
+++ b/metron-platform/metron-parsers/README.md
@@ -0,0 +1,82 @@
+#metron-parsers
+
+##Module Description
+
+This module provides a list of parsers that can be used with the Metron framework.  There are two types of parsers.  First type is a Java parser.  This kind of parser is optimized for speed and performance and is built for use with higher velocity topologies.  These parsers are not easily modifiable and in order to make changes to them the entire topology needs to be recompiled.  The second type of parser provided with the system is a Grok parser.  This type of parser is primarily designed for lower-velocity topologies or for quickly standing up a parser for a new telemetry before a permanent Java parser can be written for it.
+
+##Message Format
+
+All Metron messages follow a specific format in order to ingest a message.  If a message does not conform to this format it will be dropped and put onto an error queue for further examination.  The message must be of a JSON format and must have a JSON tag message like so:
+
+```
+{"message" : message content}
+
+```
+
+Where appropriate there is also a standardization around the 5-tuple JSON fields.  This is done so the topology correlation engine further downstream can correlate messages from different topologies by these fields.  We are currently working on expanding the message standardization beyond these fields, but this feature is not yet available.  The standard field names are as follows:
+
+* ip_src_addr: layer 3 source IP
+* ip_dst_addr: layer 3 dest IP
+* ip_src_port: layer 4 source port
+* ip_dst_port: layer 4 dest port
+* protocol: layer 4 protocol
+* timestamp (epoch)
+* original_string: A human friendly string representation of the message
+
+The timestamp and original_string fields are mandatory. The remaining standard fields are optional.  If any of the optional fields are not applicable then the field should be left out of the JSON.
+
+So putting it all together a typical Metron message with all 5-tuple fields present would look like the following:
+
+```json
+{
+"message": 
+{"ip_src_addr": xxxx, 
+"ip_dst_addr": xxxx, 
+"ip_src_port": xxxx, 
+"ip_dst_port": xxxx, 
+"protocol": xxxx, 
+"original_string": xxx,
+"additional-field 1": xxx,
+}
+
+}
+```
+
+##Parser Bolt
+
+The Metron parser bolt is a standard bolt, which can be extended with multiple Java and Grok parser adapters for parsing different topology messages.  The bolt signature for declaration in a storm topology is as follows:
+
+```
+AbstractParserBolt parser_bolt = new TelemetryParserBolt()
+.withMessageParser(parser)
+.withMessageFilter(new GenericMessageFilter())
+.withMetricConfig(config);
+
+```
+
+Metric Config - optional argument for exporting custom metrics to graphite.  If set to null no metrics will be exported.  If set, then a list of metrics defined in the metrics.conf file of each topology will define which metrics are exported and how often.
+
+Message Filter - a filter defining which messages can be dropped.  This feature is only present in the Java parser adapters
+
+Message Parser - defines the parser adapter to be used for a topology
+
+##Parser Adapters
+
+Parser adapters are loaded dynamically in each Metron topology.  They are defined in topology.conf in the configuration item bolt.parser.adapter
+
+###Java Parser Adapters
+Java parser adapters are intended for higher-velocity topologies and are not easily changed or extended.  As the adoption of Metron continues we plan on extending our library of Java adapters to process more log formats.  As of this moment the Java adapters included with Metron are:
+
+* org.apache.metron.parsers.ise.BasicIseParser : Parse ISE messages
+* org.apache.metron.parsers.bro.BasicBroParser : Parse Bro messages
+* org.apache.metron.parsers.sourcefire.BasicSourcefireParser : Parse Sourcefire messages
+* org.apache.metron.parsers.lancope.BasicLancopeParser : Parse Lancope messages
+
+###Grok Parser Adapters
+Grok parser adapters are designed primarily for someone who is not a Java coder for quickly standing up a parser adapter for lower velocity topologies.  Grok relies on Regex for message parsing, which is much slower than purpose-built Java parsers, but is more extensible.  Grok parsers are defined via a config file and the topology does not need to be recompiled in order to make changes to them.  An example of a Grok parser is:
+
+* org.apache.metron.parsers.GrokParser
+
+For more information on the Grok project please refer to the following link:
+
+https://github.com/thekrakken/java-grok


[29/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/pom.xml b/metron-platform/metron-elasticsearch/pom.xml
new file mode 100644
index 0000000..974d396
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/pom.xml
@@ -0,0 +1,250 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-elasticsearch</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_hbase_guava_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-enrichment</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${global_elasticsearch_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.9.2</artifactId>
+            <version>${global_kafka_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-integration-test</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>${global_mockito_version}</version>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+    <reporting>
+        <plugins>
+            <!-- Normally, dependency report takes time, skip it -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-project-info-reports-plugin</artifactId>
+                <version>2.7</version>
+
+                <configuration>
+                    <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>emma-maven-plugin</artifactId>
+                <version>1.0-alpha-3</version>
+                <inherited>true</inherited>
+            </plugin>
+        </plugins>
+    </reporting>
+
+    <build>
+        <plugins>
+            <plugin>
+                <!-- Separates the unit tests from the integration tests. -->
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.12.4</version>
+                <configuration>
+                    <!-- Skip the default running of this plug-in (or everything is run twice...see below) -->
+                    <argLine>-Xmx2048m -XX:MaxPermSize=256m</argLine>
+                    <skip>true</skip>
+                    <!-- Show 100% of the lines from the stack trace (doesn't work) -->
+                    <trimStackTrace>false</trimStackTrace>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>unit-tests</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <!-- Never skip running the tests when the test phase is invoked -->
+                            <skip>false</skip>
+                            <includes>
+                                <!-- Include unit tests within integration-test phase. -->
+                                <include>**/*Test.java</include>
+                            </includes>
+                            <excludes>
+                                <!-- Exclude integration tests within (unit) test phase. -->
+                                <exclude>**/*IntegrationTest.java</exclude>
+                            </excludes>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>integration-tests</id>
+                        <phase>integration-test</phase>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <!-- Never skip running the tests when the integration-test phase is invoked -->
+                            <skip>false</skip>
+                            <includes>
+                                <!-- Include integration tests within integration-test phase. -->
+                                <include>**/*IntegrationTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.3</version>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <artifactSet>
+                                <excludes>
+                                    <exclude>storm:storm-core:*</exclude>
+                                    <exclude>storm:storm-lib:*</exclude>
+                                    <exclude>org.slf4j.impl*</exclude>
+                                    <exclude>org.slf4j:slf4j-log4j*</exclude>
+                                </excludes>
+                            </artifactSet>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                                    <resource>.yaml</resource>
+                                </transformer>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass></mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/main/assembly/assembly.xml b/metron-platform/metron-elasticsearch/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..bacaae3
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/main/assembly/assembly.xml
@@ -0,0 +1,52 @@
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/config</directory>
+      <outputDirectory>/config</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/scripts</directory>
+      <outputDirectory>/scripts</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/target</directory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+      <outputDirectory>/lib</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/main/config/elasticsearch.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/main/config/elasticsearch.properties b/metron-platform/metron-elasticsearch/src/main/config/elasticsearch.properties
new file mode 100644
index 0000000..e2370ab
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/main/config/elasticsearch.properties
@@ -0,0 +1,109 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+##### Kafka #####
+
+kafka.zk=node1:2181
+kafka.broker=node1:6667
+spout.kafka.topic.asa=asa
+spout.kafka.topic.bro=bro
+spout.kafka.topic.fireeye=fireeye
+spout.kafka.topic.ise=ise
+spout.kafka.topic.lancope=lancope
+spout.kafka.topic.paloalto=paloalto
+spout.kafka.topic.pcap=pcap
+spout.kafka.topic.snort=snort
+spout.kafka.topic.yaf=yaf
+
+##### Indexing #####
+writer.class.name=org.apache.metron.elasticsearch.writer.ElasticsearchWriter
+
+##### ElasticSearch #####
+
+es.ip=10.22.0.214
+es.port=9300
+es.clustername=elasticsearch
+
+##### MySQL #####
+
+mysql.ip=10.22.0.214
+mysql.port=3306
+mysql.username=root
+mysql.password=hadoop123
+
+##### Metrics #####
+
+#reporters
+org.apache.metron.metrics.reporter.graphite=true
+org.apache.metron.metrics.reporter.console=false
+org.apache.metron.metrics.reporter.jmx=false
+
+#Graphite Addresses
+
+org.apache.metron.metrics.graphite.address=localhost
+org.apache.metron.metrics.graphite.port=2023
+
+#TelemetryParserBolt
+org.apache.metron.metrics.TelemetryParserBolt.acks=true
+org.apache.metron.metrics.TelemetryParserBolt.emits=true
+org.apache.metron.metrics.TelemetryParserBolt.fails=true
+
+
+#GenericEnrichmentBolt
+org.apache.metron.metrics.GenericEnrichmentBolt.acks=true
+org.apache.metron.metrics.GenericEnrichmentBolt.emits=true
+org.apache.metron.metrics.GenericEnrichmentBolt.fails=true
+
+
+#TelemetryIndexingBolt
+org.apache.metron.metrics.TelemetryIndexingBolt.acks=true
+org.apache.metron.metrics.TelemetryIndexingBolt.emits=true
+org.apache.metron.metrics.TelemetryIndexingBolt.fails=true
+
+##### Host Enrichment #####
+
+org.apache.metron.enrichment.host.known_hosts=[{"ip":"10.1.128.236", "local":"YES", "type":"webserver", "asset_value" : "important"},\
+{"ip":"10.1.128.237", "local":"UNKNOWN", "type":"unknown", "asset_value" : "important"},\
+{"ip":"10.60.10.254", "local":"YES", "type":"printer", "asset_value" : "important"}]
+
+##### HDFS #####
+
+bolt.hdfs.batch.size=5000
+bolt.hdfs.field.delimiter=|
+bolt.hdfs.file.rotation.size.in.mb=5
+bolt.hdfs.file.system.url=hdfs://iot01.cloud.hortonworks.com:8020
+bolt.hdfs.wip.file.path=/paloalto/wip
+bolt.hdfs.finished.file.path=/paloalto/rotated
+bolt.hdfs.compression.codec.class=org.apache.hadoop.io.compress.SnappyCodec
+index.hdfs.output=/tmp/metron/enriched
+
+##### HBase #####
+bolt.hbase.table.name=pcap
+bolt.hbase.table.fields=t:value
+bolt.hbase.table.key.tuple.field.name=key
+bolt.hbase.table.timestamp.tuple.field.name=timestamp
+bolt.hbase.enable.batching=false
+bolt.hbase.write.buffer.size.in.bytes=2000000
+bolt.hbase.durability=SKIP_WAL
+bolt.hbase.partitioner.region.info.refresh.interval.mins=60
+
+##### Threat Intel #####
+
+threat.intel.tracker.table=
+threat.intel.tracker.cf=
+threat.intel.ip.table=
+threat.intel.ip.cf=

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/main/java/org/apache/metron/elasticsearch/writer/ElasticsearchWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/main/java/org/apache/metron/elasticsearch/writer/ElasticsearchWriter.java b/metron-platform/metron-elasticsearch/src/main/java/org/apache/metron/elasticsearch/writer/ElasticsearchWriter.java
new file mode 100644
index 0000000..6b54fec
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/main/java/org/apache/metron/elasticsearch/writer/ElasticsearchWriter.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.elasticsearch.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.common.interfaces.BulkMessageWriter;
+import org.elasticsearch.action.bulk.BulkRequestBuilder;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Serializable;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+
+public class ElasticsearchWriter implements BulkMessageWriter<JSONObject>, Serializable {
+
+  private Map<String, String> optionalSettings;
+  private transient TransportClient client;
+  private SimpleDateFormat dateFormat;
+  private static final Logger LOG = LoggerFactory
+          .getLogger(ElasticsearchWriter.class);
+
+  public ElasticsearchWriter withOptionalSettings(Map<String, String> optionalSettings) {
+    this.optionalSettings = optionalSettings;
+    return this;
+  }
+
+  @Override
+  public void init(Map stormConf, Configurations configurations) {
+    Map<String, Object> globalConfiguration = configurations.getGlobalConfig();
+    ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder();
+    builder.put("cluster.name", globalConfiguration.get("es.clustername"));
+    builder.put("client.transport.ping_timeout","500s");
+    if (optionalSettings != null) {
+      builder.put(optionalSettings);
+    }
+    client = new TransportClient(builder.build())
+            .addTransportAddress(new InetSocketTransportAddress(globalConfiguration.get("es.ip").toString(), Integer.parseInt(globalConfiguration.get("es.port").toString())));
+    dateFormat = new SimpleDateFormat((String) globalConfiguration.get("es.date.format"));
+
+  }
+
+  @Override
+  public void write(String sensorType, Configurations configurations, List<Tuple> tuples, List<JSONObject> messages) throws Exception {
+    SensorEnrichmentConfig sensorEnrichmentConfig = configurations.getSensorEnrichmentConfig(sensorType);
+    String indexPostfix = dateFormat.format(new Date());
+    BulkRequestBuilder bulkRequest = client.prepareBulk();
+    for(JSONObject message: messages) {
+      String indexName = sensorType;
+      if (sensorEnrichmentConfig != null) {
+        indexName = sensorEnrichmentConfig.getIndex();
+      }
+      IndexRequestBuilder indexRequestBuilder = client.prepareIndex(indexName + "_index_" + indexPostfix,
+              sensorType + "_doc");
+
+      indexRequestBuilder.setSource(message.toJSONString());
+      bulkRequest.add(indexRequestBuilder);
+    }
+    BulkResponse resp = bulkRequest.execute().actionGet();
+    if (resp.hasFailures()) {
+      throw new Exception(resp.buildFailureMessage());
+    }
+  }
+
+  @Override
+  public void close() throws Exception {
+    client.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/main/scripts/start_elasticsearch_topology.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/main/scripts/start_elasticsearch_topology.sh b/metron-platform/metron-elasticsearch/src/main/scripts/start_elasticsearch_topology.sh
new file mode 100755
index 0000000..ed80d82
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/main/scripts/start_elasticsearch_topology.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+METRON_VERSION=0.1BETA
+METRON_HOME=/usr/metron/$METRON_VERSION
+TOPOLOGY_JAR=metron-elasticsearch-$METRON_VERSION.jar
+storm jar $METRON_HOME/lib/$TOPOLOGY_JAR org.apache.storm.flux.Flux --remote $METRON_HOME/config/enrichment/remote.yaml --filter $METRON_HOME/config/elasticsearch.properties

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchEnrichmentIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchEnrichmentIntegrationTest.java b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchEnrichmentIntegrationTest.java
new file mode 100644
index 0000000..6931aff
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchEnrichmentIntegrationTest.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.elasticsearch.integration;
+
+import org.apache.metron.integration.EnrichmentIntegrationTest;
+import org.apache.metron.integration.ComponentRunner;
+import org.apache.metron.integration.InMemoryComponent;
+import org.apache.metron.integration.Processor;
+import org.apache.metron.integration.ReadinessState;
+import org.apache.metron.elasticsearch.integration.components.ElasticSearchComponent;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+public class ElasticsearchEnrichmentIntegrationTest extends EnrichmentIntegrationTest {
+
+  private String indexDir = "target/elasticsearch";
+  private String dateFormat = "yyyy.MM.dd.HH";
+  private String index = "yaf_index_" + new SimpleDateFormat(dateFormat).format(new Date());
+
+  @Override
+  public InMemoryComponent getSearchComponent(final Properties topologyProperties) {
+    return new ElasticSearchComponent.Builder()
+            .withHttpPort(9211)
+            .withIndexDir(new File(indexDir))
+            .build();
+  }
+
+  @Override
+  public Processor<List<Map<String, Object>>> getProcessor(final List<byte[]> inputMessages) {
+    return new Processor<List<Map<String, Object>>>() {
+      List<Map<String, Object>> docs = null;
+      public ReadinessState process(ComponentRunner runner) {
+        ElasticSearchComponent elasticSearchComponent = runner.getComponent("search", ElasticSearchComponent.class);
+        if (elasticSearchComponent.hasIndex(index)) {
+          List<Map<String, Object>> docsFromDisk;
+          try {
+            docs = elasticSearchComponent.getAllIndexedDocs(index, "yaf_doc");
+            docsFromDisk = readDocsFromDisk(hdfsDir);
+            System.out.println(docs.size() + " vs " + inputMessages.size() + " vs " + docsFromDisk.size());
+          } catch (IOException e) {
+            throw new IllegalStateException("Unable to retrieve indexed documents.", e);
+          }
+          if (docs.size() < inputMessages.size() || docs.size() != docsFromDisk.size()) {
+            return ReadinessState.NOT_READY;
+          } else {
+            return ReadinessState.READY;
+          }
+        } else {
+          return ReadinessState.NOT_READY;
+        }
+      }
+
+      public List<Map<String, Object>> getResult() {
+        return docs;
+      }
+    };
+  }
+
+  @Override
+  public void setAdditionalProperties(Properties topologyProperties) {
+    topologyProperties.setProperty("writer.class.name", "org.apache.metron.elasticsearch.writer.ElasticsearchWriter");
+  }
+
+  @Override
+  public String cleanField(String field) {
+    return field;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/components/ElasticSearchComponent.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/components/ElasticSearchComponent.java b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/components/ElasticSearchComponent.java
new file mode 100644
index 0000000..61d50f1
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/components/ElasticSearchComponent.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.elasticsearch.integration.components;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.metron.integration.InMemoryComponent;
+import org.apache.metron.integration.UnableToStartException;
+import org.elasticsearch.ElasticsearchTimeoutException;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
+import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
+import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.NodeBuilder;
+import org.elasticsearch.search.SearchHit;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class ElasticSearchComponent implements InMemoryComponent {
+
+    public static class Builder{
+        private int httpPort;
+        private File indexDir;
+        private Map<String, String> extraElasticSearchSettings = null;
+        public Builder withHttpPort(int httpPort) {
+            this.httpPort = httpPort;
+            return this;
+        }
+        public Builder withIndexDir(File indexDir) {
+            this.indexDir = indexDir;
+            return this;
+        }
+        public Builder withExtraElasticSearchSettings(Map<String, String> extraElasticSearchSettings) {
+            this.extraElasticSearchSettings = extraElasticSearchSettings;
+            return this;
+        }
+        public ElasticSearchComponent build() {
+            return new ElasticSearchComponent(httpPort, indexDir, extraElasticSearchSettings);
+        }
+    }
+
+    private Client client;
+    private Node node;
+    private int httpPort;
+    private File indexDir;
+    private Map<String, String> extraElasticSearchSettings;
+
+    public ElasticSearchComponent(int httpPort, File indexDir) {
+        this(httpPort, indexDir, null);
+    }
+    public ElasticSearchComponent(int httpPort, File indexDir, Map<String, String> extraElasticSearchSettings) {
+        this.httpPort = httpPort;
+        this.indexDir = indexDir;
+        this.extraElasticSearchSettings = extraElasticSearchSettings;
+    }
+    public Client getClient() {
+        return client;
+    }
+
+    private void cleanDir(File dir) throws IOException {
+        if(dir.exists()) {
+            FileUtils.deleteDirectory(dir);
+        }
+        dir.mkdirs();
+    }
+    public void start() throws UnableToStartException {
+        File logDir= new File(indexDir, "/logs");
+        File dataDir= new File(indexDir, "/data");
+        try {
+            cleanDir(logDir);
+            cleanDir(dataDir);
+
+        } catch (IOException e) {
+            throw new UnableToStartException("Unable to clean log or data directories", e);
+        }
+        ImmutableSettings.Builder immutableSettings = ImmutableSettings.settingsBuilder()
+                .put("node.http.enabled", true)
+                .put("http.port", httpPort)
+                .put("cluster.name", "metron")
+                .put("path.logs",logDir.getAbsolutePath())
+                .put("path.data",dataDir.getAbsolutePath())
+                .put("gateway.type", "none")
+                .put("index.store.type", "memory")
+                .put("index.number_of_shards", 1)
+                .put("node.mode", "network")
+                .put("index.number_of_replicas", 1);
+        if(extraElasticSearchSettings != null) {
+            immutableSettings = immutableSettings.put(extraElasticSearchSettings);
+        }
+        Settings settings = immutableSettings.build();
+        node = NodeBuilder.nodeBuilder().settings(settings).node();
+        node.start();
+        settings = ImmutableSettings.settingsBuilder()
+					.put("cluster.name", "metron").build();
+		client = new TransportClient(settings)
+					.addTransportAddress(new InetSocketTransportAddress("localhost",
+							9300));
+
+        waitForCluster(client, ClusterHealthStatus.YELLOW, new TimeValue(60000));
+    }
+
+    public static void waitForCluster(ElasticsearchClient client, ClusterHealthStatus status, TimeValue timeout) throws UnableToStartException {
+        try {
+            ClusterHealthResponse healthResponse =
+                    (ClusterHealthResponse)client.execute(ClusterHealthAction.INSTANCE, new ClusterHealthRequest().waitForStatus(status).timeout(timeout)).actionGet();
+            if (healthResponse != null && healthResponse.isTimedOut()) {
+                throw new UnableToStartException("cluster state is " + healthResponse.getStatus().name()
+                        + " and not " + status.name()
+                        + ", from here on, everything will fail!");
+            }
+        } catch (ElasticsearchTimeoutException e) {
+            throw new UnableToStartException("timeout, cluster does not respond to health request, cowardly refusing to continue with operations");
+        }
+    }
+
+    public List<Map<String, Object>> getAllIndexedDocs(String index, String sourceType) throws IOException {
+       return getAllIndexedDocs(index, sourceType, null);
+    }
+    public List<Map<String, Object>> getAllIndexedDocs(String index, String sourceType, String subMessage) throws IOException {
+        getClient().admin().indices().refresh(new RefreshRequest());
+        SearchResponse response = getClient().prepareSearch(index)
+                .setTypes(sourceType)
+                .setSource("message")
+                .setFrom(0)
+                .setSize(1000)
+                .execute().actionGet();
+        List<Map<String, Object>> ret = new ArrayList<Map<String, Object>>();
+        for (SearchHit hit : response.getHits()) {
+            Object o = null;
+            if(subMessage == null) {
+                o = hit.getSource();
+            }
+            else {
+                o = hit.getSource().get(subMessage);
+            }
+            ret.add((Map<String, Object>)(o));
+        }
+        return ret;
+    }
+    public boolean hasIndex(String indexName) {
+        Set<String> indices = getClient().admin()
+                                    .indices()
+                                    .stats(new IndicesStatsRequest())
+                                    .actionGet()
+                                    .getIndices()
+                                    .keySet();
+        return indices.contains(indexName);
+
+    }
+
+    public void stop() {
+        node.stop();
+        node = null;
+        client = null;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/test/resources/log4j.properties b/metron-platform/metron-elasticsearch/src/test/resources/log4j.properties
new file mode 100644
index 0000000..0d50388
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/test/resources/log4j.properties
@@ -0,0 +1,24 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+# Root logger option
+log4j.rootLogger=ERROR, stdout
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-elasticsearch/src/test/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/test/resources/log4j2.xml b/metron-platform/metron-elasticsearch/src/test/resources/log4j2.xml
new file mode 100755
index 0000000..68d5eac
--- /dev/null
+++ b/metron-platform/metron-elasticsearch/src/test/resources/log4j2.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration monitorInterval="60">
+  <Appenders>
+    <Console name="Console" target="SYSTEM_OUT">
+     <PatternLayout pattern="%-4r [%t] %-5p %c{1.} - %msg%n"/>
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="error">
+      <AppenderRef ref="Console"/>
+    </Root>
+  </Loggers>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/README.md b/metron-platform/metron-enrichment/README.md
new file mode 100644
index 0000000..83d2926
--- /dev/null
+++ b/metron-platform/metron-enrichment/README.md
@@ -0,0 +1,125 @@
+#metron-enrichment
+
+##Module Description
+
+This module enables enrichment of message metafields with additional information from various enrichment sources.  Currently there is only a limited number of enrichments available, but this is an extensible framework that can be extended with additional enrichments.  Enrichments currently available are geo, whois, hosts, and CIF.
+
+##Message Format
+
+Enrichment bolts are designed to go after the parser bolts.  Parser bolts will parse the telemetry, taking it from its native format and producing a standard JSON that would look like so:
+
+```json
+{
+"message":
+{"ip_src_addr": xxxx,
+"ip_dst_addr": xxxx,
+"ip_src_port": xxxx,
+"ip_dst_port": xxxx,
+"protocol": xxxx,
+"additional-field 1": xxx,
+}
+
+}
+```
+
+A single enrichment bolt would enrich the message and produce a JSON enrichment and attach it to the message.  Enrichments are stackable so multiple enrichments can be attached sequentially after a single parser bolt.  Stacked enrichments would produce messages under the "enrichment" tag and attach it to the message like so:
+
+```json
+{
+"message":
+{"ip_src_addr": xxxx,
+"ip_dst_addr": xxxx,
+"ip_src_port": xxxx,
+"ip_dst_port": xxxx,
+"protocol": xxxx,
+"additional-field 1": xxxx,
+},
+"enrichment" : {"geo": xxxx, "whois": xxxx, "hosts": xxxxx, "CIF": "xxxxx"}
+
+}
+```
+
+##Enrichment Sources
+
+Each enrichment has to have an enrichment source which can serve as a lookup table for enriching relevant message fields.  In order to minimize the use of additional platforms and tools we primarily try to rely on HBase as much as possible to store the enrichment information for lookup by key.  In order to use HBase we have to pre-process the enrichment feeds for bulk-loading into HBase with a specific key format optimized for retrieval, as well as utilize caches within the enrichment bolts to be able to provide enrichments in real time.  Our wiki contains information on how to set up the environment, pre-process feeds, and plug in the enrichment sources.
+
+##Enrichment Bolt
+
+The enrichment bolt is designed to be extensible to be re-used for all kinds of enrichment processes.  The bolt signature for declaration in a storm topology is as follows:
+
+
+
+```
+GenericEnrichmentBolt geo_enrichment = new GenericEnrichmentBolt()
+.withEnrichmentTag(
+config.getString("bolt.enrichment.geo.enrichment_tag"))
+.withAdapter(geo_adapter)
+.withMaxTimeRetain(
+config.getInt("bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES"))
+.withMaxCacheSize(
+config.getInt("bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM"))
+.withKeys(geo_keys).withMetricConfiguration(config);
+
+```
+
+EnrichmentTag - Name of the enrichment (geo, whois, hosts, etc)
+Keys - Keys which this enrichment is able to enrich (hosts field for hosts enrichment, source_ip, dest_ip, for geo enrichment, etc)
+MaxTimeToRetain & MaxCacheSize - define the caching policy of the enrichment bolt
+Adapter - which adapter to use with the enrichment bolt instance
+
+###Geo Adapter
+Geo adapter is able to do geo enrichment on hosts and destination IPs.  The open source version of the geo adapter uses the free Geo feeds from MaxMind.  The format of these feeds does not easily lend itself to a no-sql DB so this adapter is designed to work with MySQL.  But it is extensible enough to be made to work with a variety of other back ends.
+
+The signature of a geo adapter is as follows:
+
+```
+GeoMysqlAdapter geo_adapter = new GeoMysqlAdapter(
+config.getString("mysql.ip"), config.getInt("mysql.port"),
+config.getString("mysql.username"),
+config.getString("mysql.password"),
+config.getString("bolt.enrichment.geo.adapter.table"));
+
+```
+
+###Hosts Adapter
+The hosts adapter is designed to enrich the message format with the static host information that can be read from a standard text file.  This adapter is intended for use with a network crawling script that can identify all customer assets and place them in a text file.  For example, this script would identify all workstations, printers, appliances, etc.  Then if any of these assets are seen in the telemetry messages flowing through the adapter this enrichment would fire and the relevant known information about a host would be attached.  We are currently working on porting this adapter to work with HBase, but this work is not ready yet.  The known hosts file is located under the /etc/whitelists config directory of Metron.
+
+The signature of the hosts adapter is as follows:
+
+```
+Map<String, JSONObject> known_hosts = SettingsLoader
+.loadKnownHosts(hosts_path);
+
+HostFromPropertiesFileAdapter host_adapter = new HostFromPropertiesFileAdapter(
+known_hosts);
+
+```
+* The source and dest ips refer to the name of the message JSON key where the host information is located
+
+###Whois Adapter
+Whois adapter enriches the host name with additional whois information obtained from our proprietary Cisco feed.  The enricher itself is provided in this open source distribution, but the feed is not.  You have to have your own feed in order to use it.  Alternatively, you can contact us for providing you with this feed, but we would have to charge you a fee (we can't distribute it for free). The implementation of the whois enrichment we provide works with HBase.
+
+The signature of the whois adapter is as follows:
+
+```
+
+EnrichmentAdapter whois_adapter = new WhoisHBaseAdapter(
+config.getString("bolt.enrichment.whois.hbase.table.name"),
+config.getString("kafka.zk.list"),
+config.getString("kafka.zk.port"));
+```
+
+###CIF Adapter
+CIF adapter is designed to take in CIF feeds and cross-reference them against every message processed by Storm.  If there is a hit then the relevant information is attached to the message.  
+
+The signature of the CIF adapter is as follows:
+
+```
+CIFHbaseAdapter = new CIFHbaseAdapter(config
+.getString("kafka.zk.list"), config
+.getString("kafka.zk.port"), config
+.getString("bolt.enrichment.cif.tablename")))
+```
+
+##Stacking Enrichments
+Enrichments can be stacked.  By default each enrichment bolt listens on the "message" stream.  In order to create and stack enrichment bolts, create a new bolt and instantiate the appropriate adapter.  You can look at our sample topologies to see how enrichments can be stacked.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/pom.xml b/metron-platform/metron-enrichment/pom.xml
new file mode 100644
index 0000000..f6244a0
--- /dev/null
+++ b/metron-platform/metron-enrichment/pom.xml
@@ -0,0 +1,255 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+	Foundation (ASF) under one or more contributor license agreements. See the 
+	NOTICE file distributed with this work for additional information regarding 
+	copyright ownership. The ASF licenses this file to You under the Apache License, 
+	Version 2.0 (the "License"); you may not use this file except in compliance 
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+	Unless required by applicable law or agreed to in writing, software distributed 
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-enrichment</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <mysql.version>5.1.31</mysql.version>
+        <slf4j.version>1.7.7</slf4j.version>
+        <storm.hdfs.version>0.1.2</storm.hdfs.version>
+        <guava.version>${global_hbase_guava_version}</guava.version>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-common</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-hbase</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>${mysql.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-hdfs</artifactId>
+            <version>${global_storm_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.storm</groupId>
+                    <artifactId>storm-core</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-client</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_guava_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${global_hadoop_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>${global_mockito_version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>commons-validator</groupId>
+            <artifactId>commons-validator</artifactId>
+            <version>1.4.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-test-utilities</artifactId>
+            <version>0.1BETA</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <reporting>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <systemProperties>
+                        <property>
+                            <name>mode</name>
+                            <value>global</value>
+                        </property>
+                    </systemProperties>
+                </configuration>
+            </plugin>
+            <!-- Normally, dependency report takes time, skip it -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-project-info-reports-plugin</artifactId>
+                <version>2.7</version>
+
+                <configuration>
+                    <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>emma-maven-plugin</artifactId>
+                <version>1.0-alpha-3</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-pmd-plugin</artifactId>
+                <configuration>
+                    <targetJdk>1.7</targetJdk>
+                </configuration>
+            </plugin>
+        </plugins>
+    </reporting>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <source>1.7</source>
+                    <target>1.7</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>1.4</version>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                    <artifactSet>
+                        <excludes>
+                            <exclude>*slf4j*</exclude>
+                        </excludes>
+                    </artifactSet>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <relocations>
+                                <relocation>
+                                    <pattern>com.google.common</pattern>
+                                    <shadedPattern>org.apache.metron.guava</shadedPattern>
+                                </relocation>
+                            </relocations>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                                    <resource>.yaml</resource>
+                                </transformer>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass></mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/assembly/assembly.xml b/metron-platform/metron-enrichment/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..796dcc4
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/assembly/assembly.xml
@@ -0,0 +1,44 @@
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/scripts</directory>
+      <outputDirectory>/scripts</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0755</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/flux</directory>
+      <outputDirectory>/flux</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/flux/enrichment/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/flux/enrichment/remote.yaml b/metron-platform/metron-enrichment/src/main/flux/enrichment/remote.yaml
new file mode 100644
index 0000000..b499b24
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/flux/enrichment/remote.yaml
@@ -0,0 +1,413 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "enrichment"
+config:
+    topology.workers: 1
+    topology.acker.executors: 0
+
+components:
+# Enrichment
+    -   id: "jdbcConfig"
+        className: "org.apache.metron.enrichment.adapters.jdbc.MySqlConfig"
+        properties:
+            -   name: "host"
+                value: "${mysql.ip}"
+            -   name: "port"
+                value: ${mysql.port}
+            -   name: "username"
+                value: "${mysql.username}"
+            -   name: "password"
+                value: "${mysql.password}"
+            -   name: "table"
+                value: "GEO"
+    -   id: "geoEnrichmentAdapter"
+        className: "org.apache.metron.enrichment.adapters.geo.GeoAdapter"
+        configMethods:
+            -   name: "withJdbcConfig"
+                args:
+                    - ref: "jdbcConfig"
+    -   id: "geoEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+            -   "geo"
+            -   ref: "geoEnrichmentAdapter"
+    -   id: "hostEnrichmentAdapter"
+        className: "org.apache.metron.enrichment.adapters.host.HostFromJSONListAdapter"
+        constructorArgs:
+            - '${org.apache.metron.enrichment.host.known_hosts}'
+    -   id: "hostEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+            -   "host"
+            -   ref: "hostEnrichmentAdapter"
+
+    -   id: "simpleHBaseEnrichmentConfig"
+        className: "org.apache.metron.enrichment.adapters.simplehbase.SimpleHBaseConfig"
+        configMethods:
+            -   name: "withProviderImpl"
+                args:
+                    - "${hbase.provider.impl}"
+            -   name: "withHBaseTable"
+                args:
+                    - "${enrichment.simple.hbase.table}"
+            -   name: "withHBaseCF"
+                args:
+                    - "${enrichment.simple.hbase.cf}"
+    -   id: "simpleHBaseEnrichmentAdapter"
+        className: "org.apache.metron.enrichment.adapters.simplehbase.SimpleHBaseAdapter"
+        configMethods:
+           -    name: "withConfig"
+                args:
+                    - ref: "simpleHBaseEnrichmentConfig"
+    -   id: "simpleHBaseEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+          -   "hbaseEnrichment"
+          -   ref: "simpleHBaseEnrichmentAdapter"
+    -   id: "enrichments"
+        className: "java.util.ArrayList"
+        configMethods:
+            -   name: "add"
+                args:
+                    - ref: "geoEnrichment"
+            -   name: "add"
+                args:
+                    - ref: "hostEnrichment"
+            -   name: "add"
+                args:
+                    - ref: "simpleHBaseEnrichment"
+# Threat Intel
+
+    -   id: "simpleHBaseThreatIntelConfig"
+        className: "org.apache.metron.enrichment.adapters.threatintel.ThreatIntelConfig"
+        configMethods:
+            -   name: "withProviderImpl"
+                args:
+                    - "${hbase.provider.impl}"
+            -   name: "withTrackerHBaseTable"
+                args:
+                    - "${threat.intel.tracker.table}"
+            -   name: "withTrackerHBaseCF"
+                args:
+                    - "${threat.intel.tracker.cf}"
+            -   name: "withHBaseTable"
+                args:
+                    - "${threat.intel.simple.hbase.table}"
+            -   name: "withHBaseCF"
+                args:
+                    - "${threat.intel.simple.hbase.cf}"
+    -   id: "simpleHBaseThreatIntelAdapter"
+        className: "org.apache.metron.enrichment.adapters.threatintel.ThreatIntelAdapter"
+        configMethods:
+           -    name: "withConfig"
+                args:
+                    - ref: "simpleHBaseThreatIntelConfig"
+    -   id: "simpleHBaseThreatIntelEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+          -   "hbaseThreatIntel"
+          -   ref: "simpleHBaseThreatIntelAdapter"
+
+    -   id: "threatIntels"
+        className: "java.util.ArrayList"
+        configMethods:
+            -   name: "add"
+                args:
+                    - ref: "simpleHBaseThreatIntelEnrichment"
+
+    -   id: "fileNameFormat"
+        className: "org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat"
+        configMethods:
+            -   name: "withPrefix"
+                args:
+                    - "enrichment-"
+            -   name: "withExtension"
+                args:
+                  - ".json"
+            -   name: "withPath"
+                args:
+                    - "${index.hdfs.output}"
+
+    -   id: "hdfsRotationPolicy"
+        className: "${bolt.hdfs.rotation.policy}"
+        constructorArgs:
+          -  ${bolt.hdfs.rotation.policy.count}
+          - "${bolt.hdfs.rotation.policy.units}"
+#indexing
+    -   id: "hdfsWriter"
+        className: "org.apache.metron.writer.hdfs.HdfsWriter"
+        configMethods:
+            -   name: "withFileNameFormat"
+                args:
+                    - ref: "fileNameFormat"
+            -   name: "withRotationPolicy"
+                args:
+                    - ref: "hdfsRotationPolicy"
+
+    -   id: "indexWriter"
+        className: "${writer.class.name}"
+
+#kafka/zookeeper
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "enrichments"
+            # zk root
+            - ""
+            # id
+            - "enrichments"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+bolts:
+# Enrichment Bolts
+    -   id: "enrichmentSplitBolt"
+        className: "org.apache.metron.enrichment.bolt.EnrichmentSplitterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichments"
+                args:
+                    - ref: "enrichments"
+    -   id: "geoEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "geoEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "hostEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "hostEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "simpleHBaseEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "simpleHBaseEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "enrichmentJoinBolt"
+        className: "org.apache.metron.enrichment.bolt.EnrichmentJoinBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+
+# Threat Intel Bolts
+    -   id: "threatIntelSplitBolt"
+        className: "org.apache.metron.enrichment.bolt.ThreatIntelSplitterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichments"
+                args:
+                    - ref: "threatIntels"
+            -   name: "withMessageFieldName"
+                args: ["message"]
+    -   id: "simpleHBaseThreatIntelBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "simpleHBaseThreatIntelEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "threatIntelJoinBolt"
+        className: "org.apache.metron.enrichment.bolt.ThreatIntelJoinBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+# Indexing Bolts
+    -   id: "indexingBolt"
+        className: "org.apache.metron.enrichment.bolt.BulkMessageWriterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withBulkMessageWriter"
+                args:
+                    - ref: "indexWriter"
+    -   id: "hdfsIndexingBolt"
+        className: "org.apache.metron.enrichment.bolt.BulkMessageWriterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withBulkMessageWriter"
+                args:
+                    - ref: "hdfsWriter"
+
+
+streams:
+#parser
+    -   name: "spout -> enrichmentSplit"
+        from: "kafkaSpout"
+        to: "enrichmentSplitBolt"
+        grouping:
+            type: SHUFFLE
+
+#enrichment
+    -   name: "enrichmentSplit -> host"
+        from: "enrichmentSplitBolt"
+        to: "hostEnrichmentBolt"
+        grouping:
+            streamId: "host"
+            type: FIELDS
+            args: ["key"]
+    -   name: "enrichmentSplit -> geo"
+        from: "enrichmentSplitBolt"
+        to: "geoEnrichmentBolt"
+        grouping:
+            streamId: "geo"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "enrichmentSplit -> simpleHBaseEnrichmentBolt"
+        from: "enrichmentSplitBolt"
+        to: "simpleHBaseEnrichmentBolt"
+        grouping:
+            streamId: "hbaseEnrichment"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "splitter -> join"
+        from: "enrichmentSplitBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+    -   name: "geo -> join"
+        from: "geoEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "geo"
+            type: FIELDS
+            args: ["key"]
+
+
+    -   name: "simpleHBaseEnrichmentBolt -> join"
+        from: "simpleHBaseEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "hbaseEnrichment"
+            type: FIELDS
+            args: ["key"]
+    -   name: "host -> join"
+        from: "hostEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "host"
+            type: FIELDS
+            args: ["key"]
+
+#threat intel
+    -   name: "enrichmentJoin -> threatSplit"
+        from: "enrichmentJoinBolt"
+        to: "threatIntelSplitBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "threatSplit -> simpleHBaseThreatIntel"
+        from: "threatIntelSplitBolt"
+        to: "simpleHBaseThreatIntelBolt"
+        grouping:
+            streamId: "hbaseThreatIntel"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "simpleHBaseThreatIntel -> join"
+        from: "simpleHBaseThreatIntelBolt"
+        to: "threatIntelJoinBolt"
+        grouping:
+            streamId: "hbaseThreatIntel"
+            type: FIELDS
+            args: ["key"]
+    -   name: "threatIntelSplit -> threatIntelJoin"
+        from: "threatIntelSplitBolt"
+        to: "threatIntelJoinBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+#indexing
+    -   name: "threatIntelJoin -> indexing"
+        from: "threatIntelJoinBolt"
+        to: "indexingBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "threatIntelJoin -> hdfs"
+        from: "threatIntelJoinBolt"
+        to: "hdfsIndexingBolt"
+        grouping:
+            streamId: "message"
+            type: SHUFFLE
+
+    -   name: "indexingBolt -> errorIndexingBolt"
+        from: "indexingBolt"
+        to: "indexingBolt"
+        grouping:
+            streamId: "error"
+            type: SHUFFLE


[35/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/resources/effective_tld_names.dat
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/resources/effective_tld_names.dat b/metron-platform/metron-common/src/test/resources/effective_tld_names.dat
new file mode 100644
index 0000000..36e5d4c
--- /dev/null
+++ b/metron-platform/metron-common/src/test/resources/effective_tld_names.dat
@@ -0,0 +1,9719 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// ===BEGIN ICANN DOMAINS===
+
+// ac : http://en.wikipedia.org/wiki/.ac
+ac
+com.ac
+edu.ac
+gov.ac
+net.ac
+mil.ac
+org.ac
+
+// ad : http://en.wikipedia.org/wiki/.ad
+ad
+nom.ad
+
+// ae : http://en.wikipedia.org/wiki/.ae
+// see also: "Domain Name Eligibility Policy" at http://www.aeda.ae/eng/aepolicy.php
+ae
+co.ae
+net.ae
+org.ae
+sch.ae
+ac.ae
+gov.ae
+mil.ae
+
+// aero : see http://www.information.aero/index.php?id=66
+aero
+accident-investigation.aero
+accident-prevention.aero
+aerobatic.aero
+aeroclub.aero
+aerodrome.aero
+agents.aero
+aircraft.aero
+airline.aero
+airport.aero
+air-surveillance.aero
+airtraffic.aero
+air-traffic-control.aero
+ambulance.aero
+amusement.aero
+association.aero
+author.aero
+ballooning.aero
+broker.aero
+caa.aero
+cargo.aero
+catering.aero
+certification.aero
+championship.aero
+charter.aero
+civilaviation.aero
+club.aero
+conference.aero
+consultant.aero
+consulting.aero
+control.aero
+council.aero
+crew.aero
+design.aero
+dgca.aero
+educator.aero
+emergency.aero
+engine.aero
+engineer.aero
+entertainment.aero
+equipment.aero
+exchange.aero
+express.aero
+federation.aero
+flight.aero
+freight.aero
+fuel.aero
+gliding.aero
+government.aero
+groundhandling.aero
+group.aero
+hanggliding.aero
+homebuilt.aero
+insurance.aero
+journal.aero
+journalist.aero
+leasing.aero
+logistics.aero
+magazine.aero
+maintenance.aero
+marketplace.aero
+media.aero
+microlight.aero
+modelling.aero
+navigation.aero
+parachuting.aero
+paragliding.aero
+passenger-association.aero
+pilot.aero
+press.aero
+production.aero
+recreation.aero
+repbody.aero
+res.aero
+research.aero
+rotorcraft.aero
+safety.aero
+scientist.aero
+services.aero
+show.aero
+skydiving.aero
+software.aero
+student.aero
+taxi.aero
+trader.aero
+trading.aero
+trainer.aero
+union.aero
+workinggroup.aero
+works.aero
+
+// af : http://www.nic.af/help.jsp
+af
+gov.af
+com.af
+org.af
+net.af
+edu.af
+
+// ag : http://www.nic.ag/prices.htm
+ag
+com.ag
+org.ag
+net.ag
+co.ag
+nom.ag
+
+// ai : http://nic.com.ai/
+ai
+off.ai
+com.ai
+net.ai
+org.ai
+
+// al : http://www.ert.gov.al/ert_alb/faq_det.html?Id=31
+al
+com.al
+edu.al
+gov.al
+mil.al
+net.al
+org.al
+
+// am : http://en.wikipedia.org/wiki/.am
+am
+
+// an : http://www.una.an/an_domreg/default.asp
+an
+com.an
+net.an
+org.an
+edu.an
+
+// ao : http://en.wikipedia.org/wiki/.ao
+// http://www.dns.ao/REGISTR.DOC
+ao
+ed.ao
+gv.ao
+og.ao
+co.ao
+pb.ao
+it.ao
+
+// aq : http://en.wikipedia.org/wiki/.aq
+aq
+
+// ar : https://nic.ar/normativa-vigente.xhtml
+ar
+com.ar
+edu.ar
+gob.ar
+gov.ar
+int.ar
+mil.ar
+net.ar
+org.ar
+tur.ar
+
+// arpa : http://en.wikipedia.org/wiki/.arpa
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+arpa
+e164.arpa
+in-addr.arpa
+ip6.arpa
+iris.arpa
+uri.arpa
+urn.arpa
+
+// as : http://en.wikipedia.org/wiki/.as
+as
+gov.as
+
+// asia : http://en.wikipedia.org/wiki/.asia
+asia
+
+// at : http://en.wikipedia.org/wiki/.at
+// Confirmed by registry <it...@nic.at> 2008-06-17
+at
+ac.at
+co.at
+gv.at
+or.at
+
+// au : http://en.wikipedia.org/wiki/.au
+// http://www.auda.org.au/
+au
+// 2LDs
+com.au
+net.au
+org.au
+edu.au
+gov.au
+asn.au
+id.au
+// Historic 2LDs (closed to new registration, but sites still exist)
+info.au
+conf.au
+oz.au
+// CGDNs - http://www.cgdn.org.au/
+act.au
+nsw.au
+nt.au
+qld.au
+sa.au
+tas.au
+vic.au
+wa.au
+// 3LDs
+act.edu.au
+nsw.edu.au
+nt.edu.au
+qld.edu.au
+sa.edu.au
+tas.edu.au
+vic.edu.au
+wa.edu.au
+// act.gov.au  Bug 984824 - Removed at request of Greg Tankard
+// nsw.gov.au  Bug 547985 - Removed at request of <Sh...@services.nsw.gov.au>
+// nt.gov.au  Bug 940478 - Removed at request of Greg Connors <Gr...@nt.gov.au>
+qld.gov.au
+sa.gov.au
+tas.gov.au
+vic.gov.au
+wa.gov.au
+
+// aw : http://en.wikipedia.org/wiki/.aw
+aw
+com.aw
+
+// ax : http://en.wikipedia.org/wiki/.ax
+ax
+
+// az : http://en.wikipedia.org/wiki/.az
+az
+com.az
+net.az
+int.az
+gov.az
+org.az
+edu.az
+info.az
+pp.az
+mil.az
+name.az
+pro.az
+biz.az
+
+// ba : http://en.wikipedia.org/wiki/.ba
+ba
+org.ba
+net.ba
+edu.ba
+gov.ba
+mil.ba
+unsa.ba
+unbi.ba
+co.ba
+com.ba
+rs.ba
+
+// bb : http://en.wikipedia.org/wiki/.bb
+bb
+biz.bb
+co.bb
+com.bb
+edu.bb
+gov.bb
+info.bb
+net.bb
+org.bb
+store.bb
+tv.bb
+
+// bd : http://en.wikipedia.org/wiki/.bd
+*.bd
+
+// be : http://en.wikipedia.org/wiki/.be
+// Confirmed by registry <te...@dns.be> 2008-06-08
+be
+ac.be
+
+// bf : http://en.wikipedia.org/wiki/.bf
+bf
+gov.bf
+
+// bg : http://en.wikipedia.org/wiki/.bg
+// https://www.register.bg/user/static/rules/en/index.html
+bg
+a.bg
+b.bg
+c.bg
+d.bg
+e.bg
+f.bg
+g.bg
+h.bg
+i.bg
+j.bg
+k.bg
+l.bg
+m.bg
+n.bg
+o.bg
+p.bg
+q.bg
+r.bg
+s.bg
+t.bg
+u.bg
+v.bg
+w.bg
+x.bg
+y.bg
+z.bg
+0.bg
+1.bg
+2.bg
+3.bg
+4.bg
+5.bg
+6.bg
+7.bg
+8.bg
+9.bg
+
+// bh : http://en.wikipedia.org/wiki/.bh
+bh
+com.bh
+edu.bh
+net.bh
+org.bh
+gov.bh
+
+// bi : http://en.wikipedia.org/wiki/.bi
+// http://whois.nic.bi/
+bi
+co.bi
+com.bi
+edu.bi
+or.bi
+org.bi
+
+// biz : http://en.wikipedia.org/wiki/.biz
+biz
+
+// bj : http://en.wikipedia.org/wiki/.bj
+bj
+asso.bj
+barreau.bj
+gouv.bj
+
+// bm : http://www.bermudanic.bm/dnr-text.txt
+bm
+com.bm
+edu.bm
+gov.bm
+net.bm
+org.bm
+
+// bn : http://en.wikipedia.org/wiki/.bn
+*.bn
+
+// bo : http://www.nic.bo/
+bo
+com.bo
+edu.bo
+gov.bo
+gob.bo
+int.bo
+org.bo
+net.bo
+mil.bo
+tv.bo
+
+// br : http://registro.br/dominio/categoria.html
+// Submitted by registry <fn...@registro.br> 2014-08-11
+br
+adm.br
+adv.br
+agr.br
+am.br
+arq.br
+art.br
+ato.br
+b.br
+bio.br
+blog.br
+bmd.br
+cim.br
+cng.br
+cnt.br
+com.br
+coop.br
+ecn.br
+eco.br
+edu.br
+emp.br
+eng.br
+esp.br
+etc.br
+eti.br
+far.br
+flog.br
+fm.br
+fnd.br
+fot.br
+fst.br
+g12.br
+ggf.br
+gov.br
+imb.br
+ind.br
+inf.br
+jor.br
+jus.br
+leg.br
+lel.br
+mat.br
+med.br
+mil.br
+mp.br
+mus.br
+net.br
+*.nom.br
+not.br
+ntr.br
+odo.br
+org.br
+ppg.br
+pro.br
+psc.br
+psi.br
+qsl.br
+radio.br
+rec.br
+slg.br
+srv.br
+taxi.br
+teo.br
+tmp.br
+trd.br
+tur.br
+tv.br
+vet.br
+vlog.br
+wiki.br
+zlg.br
+
+// bs : http://www.nic.bs/rules.html
+bs
+com.bs
+net.bs
+org.bs
+edu.bs
+gov.bs
+
+// bt : http://en.wikipedia.org/wiki/.bt
+bt
+com.bt
+edu.bt
+gov.bt
+net.bt
+org.bt
+
+// bv : No registrations at this time.
+// Submitted by registry <ja...@uninett.no> 2006-06-16
+bv
+
+// bw : http://en.wikipedia.org/wiki/.bw
+// http://www.gobin.info/domainname/bw.doc
+// list of other 2nd level tlds ?
+bw
+co.bw
+org.bw
+
+// by : http://en.wikipedia.org/wiki/.by
+// http://tld.by/rules_2006_en.html
+// list of other 2nd level tlds ?
+by
+gov.by
+mil.by
+// Official information does not indicate that com.by is a reserved
+// second-level domain, but it's being used as one (see www.google.com.by and
+// www.yahoo.com.by, for example), so we list it here for safety's sake.
+com.by
+
+// http://hoster.by/
+of.by
+
+// bz : http://en.wikipedia.org/wiki/.bz
+// http://www.belizenic.bz/
+bz
+com.bz
+net.bz
+org.bz
+edu.bz
+gov.bz
+
+// ca : http://en.wikipedia.org/wiki/.ca
+ca
+// ca geographical names
+ab.ca
+bc.ca
+mb.ca
+nb.ca
+nf.ca
+nl.ca
+ns.ca
+nt.ca
+nu.ca
+on.ca
+pe.ca
+qc.ca
+sk.ca
+yk.ca
+// gc.ca: http://en.wikipedia.org/wiki/.gc.ca
+// see also: http://registry.gc.ca/en/SubdomainFAQ
+gc.ca
+
+// cat : http://en.wikipedia.org/wiki/.cat
+cat
+
+// cc : http://en.wikipedia.org/wiki/.cc
+cc
+
+// cd : http://en.wikipedia.org/wiki/.cd
+// see also: https://www.nic.cd/domain/insertDomain_2.jsp?act=1
+cd
+gov.cd
+
+// cf : http://en.wikipedia.org/wiki/.cf
+cf
+
+// cg : http://en.wikipedia.org/wiki/.cg
+cg
+
+// ch : http://en.wikipedia.org/wiki/.ch
+ch
+
+// ci : http://en.wikipedia.org/wiki/.ci
+// http://www.nic.ci/index.php?page=charte
+ci
+org.ci
+or.ci
+com.ci
+co.ci
+edu.ci
+ed.ci
+ac.ci
+net.ci
+go.ci
+asso.ci
+aéroport.ci
+int.ci
+presse.ci
+md.ci
+gouv.ci
+
+// ck : http://en.wikipedia.org/wiki/.ck
+*.ck
+!www.ck
+
+// cl : http://en.wikipedia.org/wiki/.cl
+cl
+gov.cl
+gob.cl
+co.cl
+mil.cl
+
+// cm : http://en.wikipedia.org/wiki/.cm plus bug 981927
+cm
+co.cm
+com.cm
+gov.cm
+net.cm
+
+// cn : http://en.wikipedia.org/wiki/.cn
+// Submitted by registry <ta...@cnnic.cn> 2008-06-11
+cn
+ac.cn
+com.cn
+edu.cn
+gov.cn
+net.cn
+org.cn
+mil.cn
+公司.cn
+网络.cn
+網絡.cn
+// cn geographic names
+ah.cn
+bj.cn
+cq.cn
+fj.cn
+gd.cn
+gs.cn
+gz.cn
+gx.cn
+ha.cn
+hb.cn
+he.cn
+hi.cn
+hl.cn
+hn.cn
+jl.cn
+js.cn
+jx.cn
+ln.cn
+nm.cn
+nx.cn
+qh.cn
+sc.cn
+sd.cn
+sh.cn
+sn.cn
+sx.cn
+tj.cn
+xj.cn
+xz.cn
+yn.cn
+zj.cn
+hk.cn
+mo.cn
+tw.cn
+
+// co : http://en.wikipedia.org/wiki/.co
+// Submitted by registry <te...@uniandes.edu.co> 2008-06-11
+co
+arts.co
+com.co
+edu.co
+firm.co
+gov.co
+info.co
+int.co
+mil.co
+net.co
+nom.co
+org.co
+rec.co
+web.co
+
+// com : http://en.wikipedia.org/wiki/.com
+com
+
+// coop : http://en.wikipedia.org/wiki/.coop
+coop
+
+// cr : http://www.nic.cr/niccr_publico/showRegistroDominiosScreen.do
+cr
+ac.cr
+co.cr
+ed.cr
+fi.cr
+go.cr
+or.cr
+sa.cr
+
+// cu : http://en.wikipedia.org/wiki/.cu
+cu
+com.cu
+edu.cu
+org.cu
+net.cu
+gov.cu
+inf.cu
+
+// cv : http://en.wikipedia.org/wiki/.cv
+cv
+
+// cw : http://www.una.cw/cw_registry/
+// Confirmed by registry <re...@una.net> 2013-03-26
+cw
+com.cw
+edu.cw
+net.cw
+org.cw
+
+// cx : http://en.wikipedia.org/wiki/.cx
+// list of other 2nd level tlds ?
+cx
+gov.cx
+
+// cy : http://en.wikipedia.org/wiki/.cy
+*.cy
+
+// cz : http://en.wikipedia.org/wiki/.cz
+cz
+
+// de : http://en.wikipedia.org/wiki/.de
+// Confirmed by registry <op...@denic.de> (with technical
+// reservations) 2008-07-01
+de
+
+// dj : http://en.wikipedia.org/wiki/.dj
+dj
+
+// dk : http://en.wikipedia.org/wiki/.dk
+// Confirmed by registry <ro...@dk-hostmaster.dk> 2008-06-17
+dk
+
+// dm : http://en.wikipedia.org/wiki/.dm
+dm
+com.dm
+net.dm
+org.dm
+edu.dm
+gov.dm
+
+// do : http://en.wikipedia.org/wiki/.do
+do
+art.do
+com.do
+edu.do
+gob.do
+gov.do
+mil.do
+net.do
+org.do
+sld.do
+web.do
+
+// dz : http://en.wikipedia.org/wiki/.dz
+dz
+com.dz
+org.dz
+net.dz
+gov.dz
+edu.dz
+asso.dz
+pol.dz
+art.dz
+
+// ec : http://www.nic.ec/reg/paso1.asp
+// Submitted by registry <va...@nic.ec> 2008-07-04
+ec
+com.ec
+info.ec
+net.ec
+fin.ec
+k12.ec
+med.ec
+pro.ec
+org.ec
+edu.ec
+gov.ec
+gob.ec
+mil.ec
+
+// edu : http://en.wikipedia.org/wiki/.edu
+edu
+
+// ee : http://www.eenet.ee/EENet/dom_reeglid.html#lisa_B
+ee
+edu.ee
+gov.ee
+riik.ee
+lib.ee
+med.ee
+com.ee
+pri.ee
+aip.ee
+org.ee
+fie.ee
+
+// eg : http://en.wikipedia.org/wiki/.eg
+eg
+com.eg
+edu.eg
+eun.eg
+gov.eg
+mil.eg
+name.eg
+net.eg
+org.eg
+sci.eg
+
+// er : http://en.wikipedia.org/wiki/.er
+*.er
+
+// es : https://www.nic.es/site_ingles/ingles/dominios/index.html
+es
+com.es
+nom.es
+org.es
+gob.es
+edu.es
+
+// et : http://en.wikipedia.org/wiki/.et
+et
+com.et
+gov.et
+org.et
+edu.et
+biz.et
+name.et
+info.et
+
+// eu : http://en.wikipedia.org/wiki/.eu
+eu
+
+// fi : http://en.wikipedia.org/wiki/.fi
+fi
+// aland.fi : http://en.wikipedia.org/wiki/.ax
+// This domain is being phased out in favor of .ax. As there are still many
+// domains under aland.fi, we still keep it on the list until aland.fi is
+// completely removed.
+// TODO: Check for updates (expected to be phased out around Q1/2009)
+aland.fi
+
+// fj : http://en.wikipedia.org/wiki/.fj
+*.fj
+
+// fk : http://en.wikipedia.org/wiki/.fk
+*.fk
+
+// fm : http://en.wikipedia.org/wiki/.fm
+fm
+
+// fo : http://en.wikipedia.org/wiki/.fo
+fo
+
+// fr : http://www.afnic.fr/
+// domaines descriptifs : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-descriptifs
+fr
+com.fr
+asso.fr
+nom.fr
+prd.fr
+presse.fr
+tm.fr
+// domaines sectoriels : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-sectoriels
+aeroport.fr
+assedic.fr
+avocat.fr
+avoues.fr
+cci.fr
+chambagri.fr
+chirurgiens-dentistes.fr
+experts-comptables.fr
+geometre-expert.fr
+gouv.fr
+greta.fr
+huissier-justice.fr
+medecin.fr
+notaires.fr
+pharmacien.fr
+port.fr
+veterinaire.fr
+
+// ga : http://en.wikipedia.org/wiki/.ga
+ga
+
+// gb : This registry is effectively dormant
+// Submitted by registry <Da...@ja.net> 2008-06-12
+gb
+
+// gd : http://en.wikipedia.org/wiki/.gd
+gd
+
+// ge : http://www.nic.net.ge/policy_en.pdf
+ge
+com.ge
+edu.ge
+gov.ge
+org.ge
+mil.ge
+net.ge
+pvt.ge
+
+// gf : http://en.wikipedia.org/wiki/.gf
+gf
+
+// gg : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+gg
+co.gg
+net.gg
+org.gg
+
+// gh : http://en.wikipedia.org/wiki/.gh
+// see also: http://www.nic.gh/reg_now.php
+// Although domains directly at second level are not possible at the moment,
+// they have been possible for some time and may come back.
+gh
+com.gh
+edu.gh
+gov.gh
+org.gh
+mil.gh
+
+// gi : http://www.nic.gi/rules.html
+gi
+com.gi
+ltd.gi
+gov.gi
+mod.gi
+edu.gi
+org.gi
+
+// gl : http://en.wikipedia.org/wiki/.gl
+// http://nic.gl
+gl
+
+// gm : http://www.nic.gm/htmlpages%5Cgm-policy.htm
+gm
+
+// gn : http://psg.com/dns/gn/gn.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+gn
+ac.gn
+com.gn
+edu.gn
+gov.gn
+org.gn
+net.gn
+
+// gov : http://en.wikipedia.org/wiki/.gov
+gov
+
+// gp : http://www.nic.gp/index.php?lang=en
+gp
+com.gp
+net.gp
+mobi.gp
+edu.gp
+org.gp
+asso.gp
+
+// gq : http://en.wikipedia.org/wiki/.gq
+gq
+
+// gr : https://grweb.ics.forth.gr/english/1617-B-2005.html
+// Submitted by registry <se...@ics.forth.gr> 2008-06-09
+gr
+com.gr
+edu.gr
+net.gr
+org.gr
+gov.gr
+
+// gs : http://en.wikipedia.org/wiki/.gs
+gs
+
+// gt : http://www.gt/politicas_de_registro.html
+gt
+com.gt
+edu.gt
+gob.gt
+ind.gt
+mil.gt
+net.gt
+org.gt
+
+// gu : http://gadao.gov.gu/registration.txt
+*.gu
+
+// gw : http://en.wikipedia.org/wiki/.gw
+gw
+
+// gy : http://en.wikipedia.org/wiki/.gy
+// http://registry.gy/
+gy
+co.gy
+com.gy
+net.gy
+
+// hk : https://www.hkdnr.hk
+// Submitted by registry <hk...@hkirc.hk> 2008-06-11
+hk
+com.hk
+edu.hk
+gov.hk
+idv.hk
+net.hk
+org.hk
+公司.hk
+教育.hk
+敎育.hk
+政府.hk
+個人.hk
+个人.hk
+箇人.hk
+網络.hk
+网络.hk
+组織.hk
+網絡.hk
+网絡.hk
+组织.hk
+組織.hk
+組织.hk
+
+// hm : http://en.wikipedia.org/wiki/.hm
+hm
+
+// hn : http://www.nic.hn/politicas/ps02,,05.html
+hn
+com.hn
+edu.hn
+org.hn
+net.hn
+mil.hn
+gob.hn
+
+// hr : http://www.dns.hr/documents/pdf/HRTLD-regulations.pdf
+hr
+iz.hr
+from.hr
+name.hr
+com.hr
+
+// ht : http://www.nic.ht/info/charte.cfm
+ht
+com.ht
+shop.ht
+firm.ht
+info.ht
+adult.ht
+net.ht
+pro.ht
+org.ht
+med.ht
+art.ht
+coop.ht
+pol.ht
+asso.ht
+edu.ht
+rel.ht
+gouv.ht
+perso.ht
+
+// hu : http://www.domain.hu/domain/English/sld.html
+// Confirmed by registry <pa...@iszt.hu> 2008-06-12
+hu
+co.hu
+info.hu
+org.hu
+priv.hu
+sport.hu
+tm.hu
+2000.hu
+agrar.hu
+bolt.hu
+casino.hu
+city.hu
+erotica.hu
+erotika.hu
+film.hu
+forum.hu
+games.hu
+hotel.hu
+ingatlan.hu
+jogasz.hu
+konyvelo.hu
+lakas.hu
+media.hu
+news.hu
+reklam.hu
+sex.hu
+shop.hu
+suli.hu
+szex.hu
+tozsde.hu
+utazas.hu
+video.hu
+
+// id : https://register.pandi.or.id/
+id
+ac.id
+biz.id
+co.id
+desa.id
+go.id
+mil.id
+my.id
+net.id
+or.id
+sch.id
+web.id
+
+// ie : http://en.wikipedia.org/wiki/.ie
+ie
+gov.ie
+
+// il : http://en.wikipedia.org/wiki/.il
+*.il
+
+// im : https://www.nic.im/
+// Submitted by registry <in...@nic.im> 2013-11-15
+im
+ac.im
+co.im
+com.im
+ltd.co.im
+net.im
+org.im
+plc.co.im
+tt.im
+tv.im
+
+// in : http://en.wikipedia.org/wiki/.in
+// see also: https://registry.in/Policies
+// Please note that nic.in is not an official eTLD, but used by most
+// government institutions.
+in
+co.in
+firm.in
+net.in
+org.in
+gen.in
+ind.in
+nic.in
+ac.in
+edu.in
+res.in
+gov.in
+mil.in
+
+// info : http://en.wikipedia.org/wiki/.info
+info
+
+// int : http://en.wikipedia.org/wiki/.int
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+int
+eu.int
+
+// io : http://www.nic.io/rules.html
+// list of other 2nd level tlds ?
+io
+com.io
+
+// iq : http://www.cmc.iq/english/iq/iqregister1.htm
+iq
+gov.iq
+edu.iq
+mil.iq
+com.iq
+org.iq
+net.iq
+
+// ir : http://www.nic.ir/Terms_and_Conditions_ir,_Appendix_1_Domain_Rules
+// Also see http://www.nic.ir/Internationalized_Domain_Names
+// Two <iran>.ir entries added at request of <te...@nic.ir>, 2010-04-16
+ir
+ac.ir
+co.ir
+gov.ir
+id.ir
+net.ir
+org.ir
+sch.ir
+// xn--mgba3a4f16a.ir (<iran>.ir, Persian YEH)
+ایران.ir
+// xn--mgba3a4fra.ir (<iran>.ir, Arabic YEH)
+ايران.ir
+
+// is : http://www.isnic.is/domain/rules.php
+// Confirmed by registry <ma...@isgate.is> 2008-12-06
+is
+net.is
+com.is
+edu.is
+gov.is
+org.is
+int.is
+
+// it : http://en.wikipedia.org/wiki/.it
+it
+gov.it
+edu.it
+// Reserved geo-names:
+// http://www.nic.it/documenti/regolamenti-e-linee-guida/regolamento-assegnazione-versione-6.0.pdf
+// There is also a list of reserved geo-names corresponding to Italian municipalities
+// http://www.nic.it/documenti/appendice-c.pdf, but it is not included here.
+// Regions
+abr.it
+abruzzo.it
+aosta-valley.it
+aostavalley.it
+bas.it
+basilicata.it
+cal.it
+calabria.it
+cam.it
+campania.it
+emilia-romagna.it
+emiliaromagna.it
+emr.it
+friuli-v-giulia.it
+friuli-ve-giulia.it
+friuli-vegiulia.it
+friuli-venezia-giulia.it
+friuli-veneziagiulia.it
+friuli-vgiulia.it
+friuliv-giulia.it
+friulive-giulia.it
+friulivegiulia.it
+friulivenezia-giulia.it
+friuliveneziagiulia.it
+friulivgiulia.it
+fvg.it
+laz.it
+lazio.it
+lig.it
+liguria.it
+lom.it
+lombardia.it
+lombardy.it
+lucania.it
+mar.it
+marche.it
+mol.it
+molise.it
+piedmont.it
+piemonte.it
+pmn.it
+pug.it
+puglia.it
+sar.it
+sardegna.it
+sardinia.it
+sic.it
+sicilia.it
+sicily.it
+taa.it
+tos.it
+toscana.it
+trentino-a-adige.it
+trentino-aadige.it
+trentino-alto-adige.it
+trentino-altoadige.it
+trentino-s-tirol.it
+trentino-stirol.it
+trentino-sud-tirol.it
+trentino-sudtirol.it
+trentino-sued-tirol.it
+trentino-suedtirol.it
+trentinoa-adige.it
+trentinoaadige.it
+trentinoalto-adige.it
+trentinoaltoadige.it
+trentinos-tirol.it
+trentinostirol.it
+trentinosud-tirol.it
+trentinosudtirol.it
+trentinosued-tirol.it
+trentinosuedtirol.it
+tuscany.it
+umb.it
+umbria.it
+val-d-aosta.it
+val-daosta.it
+vald-aosta.it
+valdaosta.it
+valle-aosta.it
+valle-d-aosta.it
+valle-daosta.it
+valleaosta.it
+valled-aosta.it
+valledaosta.it
+vallee-aoste.it
+valleeaoste.it
+vao.it
+vda.it
+ven.it
+veneto.it
+// Provinces
+ag.it
+agrigento.it
+al.it
+alessandria.it
+alto-adige.it
+altoadige.it
+an.it
+ancona.it
+andria-barletta-trani.it
+andria-trani-barletta.it
+andriabarlettatrani.it
+andriatranibarletta.it
+ao.it
+aosta.it
+aoste.it
+ap.it
+aq.it
+aquila.it
+ar.it
+arezzo.it
+ascoli-piceno.it
+ascolipiceno.it
+asti.it
+at.it
+av.it
+avellino.it
+ba.it
+balsan.it
+bari.it
+barletta-trani-andria.it
+barlettatraniandria.it
+belluno.it
+benevento.it
+bergamo.it
+bg.it
+bi.it
+biella.it
+bl.it
+bn.it
+bo.it
+bologna.it
+bolzano.it
+bozen.it
+br.it
+brescia.it
+brindisi.it
+bs.it
+bt.it
+bz.it
+ca.it
+cagliari.it
+caltanissetta.it
+campidano-medio.it
+campidanomedio.it
+campobasso.it
+carbonia-iglesias.it
+carboniaiglesias.it
+carrara-massa.it
+carraramassa.it
+caserta.it
+catania.it
+catanzaro.it
+cb.it
+ce.it
+cesena-forli.it
+cesenaforli.it
+ch.it
+chieti.it
+ci.it
+cl.it
+cn.it
+co.it
+como.it
+cosenza.it
+cr.it
+cremona.it
+crotone.it
+cs.it
+ct.it
+cuneo.it
+cz.it
+dell-ogliastra.it
+dellogliastra.it
+en.it
+enna.it
+fc.it
+fe.it
+fermo.it
+ferrara.it
+fg.it
+fi.it
+firenze.it
+florence.it
+fm.it
+foggia.it
+forli-cesena.it
+forlicesena.it
+fr.it
+frosinone.it
+ge.it
+genoa.it
+genova.it
+go.it
+gorizia.it
+gr.it
+grosseto.it
+iglesias-carbonia.it
+iglesiascarbonia.it
+im.it
+imperia.it
+is.it
+isernia.it
+kr.it
+la-spezia.it
+laquila.it
+laspezia.it
+latina.it
+lc.it
+le.it
+lecce.it
+lecco.it
+li.it
+livorno.it
+lo.it
+lodi.it
+lt.it
+lu.it
+lucca.it
+macerata.it
+mantova.it
+massa-carrara.it
+massacarrara.it
+matera.it
+mb.it
+mc.it
+me.it
+medio-campidano.it
+mediocampidano.it
+messina.it
+mi.it
+milan.it
+milano.it
+mn.it
+mo.it
+modena.it
+monza-brianza.it
+monza-e-della-brianza.it
+monza.it
+monzabrianza.it
+monzaebrianza.it
+monzaedellabrianza.it
+ms.it
+mt.it
+na.it
+naples.it
+napoli.it
+no.it
+novara.it
+nu.it
+nuoro.it
+og.it
+ogliastra.it
+olbia-tempio.it
+olbiatempio.it
+or.it
+oristano.it
+ot.it
+pa.it
+padova.it
+padua.it
+palermo.it
+parma.it
+pavia.it
+pc.it
+pd.it
+pe.it
+perugia.it
+pesaro-urbino.it
+pesarourbino.it
+pescara.it
+pg.it
+pi.it
+piacenza.it
+pisa.it
+pistoia.it
+pn.it
+po.it
+pordenone.it
+potenza.it
+pr.it
+prato.it
+pt.it
+pu.it
+pv.it
+pz.it
+ra.it
+ragusa.it
+ravenna.it
+rc.it
+re.it
+reggio-calabria.it
+reggio-emilia.it
+reggiocalabria.it
+reggioemilia.it
+rg.it
+ri.it
+rieti.it
+rimini.it
+rm.it
+rn.it
+ro.it
+roma.it
+rome.it
+rovigo.it
+sa.it
+salerno.it
+sassari.it
+savona.it
+si.it
+siena.it
+siracusa.it
+so.it
+sondrio.it
+sp.it
+sr.it
+ss.it
+suedtirol.it
+sv.it
+ta.it
+taranto.it
+te.it
+tempio-olbia.it
+tempioolbia.it
+teramo.it
+terni.it
+tn.it
+to.it
+torino.it
+tp.it
+tr.it
+trani-andria-barletta.it
+trani-barletta-andria.it
+traniandriabarletta.it
+tranibarlettaandria.it
+trapani.it
+trentino.it
+trento.it
+treviso.it
+trieste.it
+ts.it
+turin.it
+tv.it
+ud.it
+udine.it
+urbino-pesaro.it
+urbinopesaro.it
+va.it
+varese.it
+vb.it
+vc.it
+ve.it
+venezia.it
+venice.it
+verbania.it
+vercelli.it
+verona.it
+vi.it
+vibo-valentia.it
+vibovalentia.it
+vicenza.it
+viterbo.it
+vr.it
+vs.it
+vt.it
+vv.it
+
+// je : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+je
+co.je
+net.je
+org.je
+
+// jm : http://www.com.jm/register.html
+*.jm
+
+// jo : http://www.dns.jo/Registration_policy.aspx
+jo
+com.jo
+org.jo
+net.jo
+edu.jo
+sch.jo
+gov.jo
+mil.jo
+name.jo
+
+// jobs : http://en.wikipedia.org/wiki/.jobs
+jobs
+
+// jp : http://en.wikipedia.org/wiki/.jp
+// http://jprs.co.jp/en/jpdomain.html
+// Submitted by registry <in...@jprs.jp> 2014-10-30
+jp
+// jp organizational type names
+ac.jp
+ad.jp
+co.jp
+ed.jp
+go.jp
+gr.jp
+lg.jp
+ne.jp
+or.jp
+// jp prefecture type names
+aichi.jp
+akita.jp
+aomori.jp
+chiba.jp
+ehime.jp
+fukui.jp
+fukuoka.jp
+fukushima.jp
+gifu.jp
+gunma.jp
+hiroshima.jp
+hokkaido.jp
+hyogo.jp
+ibaraki.jp
+ishikawa.jp
+iwate.jp
+kagawa.jp
+kagoshima.jp
+kanagawa.jp
+kochi.jp
+kumamoto.jp
+kyoto.jp
+mie.jp
+miyagi.jp
+miyazaki.jp
+nagano.jp
+nagasaki.jp
+nara.jp
+niigata.jp
+oita.jp
+okayama.jp
+okinawa.jp
+osaka.jp
+saga.jp
+saitama.jp
+shiga.jp
+shimane.jp
+shizuoka.jp
+tochigi.jp
+tokushima.jp
+tokyo.jp
+tottori.jp
+toyama.jp
+wakayama.jp
+yamagata.jp
+yamaguchi.jp
+yamanashi.jp
+栃木.jp
+愛知.jp
+愛媛.jp
+兵庫.jp
+熊本.jp
+茨城.jp
+北海道.jp
+千葉.jp
+和歌山.jp
+長崎.jp
+長野.jp
+新潟.jp
+青森.jp
+静岡.jp
+東京.jp
+石川.jp
+埼玉.jp
+三重.jp
+京都.jp
+佐賀.jp
+大分.jp
+大阪.jp
+奈良.jp
+宮城.jp
+宮崎.jp
+富山.jp
+山口.jp
+山形.jp
+山梨.jp
+岩手.jp
+岐阜.jp
+岡山.jp
+島根.jp
+広島.jp
+徳島.jp
+沖縄.jp
+滋賀.jp
+神奈川.jp
+福井.jp
+福岡.jp
+福島.jp
+秋田.jp
+群馬.jp
+香川.jp
+高知.jp
+鳥取.jp
+鹿児島.jp
+// jp geographic type names
+// http://jprs.jp/doc/rule/saisoku-1.html
+*.kawasaki.jp
+*.kitakyushu.jp
+*.kobe.jp
+*.nagoya.jp
+*.sapporo.jp
+*.sendai.jp
+*.yokohama.jp
+!city.kawasaki.jp
+!city.kitakyushu.jp
+!city.kobe.jp
+!city.nagoya.jp
+!city.sapporo.jp
+!city.sendai.jp
+!city.yokohama.jp
+// 4th level registration
+aisai.aichi.jp
+ama.aichi.jp
+anjo.aichi.jp
+asuke.aichi.jp
+chiryu.aichi.jp
+chita.aichi.jp
+fuso.aichi.jp
+gamagori.aichi.jp
+handa.aichi.jp
+hazu.aichi.jp
+hekinan.aichi.jp
+higashiura.aichi.jp
+ichinomiya.aichi.jp
+inazawa.aichi.jp
+inuyama.aichi.jp
+isshiki.aichi.jp
+iwakura.aichi.jp
+kanie.aichi.jp
+kariya.aichi.jp
+kasugai.aichi.jp
+kira.aichi.jp
+kiyosu.aichi.jp
+komaki.aichi.jp
+konan.aichi.jp
+kota.aichi.jp
+mihama.aichi.jp
+miyoshi.aichi.jp
+nishio.aichi.jp
+nisshin.aichi.jp
+obu.aichi.jp
+oguchi.aichi.jp
+oharu.aichi.jp
+okazaki.aichi.jp
+owariasahi.aichi.jp
+seto.aichi.jp
+shikatsu.aichi.jp
+shinshiro.aichi.jp
+shitara.aichi.jp
+tahara.aichi.jp
+takahama.aichi.jp
+tobishima.aichi.jp
+toei.aichi.jp
+togo.aichi.jp
+tokai.aichi.jp
+tokoname.aichi.jp
+toyoake.aichi.jp
+toyohashi.aichi.jp
+toyokawa.aichi.jp
+toyone.aichi.jp
+toyota.aichi.jp
+tsushima.aichi.jp
+yatomi.aichi.jp
+akita.akita.jp
+daisen.akita.jp
+fujisato.akita.jp
+gojome.akita.jp
+hachirogata.akita.jp
+happou.akita.jp
+higashinaruse.akita.jp
+honjo.akita.jp
+honjyo.akita.jp
+ikawa.akita.jp
+kamikoani.akita.jp
+kamioka.akita.jp
+katagami.akita.jp
+kazuno.akita.jp
+kitaakita.akita.jp
+kosaka.akita.jp
+kyowa.akita.jp
+misato.akita.jp
+mitane.akita.jp
+moriyoshi.akita.jp
+nikaho.akita.jp
+noshiro.akita.jp
+odate.akita.jp
+oga.akita.jp
+ogata.akita.jp
+semboku.akita.jp
+yokote.akita.jp
+yurihonjo.akita.jp
+aomori.aomori.jp
+gonohe.aomori.jp
+hachinohe.aomori.jp
+hashikami.aomori.jp
+hiranai.aomori.jp
+hirosaki.aomori.jp
+itayanagi.aomori.jp
+kuroishi.aomori.jp
+misawa.aomori.jp
+mutsu.aomori.jp
+nakadomari.aomori.jp
+noheji.aomori.jp
+oirase.aomori.jp
+owani.aomori.jp
+rokunohe.aomori.jp
+sannohe.aomori.jp
+shichinohe.aomori.jp
+shingo.aomori.jp
+takko.aomori.jp
+towada.aomori.jp
+tsugaru.aomori.jp
+tsuruta.aomori.jp
+abiko.chiba.jp
+asahi.chiba.jp
+chonan.chiba.jp
+chosei.chiba.jp
+choshi.chiba.jp
+chuo.chiba.jp
+funabashi.chiba.jp
+futtsu.chiba.jp
+hanamigawa.chiba.jp
+ichihara.chiba.jp
+ichikawa.chiba.jp
+ichinomiya.chiba.jp
+inzai.chiba.jp
+isumi.chiba.jp
+kamagaya.chiba.jp
+kamogawa.chiba.jp
+kashiwa.chiba.jp
+katori.chiba.jp
+katsuura.chiba.jp
+kimitsu.chiba.jp
+kisarazu.chiba.jp
+kozaki.chiba.jp
+kujukuri.chiba.jp
+kyonan.chiba.jp
+matsudo.chiba.jp
+midori.chiba.jp
+mihama.chiba.jp
+minamiboso.chiba.jp
+mobara.chiba.jp
+mutsuzawa.chiba.jp
+nagara.chiba.jp
+nagareyama.chiba.jp
+narashino.chiba.jp
+narita.chiba.jp
+noda.chiba.jp
+oamishirasato.chiba.jp
+omigawa.chiba.jp
+onjuku.chiba.jp
+otaki.chiba.jp
+sakae.chiba.jp
+sakura.chiba.jp
+shimofusa.chiba.jp
+shirako.chiba.jp
+shiroi.chiba.jp
+shisui.chiba.jp
+sodegaura.chiba.jp
+sosa.chiba.jp
+tako.chiba.jp
+tateyama.chiba.jp
+togane.chiba.jp
+tohnosho.chiba.jp
+tomisato.chiba.jp
+urayasu.chiba.jp
+yachimata.chiba.jp
+yachiyo.chiba.jp
+yokaichiba.chiba.jp
+yokoshibahikari.chiba.jp
+yotsukaido.chiba.jp
+ainan.ehime.jp
+honai.ehime.jp
+ikata.ehime.jp
+imabari.ehime.jp
+iyo.ehime.jp
+kamijima.ehime.jp
+kihoku.ehime.jp
+kumakogen.ehime.jp
+masaki.ehime.jp
+matsuno.ehime.jp
+matsuyama.ehime.jp
+namikata.ehime.jp
+niihama.ehime.jp
+ozu.ehime.jp
+saijo.ehime.jp
+seiyo.ehime.jp
+shikokuchuo.ehime.jp
+tobe.ehime.jp
+toon.ehime.jp
+uchiko.ehime.jp
+uwajima.ehime.jp
+yawatahama.ehime.jp
+echizen.fukui.jp
+eiheiji.fukui.jp
+fukui.fukui.jp
+ikeda.fukui.jp
+katsuyama.fukui.jp
+mihama.fukui.jp
+minamiechizen.fukui.jp
+obama.fukui.jp
+ohi.fukui.jp
+ono.fukui.jp
+sabae.fukui.jp
+sakai.fukui.jp
+takahama.fukui.jp
+tsuruga.fukui.jp
+wakasa.fukui.jp
+ashiya.fukuoka.jp
+buzen.fukuoka.jp
+chikugo.fukuoka.jp
+chikuho.fukuoka.jp
+chikujo.fukuoka.jp
+chikushino.fukuoka.jp
+chikuzen.fukuoka.jp
+chuo.fukuoka.jp
+dazaifu.fukuoka.jp
+fukuchi.fukuoka.jp
+hakata.fukuoka.jp
+higashi.fukuoka.jp
+hirokawa.fukuoka.jp
+hisayama.fukuoka.jp
+iizuka.fukuoka.jp
+inatsuki.fukuoka.jp
+kaho.fukuoka.jp
+kasuga.fukuoka.jp
+kasuya.fukuoka.jp
+kawara.fukuoka.jp
+keisen.fukuoka.jp
+koga.fukuoka.jp
+kurate.fukuoka.jp
+kurogi.fukuoka.jp
+kurume.fukuoka.jp
+minami.fukuoka.jp
+miyako.fukuoka.jp
+miyama.fukuoka.jp
+miyawaka.fukuoka.jp
+mizumaki.fukuoka.jp
+munakata.fukuoka.jp
+nakagawa.fukuoka.jp
+nakama.fukuoka.jp
+nishi.fukuoka.jp
+nogata.fukuoka.jp
+ogori.fukuoka.jp
+okagaki.fukuoka.jp
+okawa.fukuoka.jp
+oki.fukuoka.jp
+omuta.fukuoka.jp
+onga.fukuoka.jp
+onojo.fukuoka.jp
+oto.fukuoka.jp
+saigawa.fukuoka.jp
+sasaguri.fukuoka.jp
+shingu.fukuoka.jp
+shinyoshitomi.fukuoka.jp
+shonai.fukuoka.jp
+soeda.fukuoka.jp
+sue.fukuoka.jp
+tachiarai.fukuoka.jp
+tagawa.fukuoka.jp
+takata.fukuoka.jp
+toho.fukuoka.jp
+toyotsu.fukuoka.jp
+tsuiki.fukuoka.jp
+ukiha.fukuoka.jp
+umi.fukuoka.jp
+usui.fukuoka.jp
+yamada.fukuoka.jp
+yame.fukuoka.jp
+yanagawa.fukuoka.jp
+yukuhashi.fukuoka.jp
+aizubange.fukushima.jp
+aizumisato.fukushima.jp
+aizuwakamatsu.fukushima.jp
+asakawa.fukushima.jp
+bandai.fukushima.jp
+date.fukushima.jp
+fukushima.fukushima.jp
+furudono.fukushima.jp
+futaba.fukushima.jp
+hanawa.fukushima.jp
+higashi.fukushima.jp
+hirata.fukushima.jp
+hirono.fukushima.jp
+iitate.fukushima.jp
+inawashiro.fukushima.jp
+ishikawa.fukushima.jp
+iwaki.fukushima.jp
+izumizaki.fukushima.jp
+kagamiishi.fukushima.jp
+kaneyama.fukushima.jp
+kawamata.fukushima.jp
+kitakata.fukushima.jp
+kitashiobara.fukushima.jp
+koori.fukushima.jp
+koriyama.fukushima.jp
+kunimi.fukushima.jp
+miharu.fukushima.jp
+mishima.fukushima.jp
+namie.fukushima.jp
+nango.fukushima.jp
+nishiaizu.fukushima.jp
+nishigo.fukushima.jp
+okuma.fukushima.jp
+omotego.fukushima.jp
+ono.fukushima.jp
+otama.fukushima.jp
+samegawa.fukushima.jp
+shimogo.fukushima.jp
+shirakawa.fukushima.jp
+showa.fukushima.jp
+soma.fukushima.jp
+sukagawa.fukushima.jp
+taishin.fukushima.jp
+tamakawa.fukushima.jp
+tanagura.fukushima.jp
+tenei.fukushima.jp
+yabuki.fukushima.jp
+yamato.fukushima.jp
+yamatsuri.fukushima.jp
+yanaizu.fukushima.jp
+yugawa.fukushima.jp
+anpachi.gifu.jp
+ena.gifu.jp
+gifu.gifu.jp
+ginan.gifu.jp
+godo.gifu.jp
+gujo.gifu.jp
+hashima.gifu.jp
+hichiso.gifu.jp
+hida.gifu.jp
+higashishirakawa.gifu.jp
+ibigawa.gifu.jp
+ikeda.gifu.jp
+kakamigahara.gifu.jp
+kani.gifu.jp
+kasahara.gifu.jp
+kasamatsu.gifu.jp
+kawaue.gifu.jp
+kitagata.gifu.jp
+mino.gifu.jp
+minokamo.gifu.jp
+mitake.gifu.jp
+mizunami.gifu.jp
+motosu.gifu.jp
+nakatsugawa.gifu.jp
+ogaki.gifu.jp
+sakahogi.gifu.jp
+seki.gifu.jp
+sekigahara.gifu.jp
+shirakawa.gifu.jp
+tajimi.gifu.jp
+takayama.gifu.jp
+tarui.gifu.jp
+toki.gifu.jp
+tomika.gifu.jp
+wanouchi.gifu.jp
+yamagata.gifu.jp
+yaotsu.gifu.jp
+yoro.gifu.jp
+annaka.gunma.jp
+chiyoda.gunma.jp
+fujioka.gunma.jp
+higashiagatsuma.gunma.jp
+isesaki.gunma.jp
+itakura.gunma.jp
+kanna.gunma.jp
+kanra.gunma.jp
+katashina.gunma.jp
+kawaba.gunma.jp
+kiryu.gunma.jp
+kusatsu.gunma.jp
+maebashi.gunma.jp
+meiwa.gunma.jp
+midori.gunma.jp
+minakami.gunma.jp
+naganohara.gunma.jp
+nakanojo.gunma.jp
+nanmoku.gunma.jp
+numata.gunma.jp
+oizumi.gunma.jp
+ora.gunma.jp
+ota.gunma.jp
+shibukawa.gunma.jp
+shimonita.gunma.jp
+shinto.gunma.jp
+showa.gunma.jp
+takasaki.gunma.jp
+takayama.gunma.jp
+tamamura.gunma.jp
+tatebayashi.gunma.jp
+tomioka.gunma.jp
+tsukiyono.gunma.jp
+tsumagoi.gunma.jp
+ueno.gunma.jp
+yoshioka.gunma.jp
+asaminami.hiroshima.jp
+daiwa.hiroshima.jp
+etajima.hiroshima.jp
+fuchu.hiroshima.jp
+fukuyama.hiroshima.jp
+hatsukaichi.hiroshima.jp
+higashihiroshima.hiroshima.jp
+hongo.hiroshima.jp
+jinsekikogen.hiroshima.jp
+kaita.hiroshima.jp
+kui.hiroshima.jp
+kumano.hiroshima.jp
+kure.hiroshima.jp
+mihara.hiroshima.jp
+miyoshi.hiroshima.jp
+naka.hiroshima.jp
+onomichi.hiroshima.jp
+osakikamijima.hiroshima.jp
+otake.hiroshima.jp
+saka.hiroshima.jp
+sera.hiroshima.jp
+seranishi.hiroshima.jp
+shinichi.hiroshima.jp
+shobara.hiroshima.jp
+takehara.hiroshima.jp
+abashiri.hokkaido.jp
+abira.hokkaido.jp
+aibetsu.hokkaido.jp
+akabira.hokkaido.jp
+akkeshi.hokkaido.jp
+asahikawa.hokkaido.jp
+ashibetsu.hokkaido.jp
+ashoro.hokkaido.jp
+assabu.hokkaido.jp
+atsuma.hokkaido.jp
+bibai.hokkaido.jp
+biei.hokkaido.jp
+bifuka.hokkaido.jp
+bihoro.hokkaido.jp
+biratori.hokkaido.jp
+chippubetsu.hokkaido.jp
+chitose.hokkaido.jp
+date.hokkaido.jp
+ebetsu.hokkaido.jp
+embetsu.hokkaido.jp
+eniwa.hokkaido.jp
+erimo.hokkaido.jp
+esan.hokkaido.jp
+esashi.hokkaido.jp
+fukagawa.hokkaido.jp
+fukushima.hokkaido.jp
+furano.hokkaido.jp
+furubira.hokkaido.jp
+haboro.hokkaido.jp
+hakodate.hokkaido.jp
+hamatonbetsu.hokkaido.jp
+hidaka.hokkaido.jp
+higashikagura.hokkaido.jp
+higashikawa.hokkaido.jp
+hiroo.hokkaido.jp
+hokuryu.hokkaido.jp
+hokuto.hokkaido.jp
+honbetsu.hokkaido.jp
+horokanai.hokkaido.jp
+horonobe.hokkaido.jp
+ikeda.hokkaido.jp
+imakane.hokkaido.jp
+ishikari.hokkaido.jp
+iwamizawa.hokkaido.jp
+iwanai.hokkaido.jp
+kamifurano.hokkaido.jp
+kamikawa.hokkaido.jp
+kamishihoro.hokkaido.jp
+kamisunagawa.hokkaido.jp
+kamoenai.hokkaido.jp
+kayabe.hokkaido.jp
+kembuchi.hokkaido.jp
+kikonai.hokkaido.jp
+kimobetsu.hokkaido.jp
+kitahiroshima.hokkaido.jp
+kitami.hokkaido.jp
+kiyosato.hokkaido.jp
+koshimizu.hokkaido.jp
+kunneppu.hokkaido.jp
+kuriyama.hokkaido.jp
+kuromatsunai.hokkaido.jp
+kushiro.hokkaido.jp
+kutchan.hokkaido.jp
+kyowa.hokkaido.jp
+mashike.hokkaido.jp
+matsumae.hokkaido.jp
+mikasa.hokkaido.jp
+minamifurano.hokkaido.jp
+mombetsu.hokkaido.jp
+moseushi.hokkaido.jp
+mukawa.hokkaido.jp
+muroran.hokkaido.jp
+naie.hokkaido.jp
+nakagawa.hokkaido.jp
+nakasatsunai.hokkaido.jp
+nakatombetsu.hokkaido.jp
+nanae.hokkaido.jp
+nanporo.hokkaido.jp
+nayoro.hokkaido.jp
+nemuro.hokkaido.jp
+niikappu.hokkaido.jp
+niki.hokkaido.jp
+nishiokoppe.hokkaido.jp
+noboribetsu.hokkaido.jp
+numata.hokkaido.jp
+obihiro.hokkaido.jp
+obira.hokkaido.jp
+oketo.hokkaido.jp
+okoppe.hokkaido.jp
+otaru.hokkaido.jp
+otobe.hokkaido.jp
+otofuke.hokkaido.jp
+otoineppu.hokkaido.jp
+oumu.hokkaido.jp
+ozora.hokkaido.jp
+pippu.hokkaido.jp
+rankoshi.hokkaido.jp
+rebun.hokkaido.jp
+rikubetsu.hokkaido.jp
+rishiri.hokkaido.jp
+rishirifuji.hokkaido.jp
+saroma.hokkaido.jp
+sarufutsu.hokkaido.jp
+shakotan.hokkaido.jp
+shari.hokkaido.jp
+shibecha.hokkaido.jp
+shibetsu.hokkaido.jp
+shikabe.hokkaido.jp
+shikaoi.hokkaido.jp
+shimamaki.hokkaido.jp
+shimizu.hokkaido.jp
+shimokawa.hokkaido.jp
+shinshinotsu.hokkaido.jp
+shintoku.hokkaido.jp
+shiranuka.hokkaido.jp
+shiraoi.hokkaido.jp
+shiriuchi.hokkaido.jp
+sobetsu.hokkaido.jp
+sunagawa.hokkaido.jp
+taiki.hokkaido.jp
+takasu.hokkaido.jp
+takikawa.hokkaido.jp
+takinoue.hokkaido.jp
+teshikaga.hokkaido.jp
+tobetsu.hokkaido.jp
+tohma.hokkaido.jp
+tomakomai.hokkaido.jp
+tomari.hokkaido.jp
+toya.hokkaido.jp
+toyako.hokkaido.jp
+toyotomi.hokkaido.jp
+toyoura.hokkaido.jp
+tsubetsu.hokkaido.jp
+tsukigata.hokkaido.jp
+urakawa.hokkaido.jp
+urausu.hokkaido.jp
+uryu.hokkaido.jp
+utashinai.hokkaido.jp
+wakkanai.hokkaido.jp
+wassamu.hokkaido.jp
+yakumo.hokkaido.jp
+yoichi.hokkaido.jp
+aioi.hyogo.jp
+akashi.hyogo.jp
+ako.hyogo.jp
+amagasaki.hyogo.jp
+aogaki.hyogo.jp
+asago.hyogo.jp
+ashiya.hyogo.jp
+awaji.hyogo.jp
+fukusaki.hyogo.jp
+goshiki.hyogo.jp
+harima.hyogo.jp
+himeji.hyogo.jp
+ichikawa.hyogo.jp
+inagawa.hyogo.jp
+itami.hyogo.jp
+kakogawa.hyogo.jp
+kamigori.hyogo.jp
+kamikawa.hyogo.jp
+kasai.hyogo.jp
+kasuga.hyogo.jp
+kawanishi.hyogo.jp
+miki.hyogo.jp
+minamiawaji.hyogo.jp
+nishinomiya.hyogo.jp
+nishiwaki.hyogo.jp
+ono.hyogo.jp
+sanda.hyogo.jp
+sannan.hyogo.jp
+sasayama.hyogo.jp
+sayo.hyogo.jp
+shingu.hyogo.jp
+shinonsen.hyogo.jp
+shiso.hyogo.jp
+sumoto.hyogo.jp
+taishi.hyogo.jp
+taka.hyogo.jp
+takarazuka.hyogo.jp
+takasago.hyogo.jp
+takino.hyogo.jp
+tamba.hyogo.jp
+tatsuno.hyogo.jp
+toyooka.hyogo.jp
+yabu.hyogo.jp
+yashiro.hyogo.jp
+yoka.hyogo.jp
+yokawa.hyogo.jp
+ami.ibaraki.jp
+asahi.ibaraki.jp
+bando.ibaraki.jp
+chikusei.ibaraki.jp
+daigo.ibaraki.jp
+fujishiro.ibaraki.jp
+hitachi.ibaraki.jp
+hitachinaka.ibaraki.jp
+hitachiomiya.ibaraki.jp
+hitachiota.ibaraki.jp
+ibaraki.ibaraki.jp
+ina.ibaraki.jp
+inashiki.ibaraki.jp
+itako.ibaraki.jp
+iwama.ibaraki.jp
+joso.ibaraki.jp
+kamisu.ibaraki.jp
+kasama.ibaraki.jp
+kashima.ibaraki.jp
+kasumigaura.ibaraki.jp
+koga.ibaraki.jp
+miho.ibaraki.jp
+mito.ibaraki.jp
+moriya.ibaraki.jp
+naka.ibaraki.jp
+namegata.ibaraki.jp
+oarai.ibaraki.jp
+ogawa.ibaraki.jp
+omitama.ibaraki.jp
+ryugasaki.ibaraki.jp
+sakai.ibaraki.jp
+sakuragawa.ibaraki.jp
+shimodate.ibaraki.jp
+shimotsuma.ibaraki.jp
+shirosato.ibaraki.jp
+sowa.ibaraki.jp
+suifu.ibaraki.jp
+takahagi.ibaraki.jp
+tamatsukuri.ibaraki.jp
+tokai.ibaraki.jp
+tomobe.ibaraki.jp
+tone.ibaraki.jp
+toride.ibaraki.jp
+tsuchiura.ibaraki.jp
+tsukuba.ibaraki.jp
+uchihara.ibaraki.jp
+ushiku.ibaraki.jp
+yachiyo.ibaraki.jp
+yamagata.ibaraki.jp
+yawara.ibaraki.jp
+yuki.ibaraki.jp
+anamizu.ishikawa.jp
+hakui.ishikawa.jp
+hakusan.ishikawa.jp
+kaga.ishikawa.jp
+kahoku.ishikawa.jp
+kanazawa.ishikawa.jp
+kawakita.ishikawa.jp
+komatsu.ishikawa.jp
+nakanoto.ishikawa.jp
+nanao.ishikawa.jp
+nomi.ishikawa.jp
+nonoichi.ishikawa.jp
+noto.ishikawa.jp
+shika.ishikawa.jp
+suzu.ishikawa.jp
+tsubata.ishikawa.jp
+tsurugi.ishikawa.jp
+uchinada.ishikawa.jp
+wajima.ishikawa.jp
+fudai.iwate.jp
+fujisawa.iwate.jp
+hanamaki.iwate.jp
+hiraizumi.iwate.jp
+hirono.iwate.jp
+ichinohe.iwate.jp
+ichinoseki.iwate.jp
+iwaizumi.iwate.jp
+iwate.iwate.jp
+joboji.iwate.jp
+kamaishi.iwate.jp
+kanegasaki.iwate.jp
+karumai.iwate.jp
+kawai.iwate.jp
+kitakami.iwate.jp
+kuji.iwate.jp
+kunohe.iwate.jp
+kuzumaki.iwate.jp
+miyako.iwate.jp
+mizusawa.iwate.jp
+morioka.iwate.jp
+ninohe.iwate.jp
+noda.iwate.jp
+ofunato.iwate.jp
+oshu.iwate.jp
+otsuchi.iwate.jp
+rikuzentakata.iwate.jp
+shiwa.iwate.jp
+shizukuishi.iwate.jp
+sumita.iwate.jp
+tanohata.iwate.jp
+tono.iwate.jp
+yahaba.iwate.jp
+yamada.iwate.jp
+ayagawa.kagawa.jp
+higashikagawa.kagawa.jp
+kanonji.kagawa.jp
+kotohira.kagawa.jp
+manno.kagawa.jp
+marugame.kagawa.jp
+mitoyo.kagawa.jp
+naoshima.kagawa.jp
+sanuki.kagawa.jp
+tadotsu.kagawa.jp
+takamatsu.kagawa.jp
+tonosho.kagawa.jp
+uchinomi.kagawa.jp
+utazu.kagawa.jp
+zentsuji.kagawa.jp
+akune.kagoshima.jp
+amami.kagoshima.jp
+hioki.kagoshima.jp
+isa.kagoshima.jp
+isen.kagoshima.jp
+izumi.kagoshima.jp
+kagoshima.kagoshima.jp
+kanoya.kagoshima.jp
+kawanabe.kagoshima.jp
+kinko.kagoshima.jp
+kouyama.kagoshima.jp
+makurazaki.kagoshima.jp
+matsumoto.kagoshima.jp
+minamitane.kagoshima.jp
+nakatane.kagoshima.jp
+nishinoomote.kagoshima.jp
+satsumasendai.kagoshima.jp
+soo.kagoshima.jp
+tarumizu.kagoshima.jp
+yusui.kagoshima.jp
+aikawa.kanagawa.jp
+atsugi.kanagawa.jp
+ayase.kanagawa.jp
+chigasaki.kanagawa.jp
+ebina.kanagawa.jp
+fujisawa.kanagawa.jp
+hadano.kanagawa.jp
+hakone.kanagawa.jp
+hiratsuka.kanagawa.jp
+isehara.kanagawa.jp
+kaisei.kanagawa.jp
+kamakura.kanagawa.jp
+kiyokawa.kanagawa.jp
+matsuda.kanagawa.jp
+minamiashigara.kanagawa.jp
+miura.kanagawa.jp
+nakai.kanagawa.jp
+ninomiya.kanagawa.jp
+odawara.kanagawa.jp
+oi.kanagawa.jp
+oiso.kanagawa.jp
+sagamihara.kanagawa.jp
+samukawa.kanagawa.jp
+tsukui.kanagawa.jp
+yamakita.kanagawa.jp
+yamato.kanagawa.jp
+yokosuka.kanagawa.jp
+yugawara.kanagawa.jp
+zama.kanagawa.jp
+zushi.kanagawa.jp
+aki.kochi.jp
+geisei.kochi.jp
+hidaka.kochi.jp
+higashitsuno.kochi.jp
+ino.kochi.jp
+kagami.kochi.jp
+kami.kochi.jp
+kitagawa.kochi.jp
+kochi.kochi.jp
+mihara.kochi.jp
+motoyama.kochi.jp
+muroto.kochi.jp
+nahari.kochi.jp
+nakamura.kochi.jp
+nankoku.kochi.jp
+nishitosa.kochi.jp
+niyodogawa.kochi.jp
+ochi.kochi.jp
+okawa.kochi.jp
+otoyo.kochi.jp
+otsuki.kochi.jp
+sakawa.kochi.jp
+sukumo.kochi.jp
+susaki.kochi.jp
+tosa.kochi.jp
+tosashimizu.kochi.jp
+toyo.kochi.jp
+tsuno.kochi.jp
+umaji.kochi.jp
+yasuda.kochi.jp
+yusuhara.kochi.jp
+amakusa.kumamoto.jp
+arao.kumamoto.jp
+aso.kumamoto.jp
+choyo.kumamoto.jp
+gyokuto.kumamoto.jp
+hitoyoshi.kumamoto.jp
+kamiamakusa.kumamoto.jp
+kashima.kumamoto.jp
+kikuchi.kumamoto.jp
+kosa.kumamoto.jp
+kumamoto.kumamoto.jp
+mashiki.kumamoto.jp
+mifune.kumamoto.jp
+minamata.kumamoto.jp
+minamioguni.kumamoto.jp
+nagasu.kumamoto.jp
+nishihara.kumamoto.jp
+oguni.kumamoto.jp
+ozu.kumamoto.jp
+sumoto.kumamoto.jp
+takamori.kumamoto.jp
+uki.kumamoto.jp
+uto.kumamoto.jp
+yamaga.kumamoto.jp
+yamato.kumamoto.jp
+yatsushiro.kumamoto.jp
+ayabe.kyoto.jp
+fukuchiyama.kyoto.jp
+higashiyama.kyoto.jp
+ide.kyoto.jp
+ine.kyoto.jp
+joyo.kyoto.jp
+kameoka.kyoto.jp
+kamo.kyoto.jp
+kita.kyoto.jp
+kizu.kyoto.jp
+kumiyama.kyoto.jp
+kyotamba.kyoto.jp
+kyotanabe.kyoto.jp
+kyotango.kyoto.jp
+maizuru.kyoto.jp
+minami.kyoto.jp
+minamiyamashiro.kyoto.jp
+miyazu.kyoto.jp
+muko.kyoto.jp
+nagaokakyo.kyoto.jp
+nakagyo.kyoto.jp
+nantan.kyoto.jp
+oyamazaki.kyoto.jp
+sakyo.kyoto.jp
+seika.kyoto.jp
+tanabe.kyoto.jp
+uji.kyoto.jp
+ujitawara.kyoto.jp
+wazuka.kyoto.jp
+yamashina.kyoto.jp
+yawata.kyoto.jp
+asahi.mie.jp
+inabe.mie.jp
+ise.mie.jp
+kameyama.mie.jp
+kawagoe.mie.jp
+kiho.mie.jp
+kisosaki.mie.jp
+kiwa.mie.jp
+komono.mie.jp
+kumano.mie.jp
+kuwana.mie.jp
+matsusaka.mie.jp
+meiwa.mie.jp
+mihama.mie.jp
+minamiise.mie.jp
+misugi.mie.jp
+miyama.mie.jp
+nabari.mie.jp
+shima.mie.jp
+suzuka.mie.jp
+tado.mie.jp
+taiki.mie.jp
+taki.mie.jp
+tamaki.mie.jp
+toba.mie.jp
+tsu.mie.jp
+udono.mie.jp
+ureshino.mie.jp
+watarai.mie.jp
+yokkaichi.mie.jp
+furukawa.miyagi.jp
+higashimatsushima.miyagi.jp
+ishinomaki.miyagi.jp
+iwanuma.miyagi.jp
+kakuda.miyagi.jp
+kami.miyagi.jp
+kawasaki.miyagi.jp
+kesennuma.miyagi.jp
+marumori.miyagi.jp
+matsushima.miyagi.jp
+minamisanriku.miyagi.jp
+misato.miyagi.jp
+murata.miyagi.jp
+natori.miyagi.jp
+ogawara.miyagi.jp
+ohira.miyagi.jp
+onagawa.miyagi.jp
+osaki.miyagi.jp
+rifu.miyagi.jp
+semine.miyagi.jp
+shibata.miyagi.jp
+shichikashuku.miyagi.jp
+shikama.miyagi.jp
+shiogama.miyagi.jp
+shiroishi.miyagi.jp
+tagajo.miyagi.jp
+taiwa.miyagi.jp
+tome.miyagi.jp
+tomiya.miyagi.jp
+wakuya.miyagi.jp
+watari.miyagi.jp
+yamamoto.miyagi.jp
+zao.miyagi.jp
+aya.miyazaki.jp
+ebino.miyazaki.jp
+gokase.miyazaki.jp
+hyuga.miyazaki.jp
+kadogawa.miyazaki.jp
+kawaminami.miyazaki.jp
+kijo.miyazaki.jp
+kitagawa.miyazaki.jp
+kitakata.miyazaki.jp
+kitaura.miyazaki.jp
+kobayashi.miyazaki.jp
+kunitomi.miyazaki.jp
+kushima.miyazaki.jp
+mimata.miyazaki.jp
+miyakonojo.miyazaki.jp
+miyazaki.miyazaki.jp
+morotsuka.miyazaki.jp
+nichinan.miyazaki.jp
+nishimera.miyazaki.jp
+nobeoka.miyazaki.jp
+saito.miyazaki.jp
+shiiba.miyazaki.jp
+shintomi.miyazaki.jp
+takaharu.miyazaki.jp
+takanabe.miyazaki.jp
+takazaki.miyazaki.jp
+tsuno.miyazaki.jp
+achi.nagano.jp
+agematsu.nagano.jp
+anan.nagano.jp
+aoki.nagano.jp
+asahi.nagano.jp
+azumino.nagano.jp
+chikuhoku.nagano.jp
+chikuma.nagano.jp
+chino.nagano.jp
+fujimi.nagano.jp
+hakuba.nagano.jp
+hara.nagano.jp
+hiraya.nagano.jp
+iida.nagano.jp
+iijima.nagano.jp
+iiyama.nagano.jp
+iizuna.nagano.jp
+ikeda.nagano.jp
+ikusaka.nagano.jp
+ina.nagano.jp
+karuizawa.nagano.jp
+kawakami.nagano.jp
+kiso.nagano.jp
+kisofukushima.nagano.jp
+kitaaiki.nagano.jp
+komagane.nagano.jp
+komoro.nagano.jp
+matsukawa.nagano.jp
+matsumoto.nagano.jp
+miasa.nagano.jp
+minamiaiki.nagano.jp
+minamimaki.nagano.jp
+minamiminowa.nagano.jp
+minowa.nagano.jp
+miyada.nagano.jp
+miyota.nagano.jp
+mochizuki.nagano.jp
+nagano.nagano.jp
+nagawa.nagano.jp
+nagiso.nagano.jp
+nakagawa.nagano.jp
+nakano.nagano.jp
+nozawaonsen.nagano.jp
+obuse.nagano.jp
+ogawa.nagano.jp
+okaya.nagano.jp
+omachi.nagano.jp
+omi.nagano.jp
+ookuwa.nagano.jp
+ooshika.nagano.jp
+otaki.nagano.jp
+otari.nagano.jp
+sakae.nagano.jp
+sakaki.nagano.jp
+saku.nagano.jp
+sakuho.nagano.jp
+shimosuwa.nagano.jp
+shinanomachi.nagano.jp
+shiojiri.nagano.jp
+suwa.nagano.jp
+suzaka.nagano.jp
+takagi.nagano.jp
+takamori.nagano.jp
+takayama.nagano.jp
+tateshina.nagano.jp
+tatsuno.nagano.jp
+togakushi.nagano.jp
+togura.nagano.jp
+tomi.nagano.jp
+ueda.nagano.jp
+wada.nagano.jp
+yamagata.nagano.jp
+yamanouchi.nagano.jp
+yasaka.nagano.jp
+yasuoka.nagano.jp
+chijiwa.nagasaki.jp
+futsu.nagasaki.jp
+goto.nagasaki.jp
+hasami.nagasaki.jp
+hirado.nagasaki.jp
+iki.nagasaki.jp
+isahaya.nagasaki.jp
+kawatana.nagasaki.jp
+kuchinotsu.nagasaki.jp
+matsuura.nagasaki.jp
+nagasaki.nagasaki.jp
+obama.nagasaki.jp
+omura.nagasaki.jp
+oseto.nagasaki.jp
+saikai.nagasaki.jp
+sasebo.nagasaki.jp
+seihi.nagasaki.jp
+shimabara.nagasaki.jp
+shinkamigoto.nagasaki.jp
+togitsu.nagasaki.jp
+tsushima.nagasaki.jp
+unzen.nagasaki.jp
+ando.nara.jp
+gose.nara.jp
+heguri.nara.jp
+higashiyoshino.nara.jp
+ikaruga.nara.jp
+ikoma.nara.jp
+kamikitayama.nara.jp
+kanmaki.nara.jp
+kashiba.nara.jp
+kashihara.nara.jp
+katsuragi.nara.jp
+kawai.nara.jp
+kawakami.nara.jp
+kawanishi.nara.jp
+koryo.nara.jp
+kurotaki.nara.jp
+mitsue.nara.jp
+miyake.nara.jp
+nara.nara.jp
+nosegawa.nara.jp
+oji.nara.jp
+ouda.nara.jp
+oyodo.nara.jp
+sakurai.nara.jp
+sango.nara.jp
+shimoichi.nara.jp
+shimokitayama.nara.jp
+shinjo.nara.jp
+soni.nara.jp
+takatori.nara.jp
+tawaramoto.nara.jp
+tenkawa.nara.jp
+tenri.nara.jp
+uda.nara.jp
+yamatokoriyama.nara.jp
+yamatotakada.nara.jp
+yamazoe.nara.jp
+yoshino.nara.jp
+aga.niigata.jp
+agano.niigata.jp
+gosen.niigata.jp
+itoigawa.niigata.jp
+izumozaki.niigata.jp
+joetsu.niigata.jp
+kamo.niigata.jp
+kariwa.niigata.jp
+kashiwazaki.niigata.jp
+minamiuonuma.niigata.jp
+mitsuke.niigata.jp
+muika.niigata.jp
+murakami.niigata.jp
+myoko.niigata.jp
+nagaoka.niigata.jp
+niigata.niigata.jp
+ojiya.niigata.jp
+omi.niigata.jp
+sado.niigata.jp
+sanjo.niigata.jp
+seiro.niigata.jp
+seirou.niigata.jp
+sekikawa.niigata.jp
+shibata.niigata.jp
+tagami.niigata.jp
+tainai.niigata.jp
+tochio.niigata.jp
+tokamachi.niigata.jp
+tsubame.niigata.jp
+tsunan.niigata.jp
+uonuma.niigata.jp
+yahiko.niigata.jp
+yoita.niigata.jp
+yuzawa.niigata.jp
+beppu.oita.jp
+bungoono.oita.jp
+bungotakada.oita.jp
+hasama.oita.jp
+hiji.oita.jp
+himeshima.oita.jp
+hita.oita.jp
+kamitsue.oita.jp
+kokonoe.oita.jp
+kuju.oita.jp
+kunisaki.oita.jp
+kusu.oita.jp
+oita.oita.jp
+saiki.oita.jp
+taketa.oita.jp
+tsukumi.oita.jp
+usa.oita.jp
+usuki.oita.jp
+yufu.oita.jp
+akaiwa.okayama.jp
+asakuchi.okayama.jp
+bizen.okayama.jp
+hayashima.okayama.jp
+ibara.okayama.jp
+kagamino.okayama.jp
+kasaoka.okayama.jp
+kibichuo.okayama.jp
+kumenan.okayama.jp
+kurashiki.okayama.jp
+maniwa.okayama.jp
+misaki.okayama.jp
+nagi.okayama.jp
+niimi.okayama.jp
+nishiawakura.okayama.jp
+okayama.okayama.jp
+satosho.okayama.jp
+setouchi.okayama.jp
+shinjo.okayama.jp
+shoo.okayama.jp
+soja.okayama.jp
+takahashi.okayama.jp
+tamano.okayama.jp
+tsuyama.okayama.jp
+wake.okayama.jp
+yakage.okayama.jp
+aguni.okinawa.jp
+ginowan.okinawa.jp
+ginoza.okinawa.jp
+gushikami.okinawa.jp
+haebaru.okinawa.jp
+higashi.okinawa.jp
+hirara.okinawa.jp
+iheya.okinawa.jp
+ishigaki.okinawa.jp
+ishikawa.okinawa.jp
+itoman.okinawa.jp
+izena.okinawa.jp
+kadena.okinawa.jp
+kin.okinawa.jp
+kitadaito.okinawa.jp
+kitanakagusuku.okinawa.jp
+kumejima.okinawa.jp
+kunigami.okinawa.jp
+minamidaito.okinawa.jp
+motobu.okinawa.jp
+nago.okinawa.jp
+naha.okinawa.jp
+nakagusuku.okinawa.jp
+nakijin.okinawa.jp
+nanjo.okinawa.jp
+nishihara.okinawa.jp
+ogimi.okinawa.jp
+okinawa.okinawa.jp
+onna.okinawa.jp
+shimoji.okinawa.jp
+taketomi.okinawa.jp
+tarama.okinawa.jp
+tokashiki.okinawa.jp
+tomigusuku.okinawa.jp
+tonaki.okinawa.jp
+urasoe.okinawa.jp
+uruma.okinawa.jp
+yaese.okinawa.jp
+yomitan.okinawa.jp
+yonabaru.okinawa.jp
+yonaguni.okinawa.jp
+zamami.okinawa.jp
+abeno.osaka.jp
+chihayaakasaka.osaka.jp
+chuo.osaka.jp
+daito.osaka.jp
+fujiidera.osaka.jp
+habikino.osaka.jp
+hannan.osaka.jp
+higashiosaka.osaka.jp
+higashisumiyoshi.osaka.jp
+higashiyodogawa.osaka.jp
+hirakata.osaka.jp
+ibaraki.osaka.jp
+ikeda.osaka.jp
+izumi.osaka.jp
+izumiotsu.osaka.jp
+izumisano.osaka.jp
+kadoma.osaka.jp
+kaizuka.osaka.jp
+kanan.osaka.jp
+kashiwara.osaka.jp
+katano.osaka.jp
+kawachinagano.osaka.jp
+kishiwada.osaka.jp
+kita.osaka.jp
+kumatori.osaka.jp
+matsubara.osaka.jp
+minato.osaka.jp
+minoh.osaka.jp
+misaki.osaka.jp
+moriguchi.osaka.jp
+neyagawa.osaka.jp
+nishi.osaka.jp
+nose.osaka.jp
+osakasayama.osaka.jp
+sakai.osaka.jp
+sayama.osaka.jp
+sennan.osaka.jp
+settsu.osaka.jp
+shijonawate.osaka.jp
+shimamoto.osaka.jp
+suita.osaka.jp
+tadaoka.osaka.jp
+taishi.osaka.jp
+tajiri.osaka.jp
+takaishi.osaka.jp
+takatsuki.osaka.jp
+tondabayashi.osaka.jp
+toyonaka.osaka.jp
+toyono.osaka.jp
+yao.osaka.jp
+ariake.saga.jp
+arita.saga.jp
+fukudomi.saga.jp
+genkai.saga.jp
+hamatama.saga.jp
+hizen.saga.jp
+imari.saga.jp
+kamimine.saga.jp
+kanzaki.saga.jp
+karatsu.saga.jp
+kashima.saga.jp
+kitagata.saga.jp
+kitahata.saga.jp
+kiyama.saga.jp
+kouhoku.saga.jp
+kyuragi.saga.jp
+nishiarita.saga.jp
+ogi.saga.jp
+omachi.saga.jp
+ouchi.saga.jp
+saga.saga.jp
+shiroishi.saga.jp
+taku.saga.jp
+tara.saga.jp
+tosu.saga.jp
+yoshinogari.saga.jp
+arakawa.saitama.jp
+asaka.saitama.jp
+chichibu.saitama.jp
+fujimi.saitama.jp
+fujimino.saitama.jp
+fukaya.saitama.jp
+hanno.saitama.jp
+hanyu.saitama.jp
+hasuda.saitama.jp
+hatogaya.saitama.jp
+hatoyama.saitama.jp
+hidaka.saitama.jp
+higashichichibu.saitama.jp
+higashimatsuyama.saitama.jp
+honjo.saitama.jp
+ina.saitama.jp
+iruma.saitama.jp
+iwatsuki.saitama.jp
+kamiizumi.saitama.jp
+kamikawa.saitama.jp
+kamisato.saitama.jp
+kasukabe.saitama.jp
+kawagoe.saitama.jp
+kawaguchi.saitama.jp
+kawajima.saitama.jp
+kazo.saitama.jp
+kitamoto.saitama.jp
+koshigaya.saitama.jp
+kounosu.saitama.jp
+kuki.saitama.jp
+kumagaya.saitama.jp
+matsubushi.saitama.jp
+minano.saitama.jp
+misato.saitama.jp
+miyashiro.saitama.jp
+miyoshi.saitama.jp
+moroyama.saitama.jp
+nagatoro.saitama.jp
+namegawa.saitama.jp
+niiza.saitama.jp
+ogano.saitama.jp
+ogawa.saitama.jp
+ogose.saitama.jp
+okegawa.saitama.jp
+omiya.saitama.jp
+otaki.saitama.jp
+ranzan.saitama.jp
+ryokami.saitama.jp
+saitama.saitama.jp
+sakado.saitama.jp
+satte.saitama.jp
+sayama.saitama.jp
+shiki.saitama.jp
+shiraoka.saitama.jp
+soka.saitama.jp
+sugito.saitama.jp
+toda.saitama.jp
+tokigawa.saitama.jp
+tokorozawa.saitama.jp
+tsurugashima.saitama.jp
+urawa.saitama.jp
+warabi.saitama.jp
+yashio.saitama.jp
+yokoze.saitama.jp
+yono.saitama.jp
+yorii.saitama.jp
+yoshida.saitama.jp
+yoshikawa.saitama.jp
+yoshimi.saitama.jp
+aisho.shiga.jp
+gamo.shiga.jp
+higashiomi.shiga.jp
+hikone.shiga.jp
+koka.shiga.jp
+konan.shiga.jp
+kosei.shiga.jp
+koto.shiga.jp
+kusatsu.shiga.jp
+maibara.shiga.jp
+moriyama.shiga.jp
+nagahama.shiga.jp
+nishiazai.shiga.jp
+notogawa.shiga.jp
+omihachiman.shiga.jp
+otsu.shiga.jp
+ritto.shiga.jp
+ryuoh.shiga.jp
+takashima.shiga.jp
+takatsuki.shiga.jp
+torahime.shiga.jp
+toyosato.shiga.jp
+yasu.shiga.jp
+akagi.shimane.jp
+ama.shimane.jp
+gotsu.shimane.jp
+hamada.shimane.jp
+higashiizumo.shimane.jp
+hikawa.shimane.jp
+hikimi.shimane.jp
+izumo.shimane.jp
+kakinoki.shimane.jp
+masuda.shimane.jp
+matsue.shimane.jp
+misato.shimane.jp
+nishinoshima.shimane.jp
+ohda.shimane.jp
+okinoshima.shimane.jp
+okuizumo.shimane.jp
+shimane.shimane.jp
+tamayu.shimane.jp
+tsuwano.shimane.jp
+unnan.shimane.jp
+yakumo.shimane.jp
+yasugi.shimane.jp
+yatsuka.shimane.jp
+arai.shizuoka.jp
+atami.shizuoka.jp
+fuji.shizuoka.jp
+fujieda.shizuoka.jp
+fujikawa.shizuoka.jp
+fujinomiya.shizuoka.jp
+fukuroi.shizuoka.jp
+gotemba.shizuoka.jp
+haibara.shizuoka.jp
+hamamatsu.shizuoka.jp
+higashiizu.shizuoka.jp
+ito.shizuoka.jp
+iwata.shizuoka.jp
+izu.shizuoka.jp
+izunokuni.shizuoka.jp
+kakegawa.shizuoka.jp
+kannami.shizuoka.jp
+kawanehon.shizuoka.jp
+kawazu.shizuoka.jp
+kikugawa.shizuoka.jp
+kosai.shizuoka.jp
+makinohara.shizuoka.jp
+matsuzaki.shizuoka.jp
+minamiizu.shizuoka.jp
+mishima.shizuoka.jp
+morimachi.shizuoka.jp
+nishiizu.shizuoka.jp
+numazu.shizuoka.jp
+omaezaki.shizuoka.jp
+shimada.shizuoka.jp
+shimizu.shizuoka.jp
+shimoda.shizuoka.jp
+shizuoka.shizuoka.jp
+susono.shizuoka.jp
+yaizu.shizuoka.jp
+yoshida.shizuoka.jp
+ashikaga.tochigi.jp
+bato.tochigi.jp
+haga.tochigi.jp
+ichikai.tochigi.jp
+iwafune.tochigi.jp
+kaminokawa.tochigi.jp
+kanuma.tochigi.jp
+karasuyama.tochigi.jp
+kuroiso.tochigi.jp
+mashiko.tochigi.jp
+mibu.tochigi.jp
+moka.tochigi.jp
+motegi.tochigi.jp
+nasu.tochigi.jp
+nasushiobara.tochigi.jp
+nikko.tochigi.jp
+nishikata.tochigi.jp
+nogi.tochigi.jp
+ohira.tochigi.jp
+ohtawara.tochigi.jp
+oyama.tochigi.jp
+sakura.tochigi.jp
+sano.tochigi.jp
+shimotsuke.tochigi.jp
+shioya.tochigi.jp
+takanezawa.tochigi.jp
+tochigi.tochigi.jp
+tsuga.tochigi.jp
+ujiie.tochigi.jp
+utsunomiya.tochigi.jp
+yaita.tochigi.jp
+aizumi.tokushima.jp
+anan.tokushima.jp
+ichiba.tokushima.jp
+itano.tokushima.jp
+kainan.tokushima.jp
+komatsushima.tokushima.jp
+matsushige.tokushima.jp
+mima.tokushima.jp
+minami.tokushima.jp
+miyoshi.tokushima.jp
+mugi.tokushima.jp
+nakagawa.tokushima.jp
+naruto.tokushima.jp
+sanagochi.tokushima.jp
+shishikui.tokushima.jp
+tokushima.tokushima.jp
+wajiki.tokushima.jp
+adachi.tokyo.jp
+akiruno.tokyo.jp
+akishima.tokyo.jp
+aogashima.tokyo.jp
+arakawa.tokyo.jp
+bunkyo.tokyo.jp
+chiyoda.tokyo.jp
+chofu.tokyo.jp
+chuo.tokyo.jp
+edogawa.tokyo.jp
+fuchu.tokyo.jp
+fussa.tokyo.jp
+hachijo.tokyo.jp
+hachioji.tokyo.jp
+hamura.tokyo.jp
+higashikurume.tokyo.jp
+higashimurayama.tokyo.jp
+higashiyamato.tokyo.jp
+hino.tokyo.jp
+hinode.tokyo.jp
+hinohara.tokyo.jp
+inagi.tokyo.jp
+itabashi.tokyo.jp
+katsushika.tokyo.jp
+kita.tokyo.jp
+kiyose.tokyo.jp
+kodaira.tokyo.jp
+koganei.tokyo.jp
+kokubunji.tokyo.jp
+komae.tokyo.jp
+koto.tokyo.jp
+kouzushima.tokyo.jp
+kunitachi.tokyo.jp
+machida.tokyo.jp
+meguro.tokyo.jp
+minato.tokyo.jp
+mitaka.tokyo.jp
+mizuho.tokyo.jp
+musashimurayama.tokyo.jp
+musashino.tokyo.jp
+nakano.tokyo.jp
+nerima.tokyo.jp
+ogasawara.tokyo.jp
+okutama.tokyo.jp
+ome.tokyo.jp
+oshima.tokyo.jp
+ota.tokyo.jp
+setagaya.tokyo.jp
+shibuya.tokyo.jp
+shinagawa.tokyo.jp
+shinjuku.tokyo.jp
+suginami.tokyo.jp
+sumida.tokyo.jp
+tachikawa.tokyo.jp
+taito.tokyo.jp
+tama.tokyo.jp
+toshima.tokyo.jp
+chizu.tottori.jp
+hino.tottori.jp
+kawahara.tottori.jp
+koge.tottori.jp
+kotoura.tottori.jp
+misasa.tottori.jp
+nanbu.tottori.jp
+nichinan.tottori.jp
+sakaiminato.tottori.jp
+tottori.tottori.jp
+wakasa.tottori.jp
+yazu.tottori.jp
+yonago.tottori.jp
+asahi.toyama.jp
+fuchu.toyama.jp
+fukumitsu.toyama.jp
+funahashi.toyama.jp
+himi.toyama.jp
+imizu.toyama.jp
+inami.toyama.jp
+johana.toyama.jp
+kamiichi.toyama.jp
+kurobe.toyama.jp
+nakaniikawa.toyama.jp
+namerikawa.toyama.jp
+nanto.toyama.jp
+nyuzen.toyama.jp
+oyabe.toyama.jp
+taira.toyama.jp
+takaoka.toyama.jp
+tateyama.toyama.jp
+toga.toyama.jp
+tonami.toyama.jp
+toyama.toyama.jp
+unazuki.toyama.jp
+uozu.toyama.jp
+yamada.toyama.jp
+arida.wakayama.jp
+aridagawa.wakayama.jp
+gobo.wakayama.jp
+hashimoto.wakayama.jp
+hidaka.wakayama.jp
+hirogawa.wakayama.jp
+inami.wakayama.jp
+iwade.wakayama.jp
+kainan.wakayama.jp
+kamitonda.wakayama.jp
+katsuragi.wakayama.jp
+kimino.wakayama.jp
+kinokawa.wakayama.jp
+kitayama.wakayama.jp
+koya.wakayama.jp
+koza.wakayama.jp
+kozagawa.wakayama.jp
+kudoyama.wakayama.jp
+kushimoto.wakayama.jp
+mihama.wakayama.jp
+misato.wakayama.jp
+nachikatsuura.wakayama.jp
+shingu.wakayama.jp
+shirahama.wakayama.jp
+taiji.wakayama.jp
+tanabe.wakayama.jp
+wakayama.wakayama.jp
+yuasa.wakayama.jp
+yura.wakayama.jp
+asahi.yamagata.jp
+funagata.yamagata.jp
+higashine.yamagata.jp
+iide.yamagata.jp
+kahoku.yamagata.jp
+kaminoyama.yamagata.jp
+kaneyama.yamagata.jp
+kawanishi.yamagata.jp
+mamurogawa.yamagata.jp
+mikawa.yamagata.jp
+murayama.yamagata.jp
+nagai.yamagata.jp
+nakayama.yamagata.jp
+nanyo.yamagata.jp
+nishikawa.yamagata.jp
+obanazawa.yamagata.jp
+oe.yamagata.jp
+oguni.yamagata.jp
+ohkura.yamagata.jp
+oishida.yamagata.jp
+sagae.yamagata.jp
+sakata.yamagata.jp
+sakegawa.yamagata.jp
+shinjo.yamagata.jp
+shirataka.yamagata.jp
+shonai.yamagata.jp
+takahata.yamagata.jp
+tendo.yamagata.jp
+tozawa.yamagata.jp
+tsuruoka.yamagata.jp
+yamagata.yamagata.jp
+yamanobe.yamagata.jp
+yonezawa.yamagata.jp
+yuza.yamagata.jp
+abu.yamaguchi.jp
+hagi.yamaguchi.jp
+hikari.yamaguchi.jp
+hofu.yamaguchi.jp
+iwakuni.yamaguchi.jp
+kudamatsu.yamaguchi.jp
+mitou.yamaguchi.jp
+nagato.yamaguchi.jp
+oshima.yamaguchi.jp
+shimonoseki.yamaguchi.jp
+shunan.yamaguchi.jp
+tabuse.yamaguchi.jp
+tokuyama.yamaguchi.jp
+toyota.yamaguchi.jp
+ube.yamaguchi.jp
+yuu.yamaguchi.jp
+chuo.yamanashi.jp
+doshi.yamanashi.jp
+fuefuki.yamanashi.jp
+fujikawa.yamanashi.jp
+fujikawaguchiko.yamanashi.jp
+fujiyoshida.yamanashi.jp
+hayakawa.yamanashi.jp
+hokuto.yamanashi.jp
+ichikawamisato.yamanashi.jp
+kai.yamanashi.jp
+kofu.yamanashi.jp
+koshu.yamanashi.jp
+kosuge.yamanashi.jp
+minami-alps.yamanashi.jp
+minobu.yamanashi.jp
+nakamichi.yamanashi.jp
+nanbu.yamanashi.jp
+narusawa.yamanashi.jp
+nirasaki.yamanashi.jp
+nishikatsura.yamanashi.jp
+oshino.yamanashi.jp
+otsuki.yamanashi.jp
+showa.yamanashi.jp
+tabayama.yamanashi.jp
+tsuru.yamanashi.jp
+uenohara.yamanashi.jp
+yamanakako.yamanashi.jp
+yamanashi.yamanashi.jp
+
+// ke : http://www.kenic.or.ke/index.php?option=com_content&task=view&id=117&Itemid=145
+*.ke
+
+// kg : http://www.domain.kg/dmn_n.html
+kg
+org.kg
+net.kg
+com.kg
+edu.kg
+gov.kg
+mil.kg
+
+// kh : http://www.mptc.gov.kh/dns_registration.htm
+*.kh
+
+// ki : http://www.ki/dns/index.html
+ki
+edu.ki
+biz.ki
+net.ki
+org.ki
+gov.ki
+info.ki
+com.ki
+
+// km : http://en.wikipedia.org/wiki/.km
+// http://www.domaine.km/documents/charte.doc
+km
+org.km
+nom.km
+gov.km
+prd.km
+tm.km
+edu.km
+mil.km
+ass.km
+com.km
+// These are only mentioned as proposed suggestions at domaine.km, but
+// http://en.wikipedia.org/wiki/.km says they're available for registration:
+coop.km
+asso.km
+presse.km
+medecin.km
+notaires.km
+pharmaciens.km
+veterinaire.km
+gouv.km
+
+// kn : http://en.wikipedia.org/wiki/.kn
+// http://www.dot.kn/domainRules.html
+kn
+net.kn
+org.kn
+edu.kn
+gov.kn
+
+// kp : http://www.kcce.kp/en_index.php
+kp
+com.kp
+edu.kp
+gov.kp
+org.kp
+rep.kp
+tra.kp
+
+// kr : http://en.wikipedia.org/wiki/.kr
+// see also: http://domain.nida.or.kr/eng/registration.jsp
+kr
+ac.kr
+co.kr
+es.kr
+go.kr
+hs.kr
+kg.kr
+mil.kr
+ms.kr
+ne.kr
+or.kr
+pe.kr
+re.kr
+sc.kr
+// kr geographical names
+busan.kr
+chungbuk.kr
+chungnam.kr
+daegu.kr
+daejeon.kr
+gangwon.kr
+gwangju.kr
+gyeongbuk.kr
+gyeonggi.kr
+gyeongnam.kr
+incheon.kr
+jeju.kr
+jeonbuk.kr
+jeonnam.kr
+seoul.kr
+ulsan.kr
+
+// kw : http://en.wikipedia.org/wiki/.kw
+*.kw
+
+// ky : http://www.icta.ky/da_ky_reg_dom.php
+// Confirmed by registry <ky...@perimeterusa.com> 2008-06-17
+ky
+edu.ky
+gov.ky
+com.ky
+org.ky
+net.ky
+
+// kz : http://en.wikipedia.org/wiki/.kz
+// see also: http://www.nic.kz/rules/index.jsp
+kz
+org.kz
+edu.kz
+net.kz
+gov.kz
+mil.kz
+com.kz
+
+// la : http://en.wikipedia.org/wiki/.la
+// Submitted by registry <ga...@nic.la> 2008-06-10
+la
+int.la
+net.la
+info.la
+edu.la
+gov.la
+per.la
+com.la
+org.la
+
+// lb : http://en.wikipedia.org/wiki/.lb
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lb
+com.lb
+edu.lb
+gov.lb
+net.lb
+org.lb
+
+// lc : http://en.wikipedia.org/wiki/.lc
+// see also: http://www.nic.lc/rules.htm
+lc
+com.lc
+net.lc
+co.lc
+org.lc
+edu.lc
+gov.lc
+
+// li : http://en.wikipedia.org/wiki/.li
+li
+
+// lk : http://www.nic.lk/seclevpr.html
+lk
+gov.lk
+sch.lk
+net.lk
+int.lk
+com.lk
+org.lk
+edu.lk
+ngo.lk
+soc.lk
+web.lk
+ltd.lk
+assn.lk
+grp.lk
+hotel.lk
+
+// lr : http://psg.com/dns/lr/lr.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lr
+com.lr
+edu.lr
+gov.lr
+org.lr
+net.lr
+
+// ls : http://en.wikipedia.org/wiki/.ls
+ls
+co.ls
+org.ls
+
+// lt : http://en.wikipedia.org/wiki/.lt
+lt
+// gov.lt : http://www.gov.lt/index_en.php
+gov.lt
+
+// lu : http://www.dns.lu/en/
+lu
+
+// lv : http://www.nic.lv/DNS/En/generic.php
+lv
+com.lv
+edu.lv
+gov.lv
+org.lv
+mil.lv
+id.lv
+net.lv
+asn.lv
+conf.lv
+
+// ly : http://www.nic.ly/regulations.php
+ly
+com.ly
+net.ly
+gov.ly
+plc.ly
+edu.ly
+sch.ly
+med.ly
+org.ly
+id.ly
+
+// ma : http://en.wikipedia.org/wiki/.ma
+// http://www.anrt.ma/fr/admin/download/upload/file_fr782.pdf
+ma
+co.ma
+net.ma
+gov.ma
+org.ma
+ac.ma
+press.ma
+
+// mc : http://www.nic.mc/
+mc
+tm.mc
+asso.mc
+
+// md : http://en.wikipedia.org/wiki/.md
+md
+
+// me : http://en.wikipedia.org/wiki/.me
+me
+co.me
+net.me
+org.me
+edu.me
+ac.me
+gov.me
+its.me
+priv.me
+
+// mg : http://www.nic.mg/tarif.htm
+mg
+org.mg
+nom.mg
+gov.mg
+prd.mg
+tm.mg
+edu.mg
+mil.mg
+com.mg
+
+// mh : http://en.wikipedia.org/wiki/.mh
+mh
+
+// mil : http://en.wikipedia.org/wiki/.mil
+mil
+
+// mk : http://en.wikipedia.org/wiki/.mk
+// see also: http://dns.marnet.net.mk/postapka.php
+mk
+com.mk
+org.mk
+net.mk
+edu.mk
+gov.mk
+inf.mk
+name.mk
+
+// ml : http://www.gobin.info/domainname/ml-template.doc
+// see also: http://en.wikipedia.org/wiki/.ml
+ml
+com.ml
+edu.ml
+gouv.ml
+gov.ml
+net.ml
+org.ml
+presse.ml
+
+// mm : http://en.wikipedia.org/wiki/.mm
+*.mm
+
+// mn : http://en.wikipedia.org/wiki/.mn
+mn
+gov.mn
+edu.mn
+org.mn
+
+// mo : http://www.monic.net.mo/
+mo
+com.mo
+net.mo
+org.mo
+edu.mo
+gov.mo
+
+// mobi : http://en.wikipedia.org/wiki/.mobi
+mobi
+
+// mp : http://www.dot.mp/
+// Confirmed by registry <dc...@saipan.com> 2008-06-17
+mp
+
+// mq : http://en.wikipedia.org/wiki/.mq
+mq
+
+// mr : http://en.wikipedia.org/wiki/.mr
+mr
+gov.mr
+
+// ms : http://www.nic.ms/pdf/MS_Domain_Name_Rules.pdf
+ms
+com.ms
+edu.ms
+gov.ms
+net.ms
+org.ms
+
+// mt : https://www.nic.org.mt/go/policy
+// Submitted by registry <he...@nic.org.mt> 2013-11-19
+mt
+com.mt
+edu.mt
+net.mt
+org.mt
+
+// mu : http://en.wikipedia.org/wiki/.mu
+mu
+com.mu
+net.mu
+org.mu
+gov.mu
+ac.mu
+co.mu
+or.mu
+
+// museum : http://about.museum/naming/
+// http://index.museum/
+museum
+academy.museum
+agriculture.museum
+air.museum
+airguard.museum
+alabama.museum
+alaska.museum
+amber.museum
+ambulance.museum
+american.museum
+americana.museum
+americanantiques.museum
+americanart.museum
+amsterdam.museum
+and.museum
+annefrank.museum
+anthro.museum
+anthropology.museum
+antiques.museum
+aquarium.museum
+arboretum.museum
+archaeological.museum
+archaeology.museum
+architecture.museum
+art.museum
+artanddesign.museum
+artcenter.museum
+artdeco.museum
+arteducation.museum
+artgallery.museum
+arts.museum
+artsandcrafts.museum
+asmatart.museum
+assassination.museum
+assisi.museum
+association.museum
+astronomy.museum
+atlanta.museum
+austin.museum
+australia.museum
+automotive.museum
+aviation.museum
+axis.museum
+badajoz.museum
+baghdad.museum
+bahn.museum
+bale.museum
+baltimore.museum
+barcelona.museum
+baseball.museum
+basel.museum
+baths.museum
+bauern.museum
+beauxarts.museum
+beeldengeluid.museum
+bellevue.museum
+bergbau.museum
+berkeley.museum
+berlin.museum
+bern.museum
+bible.museum
+bilbao.museum
+bill.museum
+birdart.museum
+birthplace.museum
+bonn.museum
+boston.museum
+botanical.museum
+botanicalgarden.museum
+botanicgarden.museum
+botany.museum
+brandywinevalley.museum
+brasil.museum
+bristol.museum
+british.museum
+britishcolumbia.museum
+broadcast.museum
+brunel.museum
+brussel.museum
+brussels.museum
+bruxelles.museum
+building.museum
+burghof.museum
+bus.museum
+bushey.museum
+cadaques.museum
+california.museum
+cambridge.museum
+can.museum
+canada.museum
+capebreton.museum
+carrier.museum
+cartoonart.museum
+casadelamoneda.museum
+castle.museum
+castres.museum
+celtic.museum
+center.museum
+chattanooga.museum
+cheltenham.museum
+chesapeakebay.museum
+chicago.museum
+children.museum
+childrens.museum
+childrensgarden.museum
+chiropractic.museum
+chocolate.museum
+christiansburg.museum
+cincinnati.museum
+cinema.museum
+circus.museum
+civilisation.museum
+civilization.museum
+civilwar.museum
+clinton.museum
+clock.museum
+coal.museum
+coastaldefence.museum
+cody.museum
+coldwar.museum
+collection.museum
+colonialwilliamsburg.museum
+coloradoplateau.museum
+columbia.museum
+columbus.museum
+communication.museum
+communications.museum
+community.museum
+computer.museum
+computerhistory.museum
+comunicações.museum
+contemporary.museum
+contemporaryart.museum
+convent.museum
+copenhagen.museum
+corporation.museum
+correios-e-telecomunicações.museum
+corvette.museum
+costume.museum
+countryestate.museum
+county.museum
+crafts.museum
+cranbrook.museum
+creation.museum
+cultural.museum
+culturalcenter.museum
+culture.museum
+cyber.museum
+cymru.museum
+dali.museum
+dallas.museum
+database.museum
+ddr.museum
+decorativearts.museum
+delaware.museum
+delmenhorst.museum
+denmark.museum
+depot.museum
+design.museum
+detroit.museum
+dinosaur.museum
+discovery.museum
+dolls.museum
+donostia.museum
+durham.museum
+eastafrica.museum
+eastcoast.museum
+education.museum
+educational.museum
+egyptian.museum
+eisenbahn.museum
+elburg.museum
+elvendrell.museum
+embroidery.museum
+encyclopedic.museum
+england.museum
+entomology.museum
+environment.museum
+environmentalconservation.museum
+epilepsy.museum
+essex.museum
+estate.museum
+ethnology.museum
+exeter.museum
+exhibition.museum
+family.museum
+farm.museum
+farmequipment.museum
+farmers.museum
+farmstead.museum
+field.museum
+figueres.museum
+filatelia.museum
+film.museum
+fineart.museum
+finearts.museum
+finland.museum
+flanders.museum
+florida.museum
+force.museum
+fortmissoula.museum
+fortworth.museum
+foundation.museum
+francaise.museum
+frankfurt.museum
+franziskaner.museum
+freemasonry.museum
+freiburg.museum
+fribourg.museum
+frog.museum
+fundacio.museum
+furniture.museum
+gallery.museum
+garden.museum
+gateway.museum
+geelvinck.museum
+gemological.museum
+geology.museum
+georgia.museum
+giessen.museum
+glas.museum
+glass.museum
+gorge.museum
+grandrapids.museum
+graz.museum
+guernsey.museum
+halloffame.museum
+hamburg.museum
+handson.museum
+harvestcelebration.museum
+hawaii.museum
+health.museum
+heimatunduhren.museum
+hellas.museum
+helsinki.museum
+hembygdsforbund.museum
+heritage.museum
+histoire.museum
+historical.museum
+historicalsociety.museum
+historichouses.museum
+historisch.museum
+historisches.museum
+history.museum
+historyofscience.museum
+horology.museum
+house.museum
+humanities.museum
+illustration.museum
+imageandsound.museum
+indian.museum
+indiana.museum
+indianapolis.museum
+indianmarket.museum
+intelligence.museum
+interactive.museum
+iraq.museum
+iron.museum
+isleofman.museum
+jamison.museum
+jefferson.museum
+jerusalem.museum
+jewelry.museum
+jewish.museum
+jewishart.museum
+jfk.museum
+journalism.museum
+judaica.museum
+judygarland.museum
+juedisches.museum
+juif.museum
+karate.museum
+karikatur.museum
+kids.museum
+koebenhavn.museum
+koeln.museum
+kunst.museum
+kunstsammlung.museum
+kunstunddesign.museum
+labor.museum
+labour.museum
+lajolla.museum
+lancashire.museum
+landes.museum
+lans.museum
+läns.museum
+larsson.museum
+lewismiller.museum
+lincoln.museum
+linz.museum
+living.museum
+livinghistory.museum
+localhistory.museum
+london.museum
+losangeles.museum
+louvre.museum
+loyalist.museum
+lucerne.museum
+luxembourg.museum
+luzern.museum
+mad.museum
+madrid.museum
+mallorca.museum
+manchester.museum
+mansion.museum
+mansions.museum
+manx.museum
+marburg.museum
+maritime.museum
+maritimo.museum
+maryland.museum
+marylhurst.museum
+media.museum
+medical.museum
+medizinhistorisches.museum
+meeres.museum
+memorial.museum
+mesaverde.museum
+michigan.museum
+midatlantic.museum
+military.museum
+mill.museum
+miners.museum
+mining.museum
+minnesota.museum
+missile.museum
+missoula.museum
+modern.museum
+moma.museum
+money.museum
+monmouth.museum
+monticello.museum
+montreal.museum
+moscow.museum
+motorcycle.museum
+muenchen.museum
+muenster.museum
+mulhouse.museum
+muncie.museum
+museet.museum
+museumcenter.museum
+museumvereniging.museum
+music.museum
+national.museum
+nationalfirearms.museum
+nationalheritage.museum
+nativeamerican.museum
+naturalhistory.museum
+naturalhistorymuseum.museum
+naturalsciences.museum
+nature.museum
+naturhistorisches.museum
+natuurwetenschappen.museum
+naumburg.museum
+naval.museum
+nebraska.museum
+neues.museum
+newhampshire.museum
+newjersey.museum
+newmexico.museum
+newport.museum
+newspaper.museum
+newyork.museum
+niepce.museum
+norfolk.museum
+north.museum
+nrw.museum
+nuernberg.museum
+nuremberg.museum
+nyc.museum
+nyny.museum
+oceanographic.museum
+oceanographique.museum
+omaha.museum
+online.museum
+ontario.museum
+openair.museum
+oregon.museum
+oregontrail.museum
+otago.museum
+oxford.museum
+pacific.museum
+paderborn.museum
+palace.museum
+paleo.museum
+palmsprings.museum
+panama.museum
+paris.museum
+pasadena.museum
+pharmacy.museum
+philadelphia.museum
+philadelphiaarea.museum
+philately.museum
+phoenix.museum
+photography.museum
+pilots.museum
+pittsburgh.museum
+planetarium.museum
+plantation.museum
+plants.museum
+plaza.museum
+portal.museum
+portland.museum
+portlligat.museum
+posts-and-telecommunications.museum
+preservation.museum
+presidio.museum
+press.museum
+project.museum
+public.museum
+pubol.museum
+quebec.museum
+railroad.museum
+railway.museum
+research.museum
+resistance.museum
+riodejaneiro.museum
+rochester.museum
+rockart.museum
+roma.museum
+russia.museum
+saintlouis.museum
+salem.museum
+salvadordali.museum
+salzburg.museum
+sandiego.museum
+sanfrancisco.museum
+santabarbara.museum
+santacruz.museum
+santafe.museum
+saskatchewan.museum
+satx.museum
+savannahga.museum
+schlesisches.museum
+schoenbrunn.museum
+schokoladen.museum
+school.museum
+schweiz.museum
+science.museum
+scienceandhistory.museum
+scienceandindustry.museum
+sciencecenter.museum
+sciencecenters.museum
+science-fiction.museum
+sciencehistory.museum
+sciences.museum
+sciencesnaturelles.museum
+scotland.museum
+seaport.museum
+settlement.museum
+settlers.museum
+shell.museum
+sherbrooke.museum
+sibenik.museum
+silk.museum
+ski.museum
+skole.museum
+society.museum
+sologne.museum
+soundandvision.museum
+southcarolina.museum
+southwest.museum
+space.museum
+spy.museum
+square.museum
+stadt.museum
+stalbans.museum
+starnberg.museum
+state.museum
+stateofdelaware.museum
+station.museum
+steam.museum
+steiermark.museum
+stjohn.museum
+stockholm.museum
+stpetersburg.museum
+stuttgart.museum
+suisse.museum
+surgeonshall.museum
+surrey.museum
+svizzera.museum
+sweden.museum
+sydney.museum
+tank.museum
+tcm.museum
+technology.museum
+telekommunikation.museum
+television.museum
+texas.museum
+textile.museum
+theater.museum
+time.museum
+timekeeping.museum
+topology.museum
+torino.museum
+touch.museum
+town.museum
+transport.museum
+tree.museum
+trolley.museum
+trust.museum
+trustee.museum
+uhren.museum
+ulm.museum
+undersea.museum
+university.museum
+usa.museum
+usantiques.museum
+usarts.museum
+uscountryestate.museum
+usculture.museum
+usdecorativearts.museum
+usgarden.museum
+ushistory.museum
+ushuaia.museum
+uslivinghistory.museum
+utah.museum
+uvic.museum
+valley.museum
+vantaa.museum
+versailles.museum
+viking.museum
+village.museum
+virginia.museum
+virtual.museum
+virtuel.museum
+vlaanderen.museum
+volkenkunde.museum
+wales.museum
+wallonie.museum
+war.museum
+washingtondc.museum
+watchandclock.museum
+watch-and-clock.museum
+western.museum
+westfalen.museum
+whaling.museum
+wildlife.museum
+williamsburg.museum
+windmill.museum
+workshop.museum
+york.museum
+yorkshire.museum
+yosemite.museum
+youth.museum
+zoological.museum
+zoology.museum
+ירושלים.museum
+иком.museum
+
+// mv : http://en.wikipedia.org/wiki/.mv
+// "mv" included because, contra Wikipedia, google.mv exists.
+mv
+aero.mv
+biz.mv
+com.mv
+coop.mv
+edu.mv
+gov.mv
+info.mv
+int.mv
+mil.mv
+museum.mv
+name.mv
+net.mv
+org.mv
+pro.mv
+
+// mw : http://www.registrar.mw/
+mw
+ac.mw
+biz.mw
+co.mw
+com.mw
+coop.mw
+edu.mw
+gov.mw
+int.mw
+museum.mw
+net.mw
+org.mw
+
+// mx : http://www.nic.mx/
+// Submitted by registry <fa...@nic.mx> 2008-06-19
+mx
+com.mx
+org.mx
+gob.mx
+edu.mx
+net.mx
+
+// my : http://www.mynic.net.my/
+my
+com.my
+net.my
+org.my
+gov.my
+edu.my
+mil.my
+name.my
+
+// mz : http://www.gobin.info/domainname/mz-template.doc
+*.mz
+!teledata.mz
+
+// na : http://www.na-nic.com.na/
+// http://www.info.na/domain/
+na
+info.na
+pro.na
+name.na
+school.na
+or.na
+dr.na
+us.na
+mx.na
+ca.na
+in.na
+cc.na
+tv.na
+ws.na
+mobi.na
+co.na
+com.na
+org.na
+
+// name : has 2nd-level tlds, but there's no list of them
+name
+
+// nc : http://www.cctld.nc/
+nc
+asso.nc
+
+// ne : http://en.wikipedia.org/wiki/.ne
+ne
+
+// net : http://en.wikipedia.org/wiki/.net
+net
+
+// nf : http://en.wikipedia.org/wiki/.nf
+nf
+com.nf
+net.nf
+per.nf
+rec.nf
+web.nf
+arts.nf
+firm.nf
+info.nf
+other.nf
+store.nf
+
+// ng : http://psg.com/dns/ng/
+ng
+com.ng
+edu.ng
+name.ng
+net.ng
+org.ng
+sch.ng
+gov.ng
+mil.ng
+mobi.ng
+
+// ni : http://www.nic.ni/dominios.htm
+*.ni
+
+// nl : http://www.domain-registry.nl/ace.php/c,728,122,,,,Home.html
+// Confirmed by registry <An...@sidn.nl> (with technical
+// reservations) 2008-06-08
+nl
+
+// BV.nl will be a registry for Dutch BVs (besloten vennootschap)
+bv.nl
+
+// no : http://www.norid.no/regelverk/index.en.html
+// The Norwegian registry has declined to notify us of updates. The web pages
+// referenced below are the official source of the data. There is also an
+// announce mailing list:
+// https://postlister.uninett.no/sympa/info/norid-diskusjon
+no
+// Norid generic domains : http://www.norid.no/regelverk/vedlegg-c.en.html
+fhs.no
+vgs.no
+fylkesbibl.no
+folkebibl.no
+museum.no
+idrett.no
+priv.no
+// Non-Norid generic domains : http://www.norid.no/regelverk/vedlegg-d.en.html
+mil.no
+stat.no
+dep.no
+kommune.no
+herad.no
+// no geographical names : http://www.norid.no/regelverk/vedlegg-b.en.html
+// counties
+aa.no
+ah.no
+bu.no
+fm.no
+hl.no
+hm.no
+jan-mayen.no
+mr.no
+nl.no
+nt.no
+of.no
+ol.no
+oslo.no
+rl.no
+sf.no
+st.no
+svalbard.no
+tm.no
+tr.no
+va.no
+vf.no
+// primary and lower secondary schools per county
+gs.aa.no
+gs.ah.no
+gs.bu.no
+gs.fm.no
+gs.hl.no
+gs.hm.no
+gs.jan-mayen.no
+gs.mr.no
+gs.nl.no
+gs.nt.no
+gs.of.no
+gs.ol.no
+gs.oslo.no
+gs.rl.no
+gs.sf.no
+gs.st.no
+gs.svalbard.no
+gs.tm.no
+gs.tr.no
+gs.va.no
+gs.vf.no
+// cities
+akrehamn.no
+åkrehamn.no
+algard.no
+ålgård.no
+arna.no
+brumunddal.no
+bryne.no
+bronnoysund.no
+brønnøysund.no
+drobak.no
+drøbak.no
+egersund.no
+fetsund.no
+floro.no
+florø.no
+fredrikstad.no
+hokksund.no
+honefoss.no
+hønefoss.no
+jessheim.no
+jorpeland.no
+jørpeland.no
+kirkenes.no
+kopervik.no
+krokstadelva.no
+langevag.no
+langevåg.no
+leirvik.no
+mjondalen.no
+mjøndalen.no
+mo-i-rana.no
+mosjoen.no
+mosjøen.no
+nesoddtangen.no
+orkanger.no
+osoyro.no
+osøyro.no
+raholt.no
+råholt.no
+sandnessjoen.no
+sandnessjøen.no
+skedsmokorset.no
+slattum.no
+spjelkavik.no
+stathelle.no
+stavern.no
+stjordalshalsen.no
+stjørdalshalsen.no
+tananger.no
+tranby.no
+vossevangen.no
+// communities
+afjord.no
+åfjord.no
+agdenes.no
+al.no
+ål.no
+alesund.no
+ålesund.no
+alstahaug.no
+alta.no
+áltá.no
+alaheadju.no
+álaheadju.no
+alvdal.no
+amli.no
+åmli.no
+amot.no
+åmot.no
+andebu.no
+andoy.no
+andøy.no
+andasuolo.no
+ardal.no
+årdal.no
+aremark.no
+arendal.no
+ås.no
+aseral.no
+åseral.no
+asker.no
+askim.no
+askvoll.no
+askoy.no
+askøy.no
+asnes.no
+åsnes.no
+audnedaln.no
+aukra.no
+aure.no
+aurland.no
+aurskog-holand.no
+aurskog-høland.no
+austevoll.no
+austrheim.no
+averoy.no
+averøy.no
+balestrand.no
+ballangen.no
+balat.no
+bálát.no
+balsfjord.no
+bahccavuotna.no
+báhccavuotna.no
+bamble.no
+bardu.no
+beardu.no
+beiarn.no
+bajddar.no
+bájddar.no
+baidar.no
+báidár.no
+berg.no
+bergen.no
+berlevag.no
+berlevåg.no
+bearalvahki.no
+bearalváhki.no
+bindal.no
+birkenes.no
+bjarkoy.no
+bjarkøy.no
+bjerkreim.no
+bjugn.no
+bodo.no
+bodø.no
+badaddja.no
+bådåddjå.no
+budejju.no
+bokn.no
+bremanger.no
+bronnoy.no
+brønnøy.no
+bygland.no
+bykle.no
+barum.no
+bærum.no
+bo.telemark.no
+bø.telemark.no
+bo.nordland.no
+bø.nordland.no
+bievat.no
+bievát.no
+bomlo.no
+bømlo.no
+batsfjord.no
+båtsfjord.no
+bahcavuotna.no
+báhcavuotna.no
+dovre.no
+drammen.no
+drangedal.no
+dyroy.no
+dyrøy.no
+donna.no
+dønna.no
+eid.no
+eidfjord.no
+eidsberg.no
+eidskog.no
+eidsvoll.no
+eigersund.no
+elverum.no
+enebakk.no
+engerdal.no
+etne.no
+etnedal.no
+evenes.no
+evenassi.no
+evenášši.no
+evje-og-hornnes.no
+farsund.no
+fauske.no
+fuossko.no
+fuoisku.no
+fedje.no
+fet.no
+finnoy.no
+finnøy.no
+fitjar.no
+fjaler.no
+fjell.no
+flakstad.no
+flatanger.no
+flekkefjord.no
+flesberg.no
+flora.no
+fla.no
+flå.no
+folldal.no
+forsand.no
+fosnes.no
+frei.no
+frogn.no
+froland.no
+frosta.no
+frana.no
+fræna.no
+froya.no
+frøya.no
+fusa.no
+fyresdal.no
+forde.no
+førde.no
+gamvik.no
+gangaviika.no
+gáŋgaviika.no
+gaular.no
+gausdal.no
+gildeskal.no
+gildeskål.no
+giske.no
+gjemnes.no
+gjerdrum.no
+gjerstad.no
+gjesdal.no
+gjovik.no
+gjøvik.no
+gloppen.no
+gol.no
+gran.no
+grane.no
+granvin.no
+gratangen.no
+grimstad.no
+grong.no
+kraanghke.no
+kråanghke.no
+grue.no
+gulen.no
+hadsel.no
+halden.no
+halsa.no
+hamar.no
+hamaroy.no
+habmer.no
+hábmer.no
+hapmir.no
+hápmir.no
+hammerfest.no
+hammarfeasta.no
+hámmárfeasta.no
+haram.no
+hareid.no
+harstad.no
+hasvik.no
+aknoluokta.no
+ákŋoluokta.no
+hattfjelldal.no
+aarborte.no
+haugesund.no
+hemne.no
+hemnes.no
+hemsedal.no
+heroy.more-og-romsdal.no
+herøy.møre-og-romsdal.no
+heroy.nordland.no
+herøy.nordland.no
+hitra.no
+hjartdal.no
+hjelmeland.no
+hobol.no
+hobøl.no
+hof.no
+hol.no
+hole.no
+holmestrand.no
+holtalen.no
+holtålen.no
+hornindal.no
+horten.no
+hurdal.no
+hurum.no
+hvaler.no
+hyllestad.no
+hagebostad.no
+hægebostad.no
+hoyanger.no
+høyanger.no
+hoylandet.no
+høylandet.no
+ha.no
+hå.no
+ibestad.no
+inderoy.no
+inderøy.no
+iveland.no
+jevnaker.no
+jondal.no
+jolster.no
+jølster.no
+karasjok.no
+karasjohka.no
+kárášjohka.no
+karlsoy.no
+galsa.no
+gálsá.no
+karmoy.no
+karmøy.no
+kautokeino.no
+guovdageaidnu.no
+klepp.no
+klabu.no
+klæbu.no
+kongsberg.no
+kongsvinger.no
+kragero.no
+kragerø.no
+kristiansand.no
+kristiansund.no
+krodsherad.no
+krødsherad.no
+kvalsund.no
+rahkkeravju.no
+ráhkkerávju.no
+kvam.no
+kvinesdal.no
+kvinnherad.no
+kviteseid.no
+kvitsoy.no
+kvitsøy.no
+kvafjord.no
+kvæfjord.no
+giehtavuoatna.no
+kvanangen.no
+kvænangen.no
+navuotna.no
+návuotna.no
+kafjord.no
+kåfjord.no
+gaivuotna.no
+gáivuotna.no
+larvik.no
+lavangen.no
+lavagis.no
+loabat.no
+loabát.no
+lebesby.no
+davvesiida.no
+leikanger.no
+leirfjord.no
+leka.no
+leksvik.no
+lenvik.no
+leangaviika.no
+leaŋgaviika.no
+lesja.no
+levanger.no
+lier.no
+lierne.no
+lillehammer.no
+lillesand.no
+lindesnes.no
+lindas.no
+lindås.no
+lom.no
+loppa.no
+lahppi.no
+láhppi.no
+lund.no
+lunner.no
+luroy.no
+lurøy.no
+luster.no
+lyngdal.no
+lyngen.no
+ivgu.no
+lardal.no
+lerdal.no
+lærdal.no
+lodingen.no
+lødingen.no
+lorenskog.no
+lørenskog.no
+loten.no
+løten.no
+malvik.no
+masoy.no
+måsøy.no
+muosat.no
+muosát.no
+mandal.no
+marker.no
+marnardal.no
+masfjorden.no
+meland.no
+meldal.no
+melhus.no
+meloy.no
+meløy.no
+meraker.no
+meråker.no
+moareke.no
+moåreke.no
+midsund.no
+midtre-gauldal.no
+modalen.no
+modum.no
+molde.no
+moskenes.no
+moss.no
+mosvik.no
+malselv.no
+målselv.no
+malatvuopmi.no
+málatvuopmi.no
+namdalseid.no
+aejrie.no
+namsos.no
+namsskogan.no
+naamesjevuemie.no
+nååmesjevuemie.no
+laakesvuemie.no
+nannestad.no
+narvik.no
+narviika.no
+naustdal.no
+nedre-eiker.no
+nes.akershus.no
+nes.buskerud.no
+nesna.no
+nesodden.no
+nesseby.no
+unjarga.no
+unjárga.no
+nesset.no
+nissedal.no
+nittedal.no
+nord-aurdal.no
+nord-fron.no
+nord-odal.no
+norddal.no
+nordkapp.no
+davvenjarga.no
+davvenjárga.no
+nordre-land.no
+nordreisa.no
+raisa.no
+ráisa.no
+nore-og-uvdal.no
+notodden.no
+naroy.no
+nærøy.no
+notteroy.no
+nøtterøy.no
+odda.no
+oksnes.no
+øksnes.no
+oppdal.no
+oppegard.no
+oppegård.no
+orkdal.no
+orland.no
+ørland.no
+orskog.no
+ørskog.no
+orsta.no
+ørsta.no
+os.hedmark.no
+os.hordaland.no
+osen.no
+osteroy.no
+osterøy.no
+ostre-toten.no
+østre-toten.no
+overhalla.no
+ovre-eiker.no
+øvre-eiker.no
+oyer.no
+øyer.no
+oygarden.no
+øygarden.no
+oystre-slidre.no
+øystre-slidre.no
+porsanger.no
+porsangu.no
+porsáŋgu.no
+porsgrunn.no
+radoy.no
+radøy.no
+rakkestad.no
+rana.no
+ruovat.no
+randaberg.no
+rauma.no
+rendalen.no
+rennebu.no
+rennesoy.no
+rennesøy.no
+rindal.no
+ringebu.no
+ringerike.no
+ringsaker.no
+rissa.no
+risor.no
+risør.no
+roan.no
+rollag.no
+rygge.no
+ralingen.no
+rælingen.no
+rodoy.no
+rødøy.no
+romskog.no
+rømskog.no
+roros.no
+røros.no
+rost.no
+røst.no
+royken.no
+røyken.no
+royrvik.no
+røyrvik.no
+rade.no
+råde.no
+salangen.no
+siellak.no
+saltdal.no
+salat.no
+sálát.no
+sálat.no
+samnanger.no
+sande.more-og-romsdal.no
+sande.møre-og-romsdal.no
+sande.vestfold.no
+sandefjord.no
+sandnes.no
+sandoy.no
+sandøy.no
+sarpsborg.no
+sauda.no
+sauherad.no
+sel.no
+selbu.no
+selje.no
+seljord.no
+sigdal.no
+siljan.no
+sirdal.no
+skaun.no
+skedsmo.no
+ski.no
+skien.no
+skiptvet.no
+skjervoy.no
+skjervøy.no
+skierva.no
+skiervá.no
+skjak.no
+skjåk.no
+skodje.no
+skanland.no
+skånland.no
+skanit.no
+skánit.no
+smola.no
+smøla.no
+snillfjord.no
+snasa.no
+snåsa.no
+snoasa.no
+snaase.no
+snåase.no
+sogndal.no
+sokndal.no
+sola.no
+solund.no
+songdalen.no
+sortland.no
+spydeberg.no
+stange.no
+stavanger.no
+steigen.no
+steinkjer.no
+stjordal.no
+stjørdal.no
+stokke.no
+stor-elvdal.no
+stord.no
+stordal.no
+storfjord.no
+omasvuotna.no
+strand.no
+stranda.no
+stryn.no
+sula.no
+suldal.no
+sund.no
+sunndal.no
+surnadal.no
+sveio.no
+svelvik.no
+sykkylven.no
+sogne.no
+søgne.no
+somna.no
+sømna.no
+sondre-land.no
+søndre-land.no
+sor-aurdal.no
+sør-aurdal.no
+sor-fron.no
+sør-fron.no
+sor-odal.no
+sør-odal.no
+sor-varanger.no
+sør-varanger.no
+matta-varjjat.no
+mátta-várjjat.no
+sorfold.no
+sørfold.no
+sorreisa.no
+sørreisa.no
+sorum.no
+sørum.no
+tana.no
+deatnu.no
+time.no
+tingvoll.no
+tinn.no
+tjeldsund.no
+dielddanuorri.no
+tjome.no
+tjøme.no
+tokke.no
+tolga.no
+torsken.no
+tranoy.no
+tranøy.no
+tromso.no
+tromsø.no
+tromsa.no
+romsa.no
+trondheim.no
+troandin.no
+trysil.no
+trana.no
+træna.no
+trogstad.no
+trøgstad.no
+tvedestrand.no
+tydal.no
+tynset.no
+tysfjord.no
+divtasvuodna.no
+divttasvuotna.no
+tysnes.no
+tysvar.no
+tysvær.no
+tonsberg.no
+tønsberg.no
+ullensaker.no
+ullensvang.no
+ulvik.no
+utsira.no
+vadso.no
+vadsø.no
+cahcesuolo.no
+čáhcesuolo.no
+vaksdal.no
+valle.no
+vang.no
+vanylven.no
+vardo.no
+vardø.no
+varggat.no
+várggát.no
+vefsn.no
+vaapste.no
+vega.no
+vegarshei.no
+vegårshei.no
+vennesla.no
+verdal.no
+verran.no
+vestby.no
+vestnes.no
+vestre-slidre.no
+vestre-toten.no
+vestvagoy.no
+vestvågøy.no
+vevelstad.no
+vik.no
+vikna.no
+vindafjord.no
+volda.no
+voss.no
+varoy.no
+værøy.no
+vagan.no
+vågan.no
+voagat.no
+vagsoy.no
+vågsøy.no
+vaga.no
+vågå.no
+valer.ostfold.no
+våler.østfold.no
+valer.hedmark.no
+våler.hedmark.no
+
+// np : http://www.mos.com.np/register.html
+*.np
+
+// nr : http://cenpac.net.nr/dns/index.html
+// Confirmed by registry <te...@cenpac.net.nr> 2008-06-17
+nr
+biz.nr
+info.nr
+gov.nr
+edu.nr
+org.nr
+net.nr
+com.nr
+
+// nu : http://en.wikipedia.org/wiki/.nu
+nu
+
+// nz : http://en.wikipedia.org/wiki/.nz
+// Confirmed by registry <ja...@nzrs.net.nz> 2014-05-19
+nz
+ac.nz
+co.nz
+cri.nz
+geek.nz
+gen.nz
+govt.nz
+health.nz
+iwi.nz
+kiwi.nz
+maori.nz
+mil.nz
+māori.nz
+net.nz
+org.nz
+parliament.nz
+school.nz
+
+// om : http://en.wikipedia.org/wiki/.om
+om
+co.om
+com.om
+edu.om
+gov.om
+med.om
+museum.om
+net.om
+org.om
+pro.om
+
+// org : http://en.wikipedia.org/wiki/.org
+org
+
+// pa : http://www.nic.pa/
+// Some additional second level "domains" resolve directly as hostnames, such as
+// pannet.pa, so we add a rule for "pa".
+pa
+ac.pa
+gob.pa
+com.pa
+org.pa
+sld.pa
+edu.pa
+net.pa
+ing.pa
+abo.pa
+med.pa
+nom.pa
+
+// pe : https://www.nic.pe/InformeFinalComision.pdf
+pe
+edu.pe
+gob.pe
+nom.pe
+mil.pe
+org.pe
+com.pe
+net.pe
+
+// pf : http://www.gobin.info/domainname/formulaire-pf.pdf
+pf
+com.pf
+org.pf
+edu.pf
+
+// pg : http://en.wikipedia.org/wiki/.pg
+*.pg
+
+// ph : http://www.domains.ph/FAQ2.asp
+// Submitted by registry <je...@email.com.ph> 2008-06-13
+ph
+com.ph
+net.ph
+org.ph
+gov.ph
+edu.ph
+ngo.ph
+mil.ph
+i.ph
+
+// pk : http://pk5.pknic.net.pk/pk5/msgNamepk.PK
+pk
+com.pk
+net.pk
+edu.pk
+org.pk
+fam.pk
+biz.pk
+web.pk
+gov.pk
+gob.pk
+gok.pk
+gon.pk
+gop.pk
+gos.pk
+info.pk
+
+// pl http://www.dns.pl/english/index.html
+// confirmed on 26.09.2014 from Bogna Tchórzewska <pa...@dns.pl>
+pl
+com.pl
+net.pl
+org.pl
+info.pl
+waw.pl
+gov.pl
+// pl functional domains (http://www.dns.pl/english/index.html)
+aid.pl
+agro.pl
+atm.pl
+auto.pl
+biz.pl
+edu.pl
+gmina.pl
+gsm.pl
+mail.pl
+miasta.pl
+media.pl
+mil.pl
+nieruchomosci.pl
+nom.pl
+pc.pl
+powiat.pl
+priv.pl
+realestate.pl
+rel.pl
+sex.pl
+shop.pl
+sklep.pl
+sos.pl
+szkola.pl
+targi.pl
+tm.pl
+tourism.pl
+travel.pl
+turystyka.pl
+// Government domains (administred by ippt.gov.pl)
+uw.gov.pl
+um.gov.pl
+ug.gov.pl
+upow.gov.pl
+starostwo.gov.pl
+so.gov.pl
+sr.gov.pl
+po.gov.pl
+pa.gov.pl
+// pl regional domains (http://www.dns.pl/english/index.html)
+augustow.pl
+babia-gora.pl
+bedzin.pl
+beskidy.pl
+bialowieza.pl
+bialystok.pl
+bielawa.pl
+bieszczady.pl
+boleslawiec.pl
+bydgoszcz.pl
+bytom.pl
+cieszyn.pl
+czeladz.pl
+czest.pl
+dlugoleka.pl
+elblag.pl
+elk.pl
+glogow.pl
+gniezno.pl
+gorlice.pl
+grajewo.pl
+ilawa.pl
+jaworzno.pl
+jelenia-gora.pl
+jgora.pl
+kalisz.pl
+kazimierz-dolny.pl
+karpacz.pl
+kartuzy.pl
+kaszuby.pl
+katowice.pl
+kepno.pl
+ketrzyn.pl
+klodzko.pl
+kobierzyce.pl
+kolobrzeg.pl
+konin.pl
+konskowola.pl
+kutno.pl
+lapy.pl
+lebork.pl
+legnica.pl
+lezajsk.pl
+limanowa.pl
+lomza.pl
+lowicz.pl
+lubin.pl
+lukow.pl
+malbork.pl
+malopolska.pl
+mazowsze.pl
+mazury.pl
+mielec.pl
+mielno.pl
+mragowo.pl
+naklo.pl
+nowaruda.pl
+nysa.pl
+olawa.pl
+olecko.pl
+olkusz.pl
+olsztyn.pl
+opoczno.pl
+opole.pl
+ostroda.pl
+ostroleka.pl
+ostrowiec.pl
+ostrowwlkp.pl
+pila.pl
+pisz.pl
+podhale.pl
+podlasie.pl
+polkowice.pl
+pomorze.pl
+pomorskie.pl
+prochowice.pl
+pruszkow.pl
+przeworsk.pl
+pulawy.pl
+radom.pl
+rawa-maz.pl
+rybnik.pl
+rzeszow.pl
+sanok.pl
+sejny.pl
+slask.pl
+slupsk.pl
+sosnowiec.pl
+stalowa-wola.pl
+skoczow.pl
+starachowice.pl
+stargard.pl
+suwalki.pl
+swidnica.pl
+swiebodzin.pl
+swinoujscie.pl
+szczecin.pl
+szczytno.pl
+tarnobrzeg.pl
+tgory.pl
+turek.pl
+tychy.pl
+ustka.pl
+walbrzych.pl
+warmia.pl
+warszawa.pl
+wegrow.pl
+wielun.pl
+wlocl.pl
+wloclawek.pl
+wodzislaw.pl
+wolomin.pl
+wroclaw.pl
+zachpomor.pl
+zagan.pl
+zarow.pl
+zgora.pl
+zgorzelec.pl
+
+// pm : http://www.afnic.fr/medias/documents/AFNIC-naming-policy2012.pdf
+pm
+
+// pn : http://www.government.pn/PnRegistry/policies.htm
+pn
+gov.pn
+co.pn
+org.pn
+edu.pn
+net.pn
+
+// post : http://en.wikipedia.org/wiki/.post
+post
+
+// pr : http://www.nic.pr/index.asp?f=1
+pr
+com.pr
+net.pr
+org.pr
+gov.pr
+edu.pr
+isla.pr
+pro.pr
+biz.pr
+info.pr
+name.pr
+// these aren't mentioned on nic.pr, but on http://en.wikipedia.org/wiki/.pr
+est.pr
+prof.pr
+ac.pr
+
+// pro : http://www.nic.pro/support_faq.htm
+pro
+aca.pro
+bar.pro
+cpa.pro
+jur.pro
+law.pro
+med.pro
+eng.pro
+
+// ps : http://en.wikipedia.org/wiki/.ps
+// http://www.nic.ps/registration/policy.html#reg
+ps
+edu.ps
+gov.ps
+sec.ps
+plo.ps
+com.ps
+org.ps
+net.ps
+
+// pt : http://online.dns.pt/dns/start_dns
+pt
+net.pt
+gov.pt
+org.pt
+edu.pt
+int.pt
+publ.pt
+com.pt
+nome.pt
+
+// pw : http://en.wikipedia.org/wiki/.pw
+pw
+co.pw
+ne.pw
+or.pw
+ed.pw
+go.pw
+belau.pw
+
+// py : http://www.nic.py/pautas.html#seccion_9
+// Confirmed by registry 2012-10-03
+py
+com.py
+coop.py
+edu.py
+gov.py
+mil.py
+net.py
+org.py
+
+// qa : http://domains.qa/en/
+qa
+com.qa
+edu.qa
+gov.qa
+mil.qa
+name.qa
+net.qa
+org.qa
+sch.qa
+
+// re : http://www.afnic.re/obtenir/chartes/nommage-re/annexe-descriptifs
+re
+com.re
+asso.re
+nom.re
+
+// ro : http://www.rotld.ro/
+ro
+com.ro
+org.ro
+tm.ro
+nt.ro
+nom.ro
+info.ro
+rec.ro
+arts.ro
+firm.ro
+store.ro
+www.ro
+
+// rs : http://en.wikipedia.org/wiki/.rs
+rs
+co.rs
+org.rs
+edu.rs
+ac.rs
+gov.rs
+in.rs
+
+// ru : http://www.cctld.ru/ru/docs/aktiv_8.php
+// Industry domains
+ru
+ac.ru
+com.ru
+edu.ru
+int.ru
+net.ru
+org.ru
+pp.ru
+// Geographical domains
+adygeya.ru
+altai.ru
+amur.ru
+arkhangelsk.ru
+astrakhan.ru
+bashkiria.ru
+belgorod.ru
+bir.ru
+bryansk.ru
+buryatia.ru
+cbg.ru
+chel.ru
+chelyabinsk.ru
+chita.ru
+chukotka.ru
+chuvashia.ru
+dagestan.ru
+dudinka.ru
+e-burg.ru
+grozny.ru
+irkutsk.ru
+ivanovo.ru
+izhevsk.ru
+jar.ru
+joshkar-ola.ru
+kalmykia.ru
+kaluga.ru
+kamchatka.ru
+karelia.ru
+kazan.ru
+kchr.ru
+kemerovo.ru
+khabarovsk.ru
+khakassia.ru
+khv.ru
+kirov.ru
+koenig.ru
+komi.ru
+kostroma.ru
+krasnoyarsk.ru
+kuban.ru
+kurgan.ru
+kursk.ru
+lipetsk.ru
+magadan.ru
+mari.ru
+mari-el.ru
+marine.ru
+mordovia.ru
+// mosreg.ru  Bug 1090800 - removed at request of Aleksey Konstantinov <ko...@mosreg.ru>
+msk.ru
+murmansk.ru
+nalchik.ru
+nnov.ru
+nov.ru
+novosibirsk.ru
+nsk.ru
+omsk.ru
+orenburg.ru
+oryol.ru
+palana.ru
+penza.ru
+perm.ru
+ptz.ru
+rnd.ru
+ryazan.ru
+sakhalin.ru
+samara.ru
+saratov.ru
+simbirsk.ru
+smolensk.ru
+spb.ru
+stavropol.ru
+stv.ru
+surgut.ru
+tambov.ru
+tatarstan.ru
+tom.ru
+tomsk.ru
+tsaritsyn.ru
+tsk.ru
+tula.ru
+tuva.ru
+tver.ru
+tyumen.ru
+udm.ru
+udmurtia.ru
+ulan-ude.ru
+vladikavkaz.ru
+vladimir.ru
+vladivostok.ru
+volgograd.ru
+vologda.ru
+voronezh.ru
+vrn.ru
+vyatka.ru
+yakutia.ru
+yamal.ru
+yaroslavl.ru
+yekaterinburg.ru
+yuzhno-sakhalinsk.ru
+// More geographical domains
+amursk.ru
+baikal.ru
+cmw.ru
+fareast.ru
+jamal.ru
+kms.ru
+k-uralsk.ru
+kustanai.ru
+kuzbass.ru
+magnitka.ru
+mytis.ru
+nakhodka.ru
+nkz.ru
+norilsk.ru
+oskol.ru
+pyatigorsk.ru
+rubtsovsk.ru
+snz.ru
+syzran.ru
+vdonsk.ru
+zgrad.ru
+// State domains
+gov.ru
+mil.ru
+// Technical domains
+test.ru
+
+// rw : http://www.nic.rw/cgi-bin/policy.pl
+rw
+gov.rw
+net.rw
+edu.rw
+ac.rw
+com.rw
+co.rw
+int.rw
+mil.rw
+gouv.rw
+
+// sa : http://www.nic.net.sa/
+sa
+com.sa
+net.sa
+org.sa
+gov.sa
+med.sa
+pub.sa
+edu.sa
+sch.sa
+
+// sb : http://www.sbnic.net.sb/
+// Submitted by registry <le...@telekom.com.sb> 2008-06-08
+sb
+com.sb
+edu.sb
+gov.sb
+net.sb
+org.sb
+
+// sc : http://www.nic.sc/
+sc
+com.sc
+gov.sc
+net.sc
+org.sc
+edu.sc
+
+// sd : http://www.isoc.sd/sudanic.isoc.sd/billing_pricing.htm
+// Submitted by registry <ad...@isoc.sd> 2008-06-17
+sd
+com.sd
+net.sd
+org.sd
+edu.sd
+med.sd
+tv.sd

<TRUNCATED>


[44/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/extra_modules/ambari_cluster_state.py
----------------------------------------------------------------------
diff --git a/metron-deployment/extra_modules/ambari_cluster_state.py b/metron-deployment/extra_modules/ambari_cluster_state.py
new file mode 100644
index 0000000..14c2004
--- /dev/null
+++ b/metron-deployment/extra_modules/ambari_cluster_state.py
@@ -0,0 +1,392 @@
+#!/usr/bin/python
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+DOCUMENTATION = '''
+---
+module: ambari_cluster_state
+version_added: "2.1"
+author: Mark Bittmann (https://github.com/mbittmann)
+short_description: Create, delete, start or stop an ambari cluster
+description:
+    - Create, delete, start or stop an ambari cluster
+options:
+  host:
+    description:
+      The hostname for the ambari web server
+  port:
+    description:
+      The port for the ambari web server
+  username:
+    description:
+      The username for the ambari web server
+  password:
+    description:
+      The password for the ambari web server
+    required: yes
+  cluster_name:
+    description:
+      The name of the cluster in ambari
+    required: yes
+  cluster_state:
+    description:
+      The desired state for the ambari cluster ['present', 'absent', 'started', 'stopped']. Setting the cluster
+      state to absent will first stop the cluster.
+    required: yes
+  blueprint_var:
+    description:
+      The path to the file defining the cluster blueprint and host mapping. Required when state == 'present'
+    required: no
+  blueprint_name:
+    description:
+      The name of the blueprint. Required when state == 'present'
+    required: no
+  wait_for_complete:
+    description:
+      Whether to wait for the request to complete before returning. Default is False.
+    required: no
+  requirements: [ 'requests']
+'''
+
+EXAMPLES = '''
+# must use full relative path to any files in stored in roles/role_name/files/
+- name: Create a new ambari cluster
+  ambari_cluster_state:
+      host: localhost
+      port: 8080
+      username: admin
+      password: admin
+      cluster_name: my_cluster
+      cluster_state: present
+      blueprint_var: roles/my_role/files/blueprint.yml
+      blueprint_name: hadoop
+      wait_for_complete: True
+- name: Start the ambari cluster
+  ambari_cluster_state:
+    host: localhost
+    port: 8080
+    username: admin
+    password: admin
+    cluster_name: my_cluster
+    cluster_state: started
+    wait_for_complete: True
+- name: Stop the ambari cluster
+  ambari_cluster_state:
+    host: localhost
+    port: 8080
+    username: admin
+    password: admin
+    cluster_name: my_cluster
+    cluster_state: stopped
+    wait_for_complete: True
+- name: Delete the ambari cluster
+  ambari_cluster_state:
+    host: localhost
+    port: 8080
+    username: admin
+    password: admin
+    cluster_name: my_cluster
+    cluster_state: absent
+'''
+
+RETURN = '''
+results:
+    description: The content of the requests object returned from the RESTful call
+    returned: success
+    type: string
+created_blueprint:
+    description: Whether a blueprint was created
+    returned: success
+    type: boolean
+status:
+    description: The status of the blueprint creation process
+    returned: success
+    type: string
+'''
+
+__author__ = 'mbittmann'
+
+import json
+try:
+    import requests
+except ImportError:
+    REQUESTS_FOUND = False
+else:
+    REQUESTS_FOUND = True
+
+
+def main():
+
+    argument_spec = dict(
+        host=dict(type='str', default=None, required=True),
+        port=dict(type='int', default=None, required=True),
+        username=dict(type='str', default=None, required=True),
+        password=dict(type='str', default=None, required=True),
+        cluster_name=dict(type='str', default=None, required=True),
+        cluster_state=dict(type='str', default=None, required=True,
+                           choices=['present', 'absent', 'started', 'stopped']),
+        blueprint_var=dict(type='dict', required=False),
+        blueprint_name=dict(type='str', default=None, required=False),
+        configurations=dict(type='list', default=None, required=False),
+        wait_for_complete=dict(default=False, required=False, choices=BOOLEANS),
+    )
+
+    required_together = ['blueprint_var', 'blueprint_name']
+
+    module = AnsibleModule(
+        argument_spec=argument_spec,
+        required_together=required_together
+    )
+
+    if not REQUESTS_FOUND:
+        module.fail_json(
+            msg='requests library is required for this module')
+
+    p = module.params
+
+    host = p.get('host')
+    port = p.get('port')
+    username = p.get('password')
+    password = p.get('password')
+    cluster_name = p.get('cluster_name')
+    cluster_state = p.get('cluster_state')
+    blueprint_name = p.get('blueprint_name')
+    wait_for_complete = p.get('wait_for_complete')
+
+    ambari_url = 'http://{0}:{1}'.format(host, port)
+
+    try:
+        if cluster_state in ['started', 'stopped']:
+            if not cluster_exists(ambari_url, username, password, cluster_name):
+                module.fail_json(msg="Cluster name {0} does not exist".format(cluster_name))
+            state = ''
+            if cluster_state == 'started':
+                state = 'STARTED'
+            elif cluster_state == 'stopped':
+                state = 'INSTALLED'
+
+            request = set_cluster_state(ambari_url, username, password, cluster_name, state)
+            if wait_for_complete:
+                request_id = json.loads(request.content)['Requests']['id']
+                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
+                if status != 'COMPLETED':
+                    module.fail_json(msg="Request failed with status {0}".format(status))
+            module.exit_json(changed=True, results=request.content)
+        elif cluster_state == 'absent':
+            if not cluster_exists(ambari_url, username, password, cluster_name):
+                module.exit_json(changed=False, msg='Skipping. Cluster does not exist')
+            if not can_delete_cluster(ambari_url, username, password, cluster_name):
+                request = set_cluster_state(ambari_url, username, password, cluster_name, 'INSTALLED')
+                request_id = json.loads(request.content)['Requests']['id']
+                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
+                if status != 'COMPLETED':
+                    module.fail_json(msg="Request failed with status {0}".format(status))
+            request = delete_cluster(ambari_url, username, password, cluster_name)
+            module.exit_json(changed=True, results=request.content)
+        elif cluster_state == 'present':
+            if not p.get('blueprint_var') or not blueprint_name:  # have neither name nor file
+                module.fail_json(msg="Must provide blueprint_var and blueprint_name when cluster_state=='present'")
+
+            blueprint_var = p.get('blueprint_var')
+            blueprint, host_map = blueprint_var_to_ambari_converter(blueprint_var)
+            created_blueprint = False
+
+            if not blueprint_exists(ambari_url, username, password, blueprint_name):
+                create_blueprint(ambari_url, username, password, blueprint_name, blueprint)
+                created_blueprint = True
+
+            if cluster_exists(ambari_url, username, password, cluster_name):
+                module.exit_json(changed=False, msg='Cluster {0} already exists'.format(cluster_name),
+                                 created_blueprint=created_blueprint)
+
+            configurations = p.get('configurations')
+            request = create_cluster(ambari_url, username, password, cluster_name, blueprint_name, configurations, host_map)
+            request_id = json.loads(request.content)['Requests']['id']
+            if wait_for_complete:
+                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
+                if status != 'COMPLETED':
+                    module.fail_json(msg="Request failed with status {0}".format(status))
+            request_status = get_request_status(ambari_url, username, password, cluster_name, request_id)
+            module.exit_json(changed=True, results=request.content,
+                             created_blueprint=created_blueprint, status=request_status)
+
+    except requests.ConnectionError, e:
+        module.fail_json(msg="Could not connect to Ambari client: " + str(e.message))
+    except Exception, e:
+        module.fail_json(msg="Ambari client exception occurred: " + str(e.message))
+
+
+def get_clusters(ambari_url, user, password):
+    r = get(ambari_url, user, password, '/api/v1/clusters')
+    if r.status_code != 200:
+        msg = 'Coud not get cluster list: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    clusters = json.loads(r.content)
+    return clusters['items']
+
+
+def cluster_exists(ambari_url, user, password, cluster_name):
+    clusters = get_clusters(ambari_url, user, password)
+    return cluster_name in [item['Clusters']['cluster_name'] for item in clusters]
+
+
+def set_cluster_state(ambari_url, user, password, cluster_name, cluster_state):
+    path = '/api/v1/clusters/{0}/services'.format(cluster_name)
+    request = {"RequestInfo": {"context": "Setting cluster state"},
+               "Body": {"ServiceInfo": {"state": "{0}".format(cluster_state)}}}
+    payload = json.dumps(request)
+    r = put(ambari_url, user, password, path, payload)
+    if r.status_code not in [202, 200]:
+        msg = 'Coud not set cluster state: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    return r
+
+
+def create_cluster(ambari_url, user, password, cluster_name, blueprint_name, configurations, hosts_json):
+    path = '/api/v1/clusters/{0}'.format(cluster_name)
+    data = json.dumps({'blueprint': blueprint_name, 'configurations': configurations, 'host_groups': hosts_json})
+    f = open('cluster.log', 'w')
+    f.write(data)
+    f.close()
+    r = post(ambari_url, user, password, path, data)
+    if r.status_code != 202:
+        msg = 'Coud not create cluster: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    return r
+
+
+def get_request_status(ambari_url, user, password, cluster_name, request_id):
+    path = '/api/v1/clusters/{0}/requests/{1}'.format(cluster_name, request_id)
+    r = get(ambari_url, user, password, path)
+    if r.status_code != 200:
+        msg = 'Coud not get cluster request status: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    service = json.loads(r.content)
+    return service['Requests']['request_status']
+
+
+def wait_for_request_complete(ambari_url, user, password, cluster_name, request_id, sleep_time):
+    # Poll the Ambari request status every `sleep_time` seconds until it
+    # reaches a terminal state, then return that status string; the caller
+    # decides how to react to non-COMPLETED outcomes.
+    # NOTE(review): `time` is not imported at the top of this file; it is
+    # presumably brought into scope by `from ansible.module_utils.basic
+    # import *` at the bottom -- confirm, or add an explicit `import time`.
+    while True:
+        status = get_request_status(ambari_url, user, password, cluster_name, request_id)
+        if status == 'COMPLETED':
+            return status
+        elif status in ['FAILED', 'TIMEDOUT', 'ABORTED', 'SKIPPED_FAILED']:
+            return status
+        else:
+            time.sleep(sleep_time)
+
+
+def can_delete_cluster(ambari_url, user, password, cluster_name):
+    path = '/api/v1/clusters/{0}/services?ServiceInfo/state=STARTED'.format(cluster_name)
+    r = get(ambari_url, user, password, path)
+    items = json.loads(r.content)['items']
+    return len(items) > 0
+
+
+def get_blueprints(ambari_url, user, password):
+    path = '/api/v1/blueprints'
+    r = get(ambari_url, user, password, path)
+    if r.status_code != 200:
+        msg = 'Coud not get blueprint list: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+
+    services = json.loads(r.content)
+    return services['items']
+
+
+def create_blueprint(ambari_url, user, password, blueprint_name, blueprint_data):
+    data = json.dumps(blueprint_data)
+    f = open('blueprint.log', 'w')
+    f.write(data)
+    f.close()
+    path = "/api/v1/blueprints/" + blueprint_name
+    r = post(ambari_url, user, password, path, data)
+    if r.status_code != 201:
+        msg = 'Coud not create blueprint: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    return r
+
+
+def blueprint_exists(ambari_url, user, password, blueprint_name):
+    blueprints = get_blueprints(ambari_url, user, password)
+    return blueprint_name in [item['Blueprints']['blueprint_name'] for item in blueprints]
+
+
+def delete_cluster(ambari_url, user, password, cluster_name):
+    path = '/api/v1/clusters/{0}'.format(cluster_name)
+    r = delete(ambari_url, user, password, path)
+    if r.status_code != 200:
+        msg = 'Coud not delete cluster: request code {0}, \
+                    request message {1}'.format(r.status_code, r.content)
+        raise Exception(msg)
+    return r
+
+
+def get(ambari_url, user, password, path):
+    r = requests.get(ambari_url + path, auth=(user, password))
+    return r
+
+
+def put(ambari_url, user, password, path, data):
+    headers = {'X-Requested-By': 'ambari'}
+    r = requests.put(ambari_url + path, data=data, auth=(user, password), headers=headers)
+    return r
+
+
+def post(ambari_url, user, password, path, data):
+    headers = {'X-Requested-By': 'ambari'}
+    r = requests.post(ambari_url + path, data=data, auth=(user, password), headers=headers)
+    return r
+
+
+def delete(ambari_url, user, password, path):
+    headers = {'X-Requested-By': 'ambari'}
+    r = requests.delete(ambari_url + path, auth=(user, password), headers=headers)
+    return r
+
+
+def blueprint_var_to_ambari_converter(blueprint_var):
+    # Convert the Ansible-side blueprint description into the two payloads
+    # Ambari expects: a blueprint document and a host-group -> host mapping.
+    # Returns the tuple (blueprint, host_map).
+    # NOTE(review): this mutates the dicts inside blueprint_var['groups'] in
+    # place ('components' rewritten, 'hosts' popped) -- callers should not
+    # reuse blueprint_var afterwards.
+    groups = blueprint_var['groups']
+    new_groups = []
+    host_map = []
+    for group in groups:
+        components = []
+        for component in group['components']:
+            # Ambari wants each component wrapped as {'name': <component>}.
+            components.append({'name': component})
+        group['components'] = components
+        hosts = group.pop('hosts')
+        new_groups.append(group)
+        this_host_map = dict()
+        this_host_map['name'] = group['name']
+        this_host_list = [{'fqdn': host} for host in hosts]
+        this_host_map['hosts'] = this_host_list
+        host_map.append(this_host_map)
+    blueprint = dict()
+    blueprint['host_groups'] = new_groups
+    blueprint['Blueprints'] = {'stack_name': blueprint_var['stack_name'], 'stack_version': blueprint_var['stack_version']}
+
+    return blueprint, host_map
+
+# Ansible-module convention: the wildcard import supplies AnsibleModule,
+# BOOLEANS and other runtime helpers referenced in main().
+from ansible.module_utils.basic import *
+if __name__ == '__main__':
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/dev-vagrant/group_vars/all
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/dev-vagrant/group_vars/all b/metron-deployment/inventory/dev-vagrant/group_vars/all
new file mode 100644
index 0000000..b85f327
--- /dev/null
+++ b/metron-deployment/inventory/dev-vagrant/group_vars/all
@@ -0,0 +1,86 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#Ambari variables
+ambari_host: "{{ groups.ambari_master[0] }}"
+hdp_host_group: "{{ groups.ambari_slave }}"
+ambari_port: 8080
+ambari_user: admin
+ambari_password: admin
+cluster_type: single_node_vm
+
+# hbase
+pcap_hbase_table: pcap
+tracker_hbase_table: access_tracker
+threatintel_hbase_table: threatintel
+enrichment_hbase_table: enrichment
+
+# metron variables
+metron_version: 0.1BETA
+metron_directory: /usr/metron/{{ metron_version }}
+java_home: /usr/jdk64/jdk1.8.0_40
+bro_version: "2.4.1"
+fixbuf_version: "1.7.1"
+yaf_version: "2.8.0"
+daq_version: "2.0.6-1"
+pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
+pycapa_home: "/opt/pycapa"
+snort_version: "2.9.8.0-1"
+snort_alert_csv_path: "/var/log/snort/alert.csv"
+
+#data directories - only required to override defaults
+zookeeper_data_dir: "/data1/hadoop/zookeeper"
+namenode_checkpoint_dir: "/data1/hadoop/hdfs/namesecondary"
+namenode_name_dir: "/data1/hadoop/hdfs/namenode"
+datanode_data_dir: "/data1/hadoop/hdfs/data,/data2/hadoop/hdfs/data"
+journalnode_edits_dir: "/data1/hadoop/hdfs/journalnode"
+nodemanager_local_dirs: "/data1/hadoop/yarn/local"
+timeline_ldb_store_path: "/data1/hadoop/yarn/timeline"
+timeline_ldb_state_path: "/data1/hadoop/yarn/timeline"
+nodemanager_log_dirs: "/data1/hadoop/yarn/log"
+jhs_recovery_store_ldb_path: "/data1/hadoop/mapreduce/jhs"
+storm_local_dir: "/data1/hadoop/storm"
+kafka_log_dirs: "/data1/kafka-log"
+elasticsearch_data_dir: "/data1/elasticsearch,/data2/elasticsearch"
+
+ambari_server_mem: 512
+threat_intel_bulk_load: False
+
+#Sensors
+install_pycapa: False
+install_bro: False
+install_snort: False
+install_yaf: False
+pcap_replay: True
+sniff_interface: eth1
+pcap_replay_interface: "{{ sniff_interface }}"
+storm_topologies:
+    - "{{ metron_directory }}/config/topologies/bro/remote.yaml"
+    - "{{ metron_directory }}/config/topologies/snort/remote.yaml"
+    - "{{ metron_directory }}/config/topologies/yaf/remote.yaml"
+    - "{{ metron_directory }}/config/topologies/enrichment/remote.yaml"
+pcapservice_port: 8081
+
+#Search
+install_elasticsearch: True
+install_solr: False
+solr_collection_name: Metron
+solr_number_shards: 1
+solr_replication_factor: 1
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth1
+elasticsearch_web_port: 9200
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/dev-vagrant/hosts
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/dev-vagrant/hosts b/metron-deployment/inventory/dev-vagrant/hosts
new file mode 100644
index 0000000..6fd8b18
--- /dev/null
+++ b/metron-deployment/inventory/dev-vagrant/hosts
@@ -0,0 +1,48 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[ambari_master]
+node1
+
+[ambari_slave]
+node1
+
+[hadoop_client]
+node1
+
+[enrichment]
+node1
+
+[search]
+node1
+
+[web]
+node1
+
+[sensors]
+node1
+
+[mysql]
+node1
+
+[metron:children]
+enrichment
+search
+web
+sensors
+mysql
+hadoop_client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/metron_example/group_vars/all
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/metron_example/group_vars/all b/metron-deployment/inventory/metron_example/group_vars/all
new file mode 100644
index 0000000..097516d
--- /dev/null
+++ b/metron-deployment/inventory/metron_example/group_vars/all
@@ -0,0 +1,77 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#Ansible Variables
+ansible_ssh_private_key_file: /Path/to/private/key/file #Change This
+ansible_ssh_user: root
+
+#Ambari variables
+ambari_host: "{{ groups.ambari_master[0] }}"
+ambari_port: 8080
+ambari_user: admin
+ambari_password: admin
+cluster_type: small_cluster
+
+# hbase
+pcap_hbase_table: pcap
+tracker_hbase_table: access_tracker
+threatintel_hbase_table: threatintel
+enrichment_hbase_table: enrichment
+
+# metron variables
+metron_version: 0.1BETA
+java_home: /usr/jdk64/jdk1.8.0_40
+pcapservice_port: 8081
+
+# sensors
+sensor_test_mode: True
+sniff_interface: eth0
+bro_version: "2.4.1"
+fixbuf_version: "1.7.1"
+yaf_version: "2.8.0"
+daq_version: "2.0.6-1"
+iface: "eth0"
+pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
+pycapa_home: "/opt/pycapa"
+snort_version: "2.9.8.0-1"
+snort_alert_csv_path: "/var/log/snort/alert.csv"
+
+#PCAP Replay
+pcap_replay: True
+pcap_replay_interface: eth1
+
+#data directories - only required to override defaults
+#zookeeper_data_dir: "/newdir/hadoop/zookeeper"
+#namenode_checkpoint_dir: "/newdir/hadoop/hdfs/namesecondary"
+#namenode_name_dir: "/newdir/hadoop/hdfs/namenode"
+#datanode_data_dir: "/newdir/hadoop/hdfs/data"
+#journalnode_edits_dir: "/newdir/hadoop/hdfs/journalnode"
+#nodemanager_local_dirs: "/newdir/hadoop/yarn/local"
+#timeline_ldb_store_path: "/newdir/hadoop/yarn/timeline"
+#timeline_ldb_state_path: "/newdir/hadoop/yarn/timeline"
+#nodemanager_log_dirs: "/newdir/hadoop/yarn/log"
+#jhs_recovery_store_ldb_path: "/newdir/hadoop/mapreduce/jhs"
+#storm_local_dir: "/newdir/hadoop/storm"
+#kafka_log_dirs: "/newdir/kafka-log"
+#elasticsearch_data_dir: "/newdir1/elasticsearch"
+
+#Search
+install_elasticsearch: True
+install_solr: False
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth1
+elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/metron_example/hosts
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/metron_example/hosts b/metron-deployment/inventory/metron_example/hosts
new file mode 100644
index 0000000..0d01327
--- /dev/null
+++ b/metron-deployment/inventory/metron_example/hosts
@@ -0,0 +1,63 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[ambari_master]
+node1
+
+#minimum of 3 - 6 from 12 node cluster
+[ambari_slave]
+node2
+node3
+node4
+node5
+node6
+node7
+node8
+
+#last ambari_slave
+[hadoop_client]
+node9
+
+#3rd ambari_slave
+[enrichment]
+node1
+
+#1 or more
+[search]
+node10
+node11
+node12
+
+#1 only
+[sensors]
+node1
+
+#same as mysql in 12 node topology
+[web]
+node12
+
+[mysql]
+node12
+
+[metron:children]
+enrichment
+search
+web
+sensors
+mysql
+hadoop_client
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/multinode-vagrant/group_vars/all
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/multinode-vagrant/group_vars/all b/metron-deployment/inventory/multinode-vagrant/group_vars/all
new file mode 100644
index 0000000..a4a6af5
--- /dev/null
+++ b/metron-deployment/inventory/multinode-vagrant/group_vars/all
@@ -0,0 +1,75 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#Ambari variables
+ambari_host: "{{ groups.ambari_master[0] }}"
+hdp_host_group: "{{ groups.ambari_slave }}"
+ambari_port: 8080
+ambari_user: admin
+ambari_password: admin
+cluster_type: multi_vagrant_cluster
+
+# hbase
+pcap_hbase_table: pcap
+tracker_hbase_table: access_tracker
+threatintel_hbase_table: threatintel
+enrichment_hbase_table: enrichment
+
+#elasticsearch
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth1
+elasticsearch_web_port: 9200
+
+# metron variables
+metron_version: 0.1BETA
+java_home: /usr/jdk64/jdk1.8.0_40
+pcapservice_port: 8081
+
+# sensors
+sensor_test_mode: True
+sniff_interface: eth1
+bro_version: "2.4.1"
+fixbuf_version: "1.7.1"
+yaf_version: "2.8.0"
+daq_version: "2.0.6-1"
+iface: "eth0"
+pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
+pycapa_home: "/opt/pycapa"
+snort_version: "2.9.8.0-1"
+snort_alert_csv_path: "/var/log/snort/alert.csv"
+
+#data directories
+#zookeeper_data_dir: "/newdir/hadoop/zookeeper"
+#namenode_checkpoint_dir: "/newdir/hadoop/hdfs/namesecondary"
+#namenode_name_dir: "/newdir/hadoop/hdfs/namenode"
+#datanode_data_dir: "/newdir/hadoop/hdfs/data"
+#journalnode_edits_dir: "/newdir/hadoop/hdfs/journalnode"
+#nodemanager_local_dirs: "/newdir/hadoop/yarn/local"
+#timeline_ldb_store_path: "/newdir/hadoop/yarn/timeline"
+#timeline_ldb_state_path: "/newdir/hadoop/yarn/timeline"
+#nodemanager_log_dirs: "/newdir/hadoop/yarn/log"
+#jhs_recovery_store_ldb_path: "/newdir/hadoop/mapreduce/jhs"
+#storm_local_dir: "/newdir/hadoop/storm"
+#kafka_log_dirs: "/newdir/kafka-log"
+#elasticsearch_data_dir: "/newdir1/elasticsearch"
+
+#Search
+install_elasticsearch: True
+install_solr: False
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth1
+elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/multinode-vagrant/hosts
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/multinode-vagrant/hosts b/metron-deployment/inventory/multinode-vagrant/hosts
new file mode 100644
index 0000000..d84ab1e
--- /dev/null
+++ b/metron-deployment/inventory/multinode-vagrant/hosts
@@ -0,0 +1,59 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[ambari_master]
+node1
+
+#minimum of 3 - 6 from 12 node cluster
+[ambari_slave]
+node2
+node3
+node4
+
+#last ambari_slave
+[hadoop_client]
+node4
+
+[enrichment]
+node4
+
+#1 or more
+[search]
+node1
+
+#1 only
+[sensors]
+node1
+
+#same as mysql in 12 node topology
+[web]
+node3
+
+[mysql]
+node3
+
+[metron:children]
+enrichment
+search
+web
+sensors
+mysql
+hadoop_client
+
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/singlenode-vagrant/group_vars/all
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/singlenode-vagrant/group_vars/all b/metron-deployment/inventory/singlenode-vagrant/group_vars/all
new file mode 100644
index 0000000..9b85aeb
--- /dev/null
+++ b/metron-deployment/inventory/singlenode-vagrant/group_vars/all
@@ -0,0 +1,86 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#Ambari variables
+ambari_host: "{{ groups.ambari_master[0] }}"
+hdp_host_group: "{{ groups.ambari_slave }}"
+ambari_port: 8080
+ambari_user: admin
+ambari_password: admin
+cluster_type: single_node_vm
+
+# hbase
+pcap_hbase_table: pcap
+tracker_hbase_table: access_tracker
+threatintel_hbase_table: threatintel
+enrichment_hbase_table: enrichment
+
+# metron variables
+metron_version: 0.1BETA
+metron_directory: /usr/metron/{{ metron_version }}
+java_home: /usr/jdk64/jdk1.8.0_40
+bro_version: "2.4.1"
+fixbuf_version: "1.7.1"
+yaf_version: "2.8.0"
+daq_version: "2.0.6-1"
+pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
+pycapa_home: "/opt/pycapa"
+snort_version: "2.9.8.0-1"
+snort_alert_csv_path: "/var/log/snort/alert.csv"
+
+#data directories - only required to override defaults
+zookeeper_data_dir: "/data1/hadoop/zookeeper"
+namenode_checkpoint_dir: "/data1/hadoop/hdfs/namesecondary"
+namenode_name_dir: "/data1/hadoop/hdfs/namenode"
+datanode_data_dir: "/data1/hadoop/hdfs/data,/data2/hadoop/hdfs/data"
+journalnode_edits_dir: "/data1/hadoop/hdfs/journalnode"
+nodemanager_local_dirs: "/data1/hadoop/yarn/local"
+timeline_ldb_store_path: "/data1/hadoop/yarn/timeline"
+timeline_ldb_state_path: "/data1/hadoop/yarn/timeline"
+nodemanager_log_dirs: "/data1/hadoop/yarn/log"
+jhs_recovery_store_ldb_path: "/data1/hadoop/mapreduce/jhs"
+storm_local_dir: "/data1/hadoop/storm"
+kafka_log_dirs: "/data1/kafka-log"
+elasticsearch_data_dir: "/data1/elasticsearch,/data2/elasticsearch"
+
+ambari_server_mem: 512
+threat_intel_bulk_load: False
+
+# sensors
+sensor_test_mode: True
+install_pycapa: False
+install_bro: True
+install_snort: True
+install_yaf: True
+pcap_replay: True
+sniff_interface: eth1
+pcap_replay_interface: "{{ sniff_interface }}"
+storm_parser_topologies:
+    - "{{ metron_directory }}/flux/bro/remote.yaml"
+    - "{{ metron_directory }}/flux/snort/remote.yaml"
+    - "{{ metron_directory }}/flux/yaf/remote.yaml"
+pcapservice_port: 8081
+
+#Search
+install_elasticsearch: True
+install_solr: False
+solr_collection_name: Metron
+solr_number_shards: 1
+solr_replication_factor: 1
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth1
+elasticsearch_web_port: 9200
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/inventory/singlenode-vagrant/hosts
----------------------------------------------------------------------
diff --git a/metron-deployment/inventory/singlenode-vagrant/hosts b/metron-deployment/inventory/singlenode-vagrant/hosts
new file mode 100644
index 0000000..6fd8b18
--- /dev/null
+++ b/metron-deployment/inventory/singlenode-vagrant/hosts
@@ -0,0 +1,48 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[ambari_master]
+node1
+
+[ambari_slave]
+node1
+
+[hadoop_client]
+node1
+
+[enrichment]
+node1
+
+[search]
+node1
+
+[web]
+node1
+
+[sensors]
+node1
+
+[mysql]
+node1
+
+[metron:children]
+enrichment
+search
+web
+sensors
+mysql
+hadoop_client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/playbooks/ambari_install.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/playbooks/ambari_install.yml b/metron-deployment/playbooks/ambari_install.yml
new file mode 100644
index 0000000..685753c
--- /dev/null
+++ b/metron-deployment/playbooks/ambari_install.yml
@@ -0,0 +1,55 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- hosts: ec2
+  become: true
+  tasks:
+    - include_vars: ../amazon-ec2/conf/defaults.yml
+  tags:
+    - ec2
+
+- hosts: ambari_*
+  become: true
+  roles:
+    - role: ambari_common
+  tags:
+    - ambari-prereqs
+    - hdp-install
+
+- hosts: ambari_master
+  become: true
+  roles:
+    - role:  ambari_master
+  tags:
+    - ambari-server
+    - hdp-install
+
+- hosts: ambari_slave
+  become: true
+  roles:
+    - role: ambari_slave
+  tags:
+    - ambari-agent
+    - hdp-install
+
+- hosts: ambari_master
+  become: true
+  roles:
+    - role: ambari_config
+  tags:
+    - hdp-install
+    - hdp-deploy

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/playbooks/metron_full_install.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/playbooks/metron_full_install.yml b/metron-deployment/playbooks/metron_full_install.yml
new file mode 100644
index 0000000..26ffd62
--- /dev/null
+++ b/metron-deployment/playbooks/metron_full_install.yml
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: ambari_install.yml
+  tags:
+    - ambari
+- include: metron_install.yml
+  tags:
+    - metron

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/playbooks/metron_install.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/playbooks/metron_install.yml b/metron-deployment/playbooks/metron_install.yml
new file mode 100644
index 0000000..f6bc492
--- /dev/null
+++ b/metron-deployment/playbooks/metron_install.yml
@@ -0,0 +1,96 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- hosts: ec2
+  become: true
+  tasks:
+    - include_vars: ../amazon-ec2/conf/defaults.yml
+  tags:
+    - ec2
+
+- hosts: metron
+  become: true
+  roles:
+    - role: metron_common
+  tags:
+    - metron-prereqs
+
+- hosts: hadoop_client
+  become: true
+  roles:
+    - role: hadoop_setup
+  tags:
+    - metron-prereqs
+
+- hosts: search
+  become: true
+  vars:
+    es_hosts: "{% set comma = joiner(',') %}{% for host in groups['search'] -%}{{ comma() }}{{ host }}{%- endfor %}"
+  roles:
+    - { role: elasticsearch, when: install_elasticsearch | default(True) == True }
+  tags:
+    - search
+
+- hosts: search
+  become: true
+  roles:
+    - { role: solr, when: install_solr | default(False) == True  }
+  tags:
+    - search
+
+- hosts: mysql
+  become: true
+  roles:
+    - role: mysql_server
+  tags:
+    - mysql-server
+
+- hosts: ambari_slave
+  become: true
+  roles:
+    - role: mysql_client
+  tags:
+    - mysql-client
+
+- hosts: sensors
+  become: true
+  roles:
+    - { role: tap_interface, when: install_tap | default(False) == True }
+    - { role: pycapa, when: install_pycapa | default(True) == True }
+    - { role: bro, when: install_bro | default(True) == True }
+    - { role: flume,  when: install_snort | default(True) == True }
+    - { role: snort , when: install_snort | default(True) == True }
+    - { role: yaf, when: install_yaf | default(True) == True }
+    - { role: pcap_replay , when: (pcap_replay | default(False)) or (sensor_test_mode | default(False)) == True }
+    - { role: sensor-test-mode, when: sensor_test_mode | default(False) == True }
+  tags:
+      - sensors
+
+- hosts: enrichment
+  become: true
+  roles:
+    - role: metron_streaming
+  tags:
+    - enrichment
+
+- hosts: web
+  become: true
+  roles:
+    - { role: metron_ui, when: install_elasticsearch | default(True) == True }
+    - { role: metron_pcapservice, when: install_elasticsearch | default(True) == True }
+  tags:
+    - web

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/defaults/main.yml b/metron-deployment/roles/ambari_common/defaults/main.yml
new file mode 100644
index 0000000..65c83d9
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/defaults/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+hadoop_logrotate_frequency: daily
+hadoop_logrotate_retention: 30

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/meta/main.yml b/metron-deployment/roles/ambari_common/meta/main.yml
new file mode 100644
index 0000000..8992ac1
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/meta/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - libselinux-python
+  - yum-update
+  - epel
+  - ntp

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/tasks/main.yml b/metron-deployment/roles/ambari_common/tasks/main.yml
new file mode 100644
index 0000000..2526923
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/tasks/main.yml
@@ -0,0 +1,52 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Check OS Version
+  fail: msg="Ambari HDP deployment supports CentOS 6 only."
+  when: (ansible_distribution != "CentOS" or ansible_distribution_major_version != "6")
+
+- include: passwd_less_ssh.yml
+
+- name: Ensure iptables is stopped and is not running at boot time.
+  ignore_errors: yes
+  service: name=iptables state=stopped enabled=no
+
+#
+# ambari uses socket.getfqdn() to find the hostname. with 'localhost.localdomain'
+# in '/etc/hosts' this function will report the hostname as 'localhost.localdomain'
+# rather than 'node1' as would be expected.  other functions like socket.gethostname()
+# will always return 'node1' as expected.  ambari needs to see 'node1' to be able to
+# communicate between the master and agents.
+
+- name: Remove ipv4 'localhost.localdomain' from /etc/hosts
+  lineinfile: dest=/etc/hosts state=absent regexp="^127.0.0.1(.*)localdomain(.*)$"
+
+- name: Remove ipv6 'localhost.localdomain' from /etc/hosts
+  lineinfile: dest=/etc/hosts state=absent regexp="^::1(.*)localdomain(.*)$"
+
+- name: Add localhost to /etc/hosts
+  lineinfile: dest=/etc/hosts line="127.0.0.1   localhost"
+
+- name: Download Ambari repo
+  get_url: url="{{ rhel_ambari_install_url }}" dest=/etc/yum.repos.d/ambari.repo
+
+- name: Create Logrotate Script for Hadoop Services
+  template:
+    src: "metron-hadoop-logrotate.yml"
+    dest: "/etc/logrotate.d/metron-ambari"
+    mode: 0644
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/tasks/passwd_less_ssh.yml b/metron-deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
new file mode 100644
index 0000000..0928e34
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
@@ -0,0 +1,32 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Generate ssh key pair for "{{ ambari_user }}"
+  user: name={{ ambari_user }} generate_ssh_key=yes
+
+- name: Fetch the generated public key
+  fetch: src=~{{ ambari_user }}/.ssh/id_rsa.pub dest=/tmp/keys/{{ inventory_hostname }}.pub flat=yes
+
+- name: Add key pairs to existing authorized_keys
+  authorized_key: user={{ ambari_user }} key="{{ lookup('file', '/tmp/keys/{{ item }}.pub') }}"
+  with_items:
+    - "{{ play_hosts }}"
+
+- name: Remove local copy of ssh keys
+  local_action: file path=/tmp/keys/{{ inventory_hostname }}.pub state=absent
+  become: False
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml b/metron-deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
new file mode 100644
index 0000000..042b490
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
@@ -0,0 +1,135 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#Hadoop HDFS Logs
+/var/log/hadoop/hdfs/*.log* {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/hadoop/hdfs/*.out {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/hadoop/hdfs/*.audit {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#Hadoop Yarn Logs
+/var/log/hadoop/yarn/*.log {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#Hadoop Mapreduce Logs
+/var/log/hadoop/mapreduce/*.log {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#Storm Logs
+/var/log/storm/*.log {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/storm/*.out {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#Kafka Logs
+/var/log/kafka/*.log {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/kafka/*.err {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#HBase Logs
+/var/log/hbase/*.log* {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/hbase/*.out {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/hbase/*.audit {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+#Zookeeper Logs
+/var/log/zookeeper/*.log {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+
+/var/log/zookeeper/*.out {
+  {{ hadoop_logrotate_frequency }}
+  rotate {{ hadoop_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_common/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_common/vars/main.yml b/metron-deployment/roles/ambari_common/vars/main.yml
new file mode 100644
index 0000000..699dcf7
--- /dev/null
+++ b/metron-deployment/roles/ambari_common/vars/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+rhel_ambari_install_url: "http://public-repo-1.hortonworks.com/ambari/centos6/2.x/updates/2.1.2.1/ambari.repo"
+ambari_user: "root"
+local_tmp_keygen_file: "/tmp/id_rsa.tmp"
+dest_tmp_keygen_file: "/tmp/id_rsa.tmp"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/defaults/main.yml b/metron-deployment/roles/ambari_config/defaults/main.yml
new file mode 100644
index 0000000..507b6e3
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/defaults/main.yml
@@ -0,0 +1,30 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+zookeeper_data_dir: /hadoop/zookeeper
+namenode_checkpoint_dir: /hadoop/hdfs/namesecondary
+namenode_name_dir: /hadoop/hdfs/namenode
+datanode_data_dir: /hadoop/hdfs/data
+journalnode_edits_dir: /hadoop/hdfs/journalnode
+jhs_recovery_store_ldb_path: /hadoop/mapreduce/jhs
+nodemanager_local_dirs: /hadoop/yarn/local
+timeline_ldb_store_path: /hadoop/yarn/timeline
+timeline_ldb_state_path: /hadoop/yarn/timeline
+nodemanager_log_dirs: /hadoop/yarn/log
+storm_local_dir: /hadoop/storm
+kafka_log_dirs: /kafka-log
+cluster_type: small_cluster

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/meta/main.yml b/metron-deployment/roles/ambari_config/meta/main.yml
new file mode 100644
index 0000000..61197e3
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/meta/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - epel
+  - python-pip
+  - httplib2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/tasks/main.yml b/metron-deployment/roles/ambari_config/tasks/main.yml
new file mode 100644
index 0000000..f44f929
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/tasks/main.yml
@@ -0,0 +1,42 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include_vars: "{{ cluster_type }}.yml"
+
+- name: Install python-requests
+  yum:
+    name: python-requests
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Deploy cluster with Ambari; http://{{ groups.ambari_master[0] }}:{{ ambari_port }}
+  ambari_cluster_state:
+    host: "{{ groups.ambari_master[0] }}"
+    port: "{{ ambari_port }}"
+    username: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    cluster_name: "{{ cluster_name }}"
+    cluster_state: present
+    blueprint_name: "{{ blueprint_name }}"
+    configurations: "{{ configurations }}"
+    wait_for_complete: True
+    blueprint_var: "{{ blueprint }}"
+
+- include: start_services.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/tasks/start_services.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/tasks/start_services.yml b/metron-deployment/roles/ambari_config/tasks/start_services.yml
new file mode 100644
index 0000000..7c6e0a9
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/tasks/start_services.yml
@@ -0,0 +1,48 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Start All Hadoop Services {{ inventory_hostname }}
+  uri:
+    url: http://{{ inventory_hostname}}:{{ ambari_port}}/api/v1/clusters/{{ cluster_name }}/services/{{ item }}
+    HEADER_X-Requested-By: "{{ ambari_user }}"
+    method: PUT
+    body: "{ \"RequestInfo\": { \"context\": \"Start service via REST\" }, \"Body\": { \"ServiceInfo\": { \"state\": \"STARTED\" }}}"
+    body_format: json
+    status_code: 200,202
+    force_basic_auth: yes
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+  with_items:
+    - "{{ metron_services }}"
+
+- name: Wait for Service Start
+  uri:
+    url: http://{{ inventory_hostname}}:{{ ambari_port}}/api/v1/clusters/{{ cluster_name }}/services/{{ item }}
+    HEADER_X-Requested-By: "{{ ambari_user }}"
+    method: GET
+    status_code: 200
+    force_basic_auth: yes
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    return_content: yes
+  with_items:
+    - "{{ metron_services }}"
+  register: result
+  until: result.content.find("STARTED") != -1
+  retries: 10
+  delay: 60
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml b/metron-deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
new file mode 100644
index 0000000..526661b
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
@@ -0,0 +1,99 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+
+hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
+hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, NODEMANAGER]
+spark_master: [SPARK_JOBHISTORYSERVER]
+storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
+storm_slave: [SUPERVISOR]
+kafka_broker: [KAFKA_BROKER]
+zookeeper_master: [ZOOKEEPER_SERVER]
+hbase_master: [HBASE_MASTER]
+hbase_slave: [HBASE_REGIONSERVER]
+hadoop_clients: [HDFS_CLIENT, YARN_CLIENT, MAPREDUCE2_CLIENT, SPARK_CLIENT, ZOOKEEPER_CLIENT, HBASE_CLIENT]
+
+metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
+
+master_1_components: "{{ hadoop_master | union(hadoop_clients) }}"
+master_1_host:
+  - "{{groups.ambari_slave[0]}}"
+master_2_components: "{{ zookeeper_master | union(storm_master) | union(spark_master) | union(hbase_master) | union(hadoop_clients) }}"
+master_2_host:
+  - "{{groups.ambari_slave[1]}}"
+slave_components: "{{ hadoop_slave | union(storm_slave) | union(kafka_broker) | union(hbase_slave) | union(hadoop_clients) }}"
+
+cluster_name: "metron"
+blueprint_name: "metron_blueprint"
+
+configurations:
+  - zoo.cfg:
+      dataDir: '{{ zookeeper_data_dir | default("/hadoop/zookeeper") }}'
+  - hdfs-site:
+      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir | default("/hadoop/hdfs/namesecondary") }}'
+      dfs.namenode.name.dir: '{{ namenode_name_dir | default("/hadoop/hdfs/namenode") }}'
+      dfs.datanode.data.dir: '{{ datanode_data_dir | default("/hadoop/hdfs/data" ) }}'
+      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir | default("/hadoop/hdfs/journalnode") }}'
+  - hadoop-env:
+      namenode_heapsize: 1024
+      dtnode_heapsize: 1024
+  - hbase-env:
+      hbase_regionserver_heapsize: 1024
+      hbase_master_heapsize: 1024
+  - yarn-env:
+      nodemanager_heapsize: 512
+      yarn_heapsize: 512
+      apptimelineserver_heapsize : 512
+  - mapred-env:
+      jobhistory_heapsize: 256
+  - yarn-site:
+      yarn.nodemanager.resource.memory-mb: 1024
+      yarn.scheduler.maximum-allocation-mb: 1024
+      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs| default("/hadoop/yarn/local") }}'
+      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path | default("/hadoop/yarn/timeline") }}'
+      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path| default("/hadoop/yarn/timeline") }}'
+      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs| default("/hadoop/yarn/log") }}'
+
+  - mapred-site:
+      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path | default("/hadoop/mapreduce/jhs") }}'
+  - storm-site:
+      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
+      storm.local.dir: '{{ storm_local_dir | default("/hadoop/storm") }}'
+  - kafka-env:
+      content: "{% raw %}\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport KAFKA_HEAP_OPTS=\"-Xms256M -Xmx256M\"\nexport KAFKA_JVM_PERFORMANCE_OPTS=\"-server -XX:+UseG1GC -XX:+DisableExplicitGC -Djava.awt.headless=true\"\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport PID_DIR={{kafka_pid_dir}}\nexport LOG_DIR={{kafka_log_dir}}\nexport KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}\n# Add kafka sink to classpath and related dependencies\nif [ -e \"/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\" ]; then\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/lib/*\nfi\nif [ -f /etc/kafka/conf/kafka-ranger-env.sh ]; then\n   . /etc/kafka/conf/kafka-ranger-env.sh\nfi{% endraw %}"
+  - kafka-broker:
+      log.dirs: '{{ kafka_log_dirs | default("/kafka-log") }}'
+
+blueprint:
+  stack_name: HDP
+  stack_version: 2.3
+  groups:
+    - name : master_1
+      cardinality: 1
+      configuration: []  # configuration not yet implemented
+      components: "{{ master_1_components }}"
+      hosts: "{{ master_1_host }}"
+    - name : master_2
+      cardinality: 1
+      configuration: []  # configuration not yet implemented
+      components: "{{ master_2_components }}"
+      hosts: "{{ master_2_host }}"
+    - name: slaves
+      cardinality: 1+
+      configuration: []  # configuration not yet implemented
+      components: "{{ slave_components }}"
+      hosts: "{{ groups.ambari_slave | difference(groups.ambari_slave[0]) | difference(groups.ambari_slave[1]) }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/vars/single_node_vm.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/vars/single_node_vm.yml b/metron-deployment/roles/ambari_config/vars/single_node_vm.yml
new file mode 100644
index 0000000..cb6fe4a
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/vars/single_node_vm.yml
@@ -0,0 +1,85 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# vars file for single_node_vm blueprint
+
+hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
+hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, HDFS_CLIENT, NODEMANAGER, YARN_CLIENT, MAPREDUCE2_CLIENT]
+spark_master: [SPARK_JOBHISTORYSERVER]
+spark_slave: [SPARK_CLIENT]
+storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
+storm_slave: [SUPERVISOR]
+kafka_broker: [KAFKA_BROKER]
+zookeeper_master: [ZOOKEEPER_SERVER]
+zookeeper_slave: [ZOOKEEPER_CLIENT]
+hbase_master: [HBASE_MASTER, HBASE_CLIENT]
+hbase_slave: [HBASE_REGIONSERVER]
+
+metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
+metron_components: "{{ hadoop_master | union(zookeeper_master) | union(storm_master) | union(hbase_master) | union(hadoop_slave) | union(zookeeper_slave) | union(storm_slave) | union(kafka_broker) | union(hbase_slave) }}"
+
+cluster_name: "metron_cluster"
+blueprint_name: "metron_blueprint"
+
+configurations:
+  - zoo.cfg:
+      dataDir: '{{ zookeeper_data_dir }}'
+  - hadoop-env:
+      hadoop_heapsize: 1024
+      namenode_heapsize: 512
+      dtnode_heapsize: 512
+      namenode_opt_permsize: 128m
+  - hbase-env:
+      hbase_regionserver_heapsize: 512
+      hbase_master_heapsize: 512
+      hbase_regionserver_xmn_max: 512
+  - hdfs-site:
+      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir  }}'
+      dfs.namenode.name.dir: '{{ namenode_name_dir }}'
+      dfs.datanode.data.dir: '{{ datanode_data_dir }}'
+      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir }}'
+  - yarn-env:
+      nodemanager_heapsize: 512
+      yarn_heapsize: 512
+      apptimelineserver_heapsize : 512
+      resourcemanager_heapsize: 1024
+  - mapred-env:
+      jobhistory_heapsize: 256
+  - mapred-site:
+      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path }}'
+  - yarn-site:
+      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs }}'
+      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path }}'
+      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path }}'
+      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs }}'
+  - storm-site:
+      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
+      storm.local.dir: '{{ storm_local_dir }}'
+  - kafka-env:
+      content: "{% raw %}\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport KAFKA_HEAP_OPTS=\"-Xms256M -Xmx256M\"\nexport KAFKA_JVM_PERFORMANCE_OPTS=\"-server -XX:+UseG1GC -XX:+DisableExplicitGC -Djava.awt.headless=true\"\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport PID_DIR={{kafka_pid_dir}}\nexport LOG_DIR={{kafka_log_dir}}\nexport KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}\n# Add kafka sink to classpath and related dependencies\nif [ -e \"/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\" ]; then\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/lib/*\nfi\nif [ -f /etc/kafka/conf/kafka-ranger-env.sh ]; then\n   . /etc/kafka/conf/kafka-ranger-env.sh\nfi{% endraw %}"
+  - kafka-broker:
+      log.dirs: '{{ kafka_log_dirs }}'
+
+blueprint:
+  stack_name: HDP
+  stack_version: 2.3
+  groups:
+    - name : host_group_1
+      cardinality: 1
+      configurations: []
+      components: "{{ metron_components }}"
+      hosts: "{{ hdp_host_group }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_config/vars/small_cluster.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_config/vars/small_cluster.yml b/metron-deployment/roles/ambari_config/vars/small_cluster.yml
new file mode 100644
index 0000000..a3792e1
--- /dev/null
+++ b/metron-deployment/roles/ambari_config/vars/small_cluster.yml
@@ -0,0 +1,88 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+
+hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
+hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, NODEMANAGER]
+spark_master: [SPARK_JOBHISTORYSERVER]
+storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
+storm_slave: [SUPERVISOR]
+kafka_broker: [KAFKA_BROKER]
+zookeeper_master: [ZOOKEEPER_SERVER]
+hbase_master: [HBASE_MASTER]
+hbase_slave: [HBASE_REGIONSERVER]
+hadoop_clients: [HDFS_CLIENT, YARN_CLIENT, MAPREDUCE2_CLIENT, SPARK_CLIENT, ZOOKEEPER_CLIENT, HBASE_CLIENT]
+
+metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
+
+master_1_components: "{{ hadoop_master | union(hadoop_clients) }}"
+master_1_host:
+  - "{{groups.ambari_slave[0]}}"
+master_2_components: "{{ zookeeper_master | union(storm_master) | union(spark_master) | union(hbase_master) | union(hadoop_clients) }}"
+master_2_host:
+  - "{{groups.ambari_slave[1]}}"
+slave_components: "{{ hadoop_slave | union(storm_slave) | union(kafka_broker) | union(hbase_slave) | union(hadoop_clients) }}"
+
+cluster_name: "metron"
+blueprint_name: "metron_blueprint"
+
+configurations:
+  - zoo.cfg:
+      dataDir: '{{ zookeeper_data_dir | default("/hadoop/zookeeper") }}'
+  - hadoop-env:
+      namenode_heapsize: 1024
+      dtnode_heapsize: 1024
+  - hbase-env:
+      hbase_regionserver_heapsize: 1024
+      hbase_master_heapsize: 1024
+  - hdfs-site:
+      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir | default("/hadoop/hdfs/namesecondary") }}'
+      dfs.namenode.name.dir: '{{ namenode_name_dir | default("/hadoop/hdfs/namenode") }}'
+      dfs.datanode.data.dir: '{{ datanode_data_dir | default("/hadoop/hdfs/data" ) }}'
+      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir | default("/hadoop/hdfs/journalnode") }}'
+  - mapred-site:
+      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path | default("/hadoop/mapreduce/jhs") }}'
+  - yarn-site:
+      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs| default("/hadoop/yarn/local") }}'
+      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path | default("/hadoop/yarn/timeline") }}'
+      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path| default("/hadoop/yarn/timeline") }}'
+      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs| default("/hadoop/yarn/log") }}'
+  - storm-site:
+      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
+      storm.local.dir: '{{ storm_local_dir | default("/hadoop/storm") }}'
+  - kafka-broker:
+      log.dirs: '{{ kafka_log_dirs | default("/kafka-log") }}'
+
+blueprint:
+  stack_name: HDP
+  stack_version: 2.3
+  groups:
+    - name : master_1
+      cardinality: 1
+      configuration: []  # configuration not yet implemented
+      components: "{{ master_1_components }}"
+      hosts: "{{ master_1_host }}"
+    - name : master_2
+      cardinality: 1
+      configuration: []  # configuration not yet implemented
+      components: "{{ master_2_components }}"
+      hosts: "{{ master_2_host }}"
+    - name: slaves
+      cardinality: 1+
+      configuration: []  # configuration not yet implemented
+      components: "{{ slave_components }}"
+      hosts: "{{ groups.ambari_slave | difference(groups.ambari_slave[0]) | difference(groups.ambari_slave[1]) }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_gather_facts/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_gather_facts/meta/main.yml b/metron-deployment/roles/ambari_gather_facts/meta/main.yml
new file mode 100644
index 0000000..61197e3
--- /dev/null
+++ b/metron-deployment/roles/ambari_gather_facts/meta/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - epel
+  - python-pip
+  - httplib2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_gather_facts/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_gather_facts/tasks/main.yml b/metron-deployment/roles/ambari_gather_facts/tasks/main.yml
new file mode 100644
index 0000000..db4927d
--- /dev/null
+++ b/metron-deployment/roles/ambari_gather_facts/tasks/main.yml
@@ -0,0 +1,151 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Ambari rest get cluster name
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: cluster_name_response
+
+- set_fact:
+    cluster_name: "{{ (cluster_name_response.content | from_json)['items'][0].Clusters.cluster_name }}"
+
+- name: Ambari rest get namenode hosts
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/HDFS/components/NAMENODE"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: namenode_hosts_response
+
+- set_fact:
+    namenode_host: "{{ (namenode_hosts_response.content | from_json).host_components[0].HostRoles.host_name }}"
+
+- name: Ambari rest get namenode core-site tag
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ namenode_host }}/host_components/NAMENODE"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: core_site_tag_response
+
+- set_fact:
+    core_site_tag: "{{ (core_site_tag_response.content | from_json).HostRoles.actual_configs['core-site'].default }}"
+
+- name: Ambari rest get namenode core-site properties
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=core-site&tag={{ core_site_tag }}"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: core_site_response
+
+- set_fact:
+    hdfs_url: "{{ (core_site_response.content | from_json)['items'][0].properties['fs.defaultFS'] }}"
+
+- name: Ambari rest get kafka broker hosts
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/KAFKA/components/KAFKA_BROKER"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: kafka_broker_hosts_response
+
+- set_fact:
+    kafka_broker_hosts: "{{ (kafka_broker_hosts_response.content | from_json).host_components | map(attribute='HostRoles.host_name') | list }}"
+
+- name: Ambari rest get kafka kafka-broker tag
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ kafka_broker_hosts[0] }}/host_components/KAFKA_BROKER"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: kafka_broker_tag_response
+
+- set_fact:
+    kafka_broker_tag: "{{ (kafka_broker_tag_response.content | from_json).HostRoles.actual_configs['kafka-broker'].default }}"
+
+- name: Ambari rest get kafka kafka-broker properties
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=kafka-broker&tag={{ kafka_broker_tag }}"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: kafka_broker_properties_response
+
+- set_fact:
+    kafka_broker_port: "{{ (kafka_broker_properties_response.content | from_json)['items'][0].properties['listeners'] | replace('PLAINTEXT://localhost:', '')}}"
+
+- set_fact:
+    kafka_broker_url: "{% for host in kafka_broker_hosts %}{% if loop.index != 1 %},{% endif %}{{ host }}:{{ kafka_broker_port }}{% endfor %}"
+
+- name: Ambari rest get zookeeper hosts
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/ZOOKEEPER/components/ZOOKEEPER_SERVER"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: zookeeper_hosts_response
+
+- set_fact:
+    zookeeper_hosts: "{{ (zookeeper_hosts_response.content | from_json).host_components | map(attribute='HostRoles.host_name') | list }}"
+
+- name: Ambari rest get zookeeper zoo.cfg tag
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ zookeeper_hosts[0] }}/host_components/ZOOKEEPER_SERVER"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: zookeeper_tag_response
+
+- set_fact:
+    zookeeper_tag: "{{ (zookeeper_tag_response.content | from_json).HostRoles.actual_configs['zoo.cfg'].default }}"
+
+- name: Ambari rest get zookeeper zoo.cfg properties
+  uri:
+    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=zoo.cfg&tag={{ zookeeper_tag }}"
+    user: "{{ ambari_user }}"
+    password: "{{ ambari_password }}"
+    force_basic_auth: yes
+    return_content: yes
+  register: zookeeper_properties_response
+
+- set_fact:
+    zookeeper_port: "{{ (zookeeper_properties_response.content | from_json)['items'][0].properties['clientPort'] }}"
+
+- set_fact:
+    zookeeper_url: "{% for host in zookeeper_hosts %}{% if loop.index != 1 %},{% endif %}{{ host }}:{{ zookeeper_port }}{% endfor %}"
+
+- name: debug
+  debug:
+    msg: "zookeeper_port = {{ zookeeper_port }},
+          zookeeper_hosts = {{ zookeeper_hosts }},
+          zookeeper_url = {{ zookeeper_url }},
+          kafka_broker_port = {{ kafka_broker_port }},
+          kafka_broker_hosts = {{ kafka_broker_hosts }},
+          kafka_broker_url = {{ kafka_broker_url }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_master/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_master/defaults/main.yml b/metron-deployment/roles/ambari_master/defaults/main.yml
new file mode 100644
index 0000000..3b8cc73
--- /dev/null
+++ b/metron-deployment/roles/ambari_master/defaults/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+ambari_server_mem: 2048
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_master/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_master/tasks/main.yml b/metron-deployment/roles/ambari_master/tasks/main.yml
new file mode 100644
index 0000000..25c3784
--- /dev/null
+++ b/metron-deployment/roles/ambari_master/tasks/main.yml
@@ -0,0 +1,51 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# tasks file for ambari_master
+- name: Install ambari server
+  yum:
+    name: ambari-server
+    state: present
+    update_cache: yes
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Set Ambari Server Max Memory
+  replace:
+    dest: /var/lib/ambari-server/ambari-env.sh
+    regexp: " -Xmx2048m "
+    replace: " -Xmx{{ ambari_server_mem }}m "
+    backup: no
+
+- name: Setup ambari server
+  shell: ambari-server setup -s && touch /etc/ambari-server/configured creates=/etc/ambari-server/configured
+  register: ambari_server_setup
+  failed_when: ambari_server_setup.stderr
+
+- name: start ambari server
+  service:
+    name: ambari-server
+    state: restarted
+
+- name: check if ambari-server is up on {{ ambari_host }}:{{ ambari_port }}
+  wait_for:
+    host: "{{ ambari_host }}"
+    port: "{{ ambari_port }}"
+    delay: 120
+    timeout: 300



[34/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/README.md b/metron-platform/metron-data-management/README.md
new file mode 100644
index 0000000..a123cc3
--- /dev/null
+++ b/metron-platform/metron-data-management/README.md
@@ -0,0 +1,252 @@
+# metron-data-management
+
+This project is a collection of classes to assist with loading of
+various enrichment and threat intelligence sources into Metron.
+
+## Simple HBase Enrichments/Threat Intelligence
+
+The vast majority of enrichments and threat intelligence processing tend
+toward the following pattern:
+* Take a field
+* Look up the field in a key/value store
+* If the key exists, then either it's a threat to be alerted or it should be enriched with the value associated with the key.
+
+As such, we have created this capability as a default threat intel and enrichment adapter.  The basic primitive for simple enrichments and threat intelligence sources
+is a complex key containing the following:
+* Type : The type of threat intel or enrichment (e.g. malicious_ip)
+* Indicator : The indicator in question
+* Value : The value to associate with the type, indicator pair.  This is a JSON map.
+
+At present, all of the dataloads utilities function by converting raw data
+sources to this primitive key (type, indicator) and value to be placed in HBase.
+
+In the case of threat intel, a hit on the threat intel table will result
+in:
+* The `is_alert` field being set to `true` in the index
+* A field named `threatintels.hbaseThreatIntel.$field.$threatintel_type` is set to `alert` 
+   * `$field` is the field in the original document that was a match (e.g. `src_ip_addr`) 
+   * `$threatintel_type` is the type of threat intel imported (defined in the Extractor configuration below).
+
+In the case of simple hbase enrichment, a hit on the enrichments table
+will result in the following new field for each key in the value:`enrichments.hbaseEnrichment.$field.$enrichment_type.$key` 
+* `$field` is the field in the original document that was a match (e.g.  `src_ip_addr`)
+* `$enrichment_type` is the type of enrichment imported (defined in the Extractor configuration below).
+* `$key` is a key in the JSON map associated with the row in HBase.
+
+For instance, in the situation where we had the following very silly key/value in
+HBase in the enrichment table:
+* indicator: `127.0.0.1`
+* type : `important_addresses`
+* value: `{ "name" : "localhost", "location" : "home" }`
+
+If we had a document whose `ip_src_addr` came through with a value of
+`127.0.0.1`, we would have the following fields added to the indexed
+document:
+* `enrichments.hbaseEnrichment.ip_src_addr.important_addresses.name` : `localhost`
+* `enrichments.hbaseEnrichment.ip_src_addr.important_addresses.location` : `home`
+
+## Extractor Framework
+
+For the purpose of ingesting data of a variety of formats, we have
+created an Extractor framework which allows for common data formats to
+be interpreted as enrichment or threat intelligence sources.  The
+formats supported at present are:
+* CSV (both threat intel and enrichment)
+* STIX (threat intel only)
+* Custom (pass your own class)
+
+All of the current utilities take a JSON file to configure how to
+interpret input data.  This JSON describes the type of data and the
+schema if necessary for the data if it is not fixed (as in STIX, e.g.).
+
+### CSV Extractor
+
+Consider the following example configuration file which
+describes how to process a CSV file.
+
+````
+{
+  "config" : {
+    "columns" : {
+         "ip" : 0
+        ,"source" : 2
+    }
+    ,"indicator_column" : "ip"
+    ,"type" : "malicious_ip"
+    ,"separator" : ","
+  }
+  ,"extractor" : "CSV"
+}
+````
+
+In this example, we have instructed the extractor about the schema (i.e. the columns field):
+two columns, `ip` at the first position and `source` at the third.  We have indicated that the `ip` column is the indicator type
+and that the enrichment type is named `malicious_ip`.  We have also indicated that the extractor to use is the CSV Extractor.
+The other option is the STIX extractor or a fully qualified classname for your own extractor.
+
+The `source` column values will show up in the value in HBase because it is called out as a non-indicator column.  The key
+for the value will be 'source'.  For instance, given an input string of `123.45.123.12,something,the grapevine`, the following key, value
+would be extracted:
+* Indicator : `123.45.123.12`
+* Type : `malicious_ip`
+* Value : `{ "source" : "the grapevine" }`
+
+### STIX Extractor
+
+Consider the following config for importing STIX documents.  This is a threat intelligence interchange
+format, so it is particularly relevant and attractive data to import for our purposes.  Because STIX is
+a standard format, there is no need to specify the schema or how to interpret the documents.
+
+We support a subset of STIX messages for importation:
+
+| STIX Type | Specific Type | Enrichment Type Name |
+|-----------|---------------|----------------------|
+| Address   | IPV_4_ADDR    | address:IPV_4_ADDR   |
+| Address   | IPV_6_ADDR    | address:IPV_6_ADDR   |
+| Address   | E_MAIL        | address:E_MAIL       |
+| Address   | MAC           | address:MAC          |
+| Domain    | FQDN          | domainname:FQDN      |
+| Hostname  |               | hostname             |
+
+
+NOTE: The enrichment type will be used as the type above.
+
+Consider the following configuration for an Extractor
+
+````
+{
+  "config" : {
+    "stix_address_categories" : "IPV_4_ADDR"
+  }
+  ,"extractor" : "STIX"
+}
+````
+
+In here, we're configuring the STIX extractor to load from a series of STIX files, however we only want to bring in IPv4
+addresses from the set of all possible addresses.  Note that if no categories are specified for import, all are assumed.
+Also, only address and domain types allow filtering via `stix_address_categories` and `stix_domain_categories` config
+parameters.
+
+## Enrichment Config
+
+In order to automatically add new enrichment and threat intel types to existing, running enrichment topologies, you will
+need to add new fields and new types to the zookeeper configuration.  A convenience parameter has been made to assist in this
+when doing an import.  Namely, you can specify the enrichment configs and how they associate with the fields of the 
+documents flowing through the enrichment topology.
+
+Consider the following Enrichment Configuration JSON.  This one is for a threat intelligence type:
+
+````
+{
+  "zkQuorum" : "localhost:2181"
+ ,"sensorToFieldList" : {
+    "bro" : {
+           "type" : "THREAT_INTEL"
+          ,"fieldToEnrichmentTypes" : {
+             "ip_src_addr" : [ "malicious_ip" ]
+            ,"ip_dst_addr" : [ "malicious_ip" ]
+                                      }
+           }
+                        }
+}
+````
+
+We have to specify the following:
+* The zookeeper quorum which holds the cluster configuration
+* The mapping between the fields in the enriched documents and the enrichment types.
+
+This configuration allows the ingestion tools to update zookeeper post-ingestion so that the enrichment topology can take advantage
+immediately of the new type.
+
+
+## Loading Utilities
+
+The two configurations above are used in the three separate ingestion tools:
+* Taxii Loader
+* Bulk load from HDFS via MapReduce
+* Flat File ingestion
+
+### Taxii Loader
+
+The shell script `$METRON_HOME/bin/threatintel_taxii_load.sh` can be used to poll a Taxii server for STIX documents and ingest them into HBase.  
+It is quite common for this Taxii server to be an aggregation server such as Soltra Edge.
+
+In addition to the Enrichment and Extractor configs described above, this loader requires a configuration file describing the connection information
+to the Taxii server.  An illustrative example of such a configuration file is:
+
+````
+{
+   "endpoint" : "http://localhost:8282/taxii-discovery-service"
+  ,"type" : "DISCOVER"
+  ,"collection" : "guest.Abuse_ch"
+  ,"table" : "threat_intel"
+  ,"columnFamily" : "cf"
+  ,"allowedIndicatorTypes" : [ "domainname:FQDN", "address:IPV_4_ADDR" ]
+}
+````
+
+As you can see, we are specifying the following information:
+* endpoint : The URL of the endpoint
+* type : `POLL` or `DISCOVER` depending on the endpoint.
+* collection : The Taxii collection to ingest
+* table : The HBase table to import into
+* columnFamily : The column family to import into
+* allowedIndicatorTypes : an array of acceptable threat intel types (see the "Enrichment Type Name" column of the Stix table above for the possibilities).
+
+The parameters for the utility are as follows:
+
+| Short Code | Long Code                 | Is Required? | Description                                                                                                                                        |
+|------------|---------------------------|--------------|----------------------------------------------------------------------------------------------------------------------------------------------------|
+| -h         |                           | No           | Generate the help screen/set of options                                                                                                            |
+| -e         | --extractor_config        | Yes          | JSON Document describing the extractor for this input data source                                                                                  |
+| -c         | --taxii_connection_config | Yes          | The JSON config file to configure the connection                                                                                                   |
+| -p         | --time_between_polls      | No           | The time between polling the Taxii server in milliseconds. (default: 1 hour)                                                                       |
+| -b         | --begin_time              | No           | Start time to poll the Taxii server (all data from that point will be gathered in the first pull).  The format for the date is yyyy-MM-dd HH:mm:ss |
+| -l         | --log4j                   | No           | The Log4j Properties to load                                                                                                                       |
+| -n         | --enrichment_config       | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified.                                  |
+
+
+### Bulk Load from HDFS
+
+The shell script `$METRON_HOME/bin/threatintel_bulk_load.sh` will kick off a MR job to load data staged in HDFS into an HBase table.  Note: despite what
+the naming may suggest, this utility works for enrichment as well as threat intel due to the underlying infrastructure being the same.
+
+The parameters for the utility are as follows:
+
+| Short Code | Long Code           | Is Required? | Description                                                                                                       |
+|------------|---------------------|--------------|-------------------------------------------------------------------------------------------------------------------|
+| -h         |                     | No           | Generate the help screen/set of options                                                                           |
+| -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                 |
+| -t         | --table             | Yes          | The HBase table to import into                                                                                    |
+| -f         | --column_family     | Yes          | The HBase table column family to import into                                                                      |
+| -i         | --input             | Yes          | The input data location on HDFS                                                                                   |
+| -n         | --enrichment_config | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified. |
+
+
+### Flatfile Loader
+
+The shell script `$METRON_HOME/bin/flatfile_loader.sh` will read data from local disk and load the enrichment or threat intel data into an HBase table.  
+Note: This utility works for enrichment as well as threat intel due to the underlying infrastructure being the same.
+
+One special thing to note here is that there is a special configuration
+parameter to the Extractor config that is only considered during this
+loader:
+* inputFormatHandler : This specifies how to consider the data.  The two implementations are `BY_LINE` and `org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat`.
+
+The default is `BY_LINE`, which makes sense for a list of CSVs where
+each line indicates a unit of information which can be imported.
+However, if you are importing a set of STIX documents, then you want
+each document to be considered as input to the Extractor.
+
+The parameters for the utility are as follows:
+
+| Short Code | Long Code           | Is Required? | Description                                                                                                                                                                          |
+|------------|---------------------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| -h         |                     | No           | Generate the help screen/set of options                                                                                                                                              |
+| -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                                                                                    |
+| -t         | --hbase_table       | Yes          | The HBase table to import into                                                                                                                                                       |
+| -c         | --hbase_cf          | Yes          | The HBase table column family to import into                                                                                                                                         |
+| -i         | --input             | Yes          | The input data location on local disk.  If this is a file, then that file will be loaded.  If this is a directory, then the files will be loaded recursively under that directory. |
+| -l         | --log4j             | No           | The log4j properties file to load                                                                                                                                                    |
+| -n         | --enrichment_config | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified.                                                                    |
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/pom.xml b/metron-platform/metron-data-management/pom.xml
new file mode 100644
index 0000000..6c3f866
--- /dev/null
+++ b/metron-platform/metron-data-management/pom.xml
@@ -0,0 +1,327 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+  Foundation (ASF) under one or more contributor license agreements. See the 
+  NOTICE file distributed with this work for additional information regarding 
+  copyright ownership. The ASF licenses this file to You under the Apache License, 
+  Version 2.0 (the "License"); you may not use this file except in compliance 
+  with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+  Unless required by applicable law or agreed to in writing, software distributed 
+  under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+  OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-data-management</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <httpcore.version>4.3.2</httpcore.version>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_hbase_guava_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>javax.xml.bind</groupId>
+            <artifactId>jaxb-api</artifactId>
+            <version>2.2.11</version>
+        </dependency>
+        <dependency>
+            <groupId>net.sf.saxon</groupId>
+            <artifactId>Saxon-HE</artifactId>
+            <version>9.5.1-5</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>javax.xml.bind</groupId>
+                    <artifactId>jaxb-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.xml.bind</groupId>
+            <artifactId>jaxb-impl</artifactId>
+            <version>2.2.5-2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.mitre</groupId>
+            <artifactId>stix</artifactId>
+            <version>1.2.0.2</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>javax.xml.bind</groupId>
+                    <artifactId>jaxb-api</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-common</artifactId>
+            <version>${project.parent.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.googlecode.disruptor</groupId>
+                    <artifactId>disruptor</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-common</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-hdfs</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-enrichment</artifactId>
+            <version>0.1BETA</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-hdfs</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.hadoop</groupId>
+                    <artifactId>hadoop-common</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>junit</groupId>
+                    <artifactId>junit</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-hbase</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.mitre.taxii</groupId>
+            <artifactId>taxii</artifactId>
+            <version>1.1.0.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+            <version>${global_hbase_version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-server</artifactId>
+            <version>${global_hbase_version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.opencsv</groupId>
+            <artifactId>opencsv</artifactId>
+            <version>${global_opencsv_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+            <version>${httpcore.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>${httpcore.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${global_elasticsearch_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-all</artifactId>
+            <version>1.3</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.carrotsearch.randomizedtesting</groupId>
+            <artifactId>randomizedtesting-runner</artifactId>
+            <version>2.1.14</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.elasticsearch</groupId>
+            <artifactId>elasticsearch</artifactId>
+            <version>${global_elasticsearch_version}</version>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-test-framework</artifactId>
+            <version>4.10.4</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.lucene</groupId>
+            <artifactId>lucene-core</artifactId>
+            <version>4.10.4</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-testing-util</artifactId>
+            <version>${global_hbase_version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.sun.jersey</groupId>
+            <artifactId>jersey-client</artifactId>
+            <version>1.19</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-integration-test</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-slf4j-impl</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-module-junit4</artifactId>
+            <version>1.6.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-api-mockito</artifactId>
+            <version>1.6.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-api-easymock</artifactId>
+            <version>1.6.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <version>3.4</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.12</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <build>
+        <resources>
+            <resource>
+                <directory>src</directory>
+                <excludes>
+                    <exclude>**/*.java</exclude>
+                </excludes>
+            </resource>
+        </resources>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <argLine>-Xmx2048m -XX:MaxPermSize=256m -XX:-UseSplitVerifier</argLine>
+                    <skip>true</skip>
+                    <trimStackTrace>false</trimStackTrace>
+                </configuration>
+            </plugin>
+            <plugin>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <source>1.7</source>
+                    <target>1.7</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.3</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <relocations>
+                                <relocation>
+                                    <pattern>com.google.common</pattern>
+                                    <shadedPattern>org.apache.metron.guava.dataload</shadedPattern>
+                                </relocation>
+                                <relocation>
+                                    <pattern>org.apache.http</pattern>
+                                    <shadedPattern>org.apache.metron.httpcore.dataload</shadedPattern>
+                                </relocation>
+                            </relocations>
+                            <artifactSet>
+                                <excludes>
+                                    <exclude>classworlds:classworlds</exclude>
+                                    <exclude>junit:junit</exclude>
+                                    <exclude>jmock:*</exclude>
+                                    <exclude>*:xml-apis</exclude>
+                                    <exclude>*slf4j*</exclude>
+                                    <exclude>org.apache.maven:lib:tests</exclude>
+                                    <exclude>log4j:log4j:jar:</exclude>
+                                    <exclude>*:hbase:*</exclude>
+                                    <exclude>org.apache.hadoop.yarn.util.package-info*</exclude>
+                                </excludes>
+                            </artifactSet>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+
+        </plugins>
+    </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/assembly/assembly.xml b/metron-platform/metron-data-management/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..c2c384b
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/assembly/assembly.xml
@@ -0,0 +1,42 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/bash</directory>
+      <outputDirectory>/bin</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0755</fileMode>
+      <lineEnding>unix</lineEnding>
+      <filtered>true</filtered>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/target</directory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+      <outputDirectory>/lib</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/Whois_CSV_to_JSON.py
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/Whois_CSV_to_JSON.py b/metron-platform/metron-data-management/src/main/bash/Whois_CSV_to_JSON.py
new file mode 100755
index 0000000..2091418
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/Whois_CSV_to_JSON.py
@@ -0,0 +1,208 @@
+#!/usr/bin/python
+
+"""
+Copyright 2014 Cisco Systems, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import sys
+import os
+import csv
+import json
+import multiprocessing
+import logging
+logging.basicConfig(level=logging.DEBUG)
+
+
+def is_field_excluded(fieldname=None):
+    """
+    Checks to see if a field name is a member of a list of names to exclude. Modify to suit your own list.
+
+    :param fieldname: A string representing a field name
+    :return: True or False
+    """
+    import re
+
+    # List of fields names to exclude
+    excluded_fields = [
+        'Audit_auditUpdatedDate',
+        #'domainName'
+    ]
+
+    if fieldname in excluded_fields:
+        return True
+
+    # Regexes to match for exclusion
+    excluded_regexes = [
+        ['_rawText$', re.IGNORECASE],
+    ]
+
+    for regex in excluded_regexes:
+        if re.search(regex[0], fieldname, regex[1]):
+            return True
+
+    return False
+
+
+def process_csv(in_filename, out_filename):
+    """
+    Processes a CSV file of WHOIS data and converts each line to a JSON element, skipping specific fields that
+    are not deemed necessary (domainName, *_rawText, Audit_auditUpdatedDate)
+
+    :param in_filename: Input CSV filename with full path
+    :param out_filename: Output JSON filename with full path
+    :return: None
+    """
+    if out_filename:
+        out_fh = open(out_filename, 'wb')
+        logging.debug('%s: Converting %s to %s' % (multiprocessing.current_process().name, in_filename, out_filename))
+    else:
+        logging.debug('%s: Analyzing %s' % (multiprocessing.current_process().name, in_filename))
+
+    with open(in_filename, 'rb') as f:
+        reader = csv.DictReader(f, delimiter=',', quotechar='"')
+        line_num = 0
+        try:
+            for row in reader:
+                line_num += 1
+                try:
+                    if out_filename:
+                        # json conversion and output
+                        new_row = {}
+                        for field in reader.fieldnames:
+                            # fields we don't want include these + anything with rawText
+                            #if field not in ['Audit_auditUpdatedDate', 'domainName'] and not field.endswith('_rawText'):
+                            if not is_field_excluded(field):
+                                new_row[field] = row.get(field)
+                        json.dump(new_row, out_fh)
+                        out_fh.write('\n')
+                    else:
+                        # analysis .. check to be sure fileheader and csv row counts match
+                        if len(row) != len(reader.fieldnames):
+                            raise Exception('Field count mismatch: row: %s / fields: %s' % (len(row), len(reader.fieldnames)))
+                except Exception, e:
+                    logging.warn("Error with file %s, line %s: %s" % (in_filename, line_num, e))
+
+            if not out_filename:
+                logging.info('Analyzed %s: OK' % in_filename)
+        except Exception, e:
+            logging.warn(e)
+
+        out_fh.close()
+
+
+##-------------------------------------------------------------------------
+
+def process_files(source_dir, output_dir, max_processes=10, overwrite=False):
+    """
+    Generates a multiprocessing.Pool() queue with a list of input and output files to be processed with processCSV.
+    Files are added by walking the source_dir and adding any file with a CSV extension. Output is placed into a single
+    directory for processing. Output filenames are generated using the first part of the directory name so a file
+    named source_dir/com/1.csv would become outputDir/com_1.json
+
+    :param source_dir: Source directory of CSV files
+    :param output_dir: Output directory for resultant JSON files
+    :param max_processes: Maximum number of processes run
+    :return:
+    """
+    logging.info("Processing Whois files from %s" % source_dir)
+
+    if output_dir and not os.path.exists(output_dir):
+        logging.debug("Creating output directory %s" % output_dir)
+        os.makedirs(output_dir)
+
+    logging.info("Starting %s pool workers" % max_processes)
+
+    if sys.version.startswith('2.6'):
+        # no maxtaskperchild in 2.6
+        pool = multiprocessing.Pool(processes=max_processes)
+    else:
+        pool = multiprocessing.Pool(processes=max_processes, maxtasksperchild=4)
+
+    filecount = 0
+    for dirname, dirnames, filenames in os.walk(source_dir):
+        for filename in filenames:
+            if filename.endswith('.csv'):
+                # output files go to outputDir and are named using the last subdirectory from the dirname
+                if output_dir:
+                    out_filename = filename.replace('csv', 'json')
+                    out_filename = os.path.join(output_dir, '%s_%s' % (os.path.split(dirname)[-1], out_filename))
+
+                    # if file does not exist or if overwrite is true, add file process to the pool
+                    if not os.path.isfile(out_filename) or overwrite:
+                        pool.apply_async(process_csv, args=(os.path.join(dirname, filename), out_filename))
+                        filecount += 1
+                    else:
+                        logging.info("Skipping %s, %s exists and overwrite is false" % (filename, out_filename))
+                else:
+                    # no outputdir so we just analyze the files
+                    pool.apply_async(process_csv, args=(os.path.join(dirname, filename), None))
+                    filecount += 1
+
+    try:
+        pool.close()
+        logging.info("Starting activities on %s CSV files" % filecount)
+        pool.join()
+    except KeyboardInterrupt:
+        logging.info("Aborting")
+        pool.terminate()
+
+    logging.info("Completed")
+
+
+##-------------------------------------------------------------------------
+
+if __name__ == "__main__":
+
+    max_cpu = multiprocessing.cpu_count()
+
+    from optparse import OptionParser
+    parser = OptionParser()
+    parser.add_option('-s', '--source', dest='source_dir', action='store',
+                      help='Source directory to walk for CSV files')
+    parser.add_option('-o', '--output', dest='out_dir', action='store',
+                      help='Output directory for JSON files')
+    parser.add_option('-O', '--overwrite', dest='overwrite', action='store_true',
+                      help='Overwrite existing files in output directory')
+    parser.add_option('-p', '--processes', dest='max_processes', action='store', default=max_cpu, type='int',
+                      help='Max number of processes to spawn')
+    parser.add_option('-a', '--analyze', dest='analyze', action='store_true',
+                      help='Analyze CSV files for validity, no file output')
+    parser.add_option('-d', '--debug', dest='debug', action='store_true',
+                      help='Enable debug messages')
+
+    (options, args) = parser.parse_args()
+
+    if not options.source_dir:
+        logging.error("Source directory required")
+        sys.exit(-1)
+
+    if not options.out_dir or options.analyze:
+        out_dir = None
+    elif not options.out_dir:
+        logging.error("Ouput directory or analysis option required")
+        sys.exit(-1)
+    else:
+        out_dir = options.out_dir
+
+    if options.max_processes > max_cpu:
+        logging.warn('Max Processes (%s) is greater than available Processors (%s)' % (options.max_processes, max_cpu))
+
+    if options.debug:
+        # enable debug level and multiprocessing debugging
+        logging.basicConfig(level=logging.DEBUG)
+        multiprocessing.log_to_stderr(logging.DEBUG)
+
+    process_files(options.source_dir, options.out_dir, options.max_processes, options.overwrite)
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/flatfile_loader.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/flatfile_loader.sh b/metron-platform/metron-data-management/src/main/bash/flatfile_loader.sh
new file mode 100755
index 0000000..e464984
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/flatfile_loader.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
+CP=/usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar:/usr/metron/0.1BETA/lib/taxii-1.1.0.1.jar:`${HBASE_HOME}/bin/hbase classpath`
+HADOOP_CLASSPATH=$(echo $CP )
+for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
+  if [ -f $jar ];then
+    LIBJARS="$jar,$LIBJARS"
+  fi
+done
+export HADOOP_CLASSPATH
+hadoop jar /usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar org.apache.metron.dataloads.nonbulk.flatfile.SimpleEnrichmentFlatFileLoader "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/prune_elasticsearch_indices.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/prune_elasticsearch_indices.sh b/metron-platform/metron-data-management/src/main/bash/prune_elasticsearch_indices.sh
new file mode 100644
index 0000000..aed6782
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/prune_elasticsearch_indices.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+yarn jar /usr/metron/${project.version}/lib/metron-data-management-${project.version}.jar org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerRunner "$@"
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/prune_hdfs_files.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/prune_hdfs_files.sh b/metron-platform/metron-data-management/src/main/bash/prune_hdfs_files.sh
new file mode 100644
index 0000000..b37e022
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/prune_hdfs_files.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+yarn jar /usr/metron/${project.version}/lib/metron-data-management-${project.version}.jar org.apache.metron.dataloads.bulk.HDFSDataPruner "$@"
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_load.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_load.sh b/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_load.sh
new file mode 100755
index 0000000..2df4ee3
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_load.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
+HADOOP_CLASSPATH=${HBASE_HOME}/lib/hbase-server.jar:`${HBASE_HOME}/bin/hbase classpath`
+for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
+  if [ -f $jar ];then
+    LIBJARS="$jar,$LIBJARS"
+  fi
+done
+export HADOOP_CLASSPATH
+hadoop jar /usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader -libjars ${LIBJARS} "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_prune.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_prune.sh b/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_prune.sh
new file mode 100755
index 0000000..c3ec233
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/threatintel_bulk_prune.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
+HADOOP_CLASSPATH=${HBASE_HOME}/lib/hbase-server.jar:`${HBASE_HOME}/bin/hbase classpath`
+for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
+  if [ -f $jar ];then
+    LIBJARS="$jar,$LIBJARS"
+  fi
+done
+export HADOOP_CLASSPATH
+hadoop jar /usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar org.apache.metron.dataloads.bulk.LeastRecentlyUsedPruner -libjars ${LIBJARS} "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/bash/threatintel_taxii_load.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/bash/threatintel_taxii_load.sh b/metron-platform/metron-data-management/src/main/bash/threatintel_taxii_load.sh
new file mode 100755
index 0000000..321041a
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/bash/threatintel_taxii_load.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# 
+
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
+CP=/usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar:/usr/metron/0.1BETA/lib/taxii-1.1.0.1.jar:`${HBASE_HOME}/bin/hbase classpath`
+HADOOP_CLASSPATH=$(echo $CP )
+for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
+  if [ -f $jar ];then
+    LIBJARS="$jar,$LIBJARS"
+  fi
+done
+export HADOOP_CLASSPATH
+hadoop jar /usr/metron/0.1BETA/lib/metron-data-management-0.1BETA.jar org.apache.metron.dataloads.nonbulk.taxii.TaxiiLoader "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/DataPruner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/DataPruner.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/DataPruner.java
new file mode 100644
index 0000000..98e3b52
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/DataPruner.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.concurrent.TimeUnit;
+
+public abstract class DataPruner {
+
+    protected static final Logger LOG = LoggerFactory.getLogger(DataPruner.class);
+    protected long firstTimeMillis;
+    protected long lastTimeMillis;
+    protected String wildCard;
+
+    public DataPruner(Date startDate, Integer numDays, String wildCard) throws StartDateException {
+
+        Date startAtMidnight = dateAtMidnight(startDate);
+        this.lastTimeMillis = startDate.getTime();
+        this.firstTimeMillis = lastTimeMillis - TimeUnit.DAYS.toMillis(numDays);
+        this.wildCard = wildCard;
+
+        Date today = dateAtMidnight(new Date());
+
+        if (!today.after(startAtMidnight)) {
+            throw new StartDateException("Prune Start Date must be prior to today");
+        }
+    }
+
+    protected Date dateAtMidnight(Date date) {
+
+        Calendar calendar = Calendar.getInstance();
+
+        calendar.setTime(date);
+        calendar.set(Calendar.HOUR_OF_DAY, 0);
+        calendar.set(Calendar.MINUTE, 0);
+        calendar.set(Calendar.SECOND, 0);
+        calendar.set(Calendar.MILLISECOND, 0);
+        return calendar.getTime();
+
+    }
+
+
+    public abstract Long prune() throws IOException;
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
new file mode 100644
index 0000000..ddbb61b
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPruner.java
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import com.google.common.base.Predicate;
+import com.google.common.collect.Iterables;
+import org.apache.commons.collections.IteratorUtils;
+import org.apache.metron.common.configuration.Configuration;
+import org.elasticsearch.client.AdminClient;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.Iterator;
+
+public class ElasticsearchDataPruner extends DataPruner {
+
+    private String indexPattern;
+    private SimpleDateFormat dateFormat;
+    protected Client indexClient = null;
+    protected Configuration configuration;
+
+    private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchDataPruner.class);
+    private static final String defaultDateFormat = "yyyy.MM.dd.HH";
+
+
+
+    private Predicate<String> filterWithRegex = new Predicate<String>() {
+
+        @Override
+        public boolean apply(String str) {
+
+            try {
+                String dateString = str.substring(indexPattern.length());
+                Date indexCreateDate = dateFormat.parse(dateString);
+                long indexCreatedDate = indexCreateDate.getTime();
+                if (indexCreatedDate >= firstTimeMillis && indexCreatedDate < lastTimeMillis) {
+                    return true;
+                }
+            } catch (ParseException e) {
+                LOG.error("Unable to parse date from + " + str.substring(indexPattern.length()), e);
+            }
+
+            return false;
+        }
+
+    };
+
+    public ElasticsearchDataPruner(Date startDate, Integer numDays,Configuration configuration, Client indexClient, String indexPattern) throws Exception {
+
+        super(startDate, numDays, indexPattern);
+
+        this.indexPattern = indexPattern;
+        this.dateFormat = new SimpleDateFormat(defaultDateFormat);
+        this.configuration = configuration;
+        this.indexClient = indexClient;
+
+
+    }
+
+    @Override
+    public Long prune() throws IOException {
+
+        try {
+
+            configuration.update();
+
+        }
+        catch(Exception e) {
+
+            LOG.error("Unable to update configs",e);
+
+        }
+
+        String dateString = configuration.getGlobalConfig().get("es.date.format").toString();
+
+        if( null != dateString ){
+            dateFormat = new SimpleDateFormat(dateString);
+        }
+
+        ImmutableOpenMap<String, IndexMetaData> allIndices = indexClient.admin().cluster().prepareState().get().getState().getMetaData().getIndices();
+        Iterable indicesForDeletion = getFilteredIndices(allIndices);
+        Object[] indexArray = IteratorUtils.toArray(indicesForDeletion.iterator());
+
+        if(indexArray.length > 0) {
+            String[] indexStringArray = new String[indexArray.length];
+            System.arraycopy(indexArray, 0, indexStringArray, 0, indexArray.length);
+            deleteIndex(indexClient.admin(), indexStringArray);
+        }
+
+        return new Long(indexArray.length);
+
+    }
+
+    public Boolean deleteIndex(AdminClient adminClient, String... index) {
+
+        boolean isAcknowledged = adminClient.indices().delete(adminClient.indices().prepareDelete(index).request()).actionGet().isAcknowledged();
+        return new Boolean(isAcknowledged);
+
+    }
+
+    protected Iterable<String> getFilteredIndices(ImmutableOpenMap<String, IndexMetaData> indices) {
+
+        String[] returnedIndices = new String[indices.size()];
+        Iterator it = indices.keysIt();
+        System.arraycopy(IteratorUtils.toArray(it), 0, returnedIndices, 0, returnedIndices.length);
+        Iterable<String> matches = Iterables.filter(Arrays.asList(returnedIndices), filterWithRegex);
+
+        return matches;
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
new file mode 100644
index 0000000..f0a4d3b
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunner.java
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.apache.commons.cli.*;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.metron.common.configuration.Configuration;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Map;
+
+public class ElasticsearchDataPrunerRunner {
+
+    private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchDataPruner.class);
+
+    public static void main(String... argv) throws IOException, java.text.ParseException, ClassNotFoundException, InterruptedException {
+
+        /**
+         * Example
+         * start=$(date -d '30 days ago' +%m/%d/%Y)
+         * yarn jar Metron-DataLoads-{VERSION}.jar org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerRunner -i host1:9300 -p '/bro_index_' -s $(date -d '30 days ago' +%m/%d/%Y) -n 1;
+         * echo ${start}
+         **/
+
+        Options options = buildOptions();
+        Options help = new Options();
+        TransportClient client = null;
+
+        Option o = new Option("h", "help", false, "This screen");
+        o.setRequired(false);
+        help.addOption(o);
+
+
+
+        try {
+
+            CommandLine cmd = checkOptions(help,options, argv);
+
+            String start = cmd.getOptionValue("s");
+            Date startDate = new SimpleDateFormat("MM/dd/yyyy").parse(start);
+
+            Integer numDays = Integer.parseInt(cmd.getOptionValue("n"));
+            String indexPrefix = cmd.getOptionValue("p");
+
+            if(LOG.isDebugEnabled()) {
+                LOG.debug("Running prune with args: " + startDate + " " + numDays);
+            }
+
+            Configuration configuration = null;
+
+            if( cmd.hasOption("z")){
+
+                RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+                CuratorFramework framework = CuratorFrameworkFactory.newClient(cmd.getOptionValue("z"),retryPolicy);
+                framework.start();
+                configuration = new Configuration(framework);
+
+            } else if ( cmd.hasOption("c") ){
+
+                String resourceFile = cmd.getOptionValue("c");
+                configuration = new Configuration(Paths.get(resourceFile));
+
+            }
+
+            configuration.update();
+
+            Map<String, Object> globalConfiguration = configuration.getGlobalConfig();
+            ImmutableSettings.Builder builder = ImmutableSettings.settingsBuilder();
+            builder.put("cluster.name", globalConfiguration.get("es.clustername"));
+            builder.put("curatorFramework.transport.ping_timeout","500s");
+            client = new TransportClient(builder.build())
+                    .addTransportAddress(new InetSocketTransportAddress(globalConfiguration.get("es.ip").toString(), Integer.parseInt(globalConfiguration.get("es.port").toString())));
+
+            DataPruner pruner = new ElasticsearchDataPruner(startDate, numDays, configuration, client, indexPrefix);
+
+            LOG.info("Pruned " + pruner.prune() + " indices from " +  globalConfiguration.get("es.ip") + ":" + globalConfiguration.get("es.port") + "/" + indexPrefix);
+
+
+        } catch (Exception e) {
+
+            e.printStackTrace();
+            System.exit(-1);
+
+        } finally {
+
+            if( null != client) {
+                client.close();
+            }
+
+        }
+
+    }
+
+    public static CommandLine checkOptions(Options help, Options options, String ... argv) throws ParseException {
+
+        CommandLine cmd = null;
+        CommandLineParser parser = new PosixParser();
+
+
+        try {
+
+            cmd = parser.parse(help,argv,true);
+
+            if( cmd.getOptions().length > 0){
+                final HelpFormatter usageFormatter = new HelpFormatter();
+                usageFormatter.printHelp("ElasticsearchDataPrunerRunner", null, options, null, true);
+                System.exit(0);
+            }
+
+            cmd = parser.parse(options, argv);
+
+        } catch (ParseException e) {
+
+            final HelpFormatter usageFormatter = new HelpFormatter();
+            usageFormatter.printHelp("ElasticsearchDataPrunerRunner", null, options, null, true);
+            throw e;
+
+        }
+
+
+        if( (cmd.hasOption("z") && cmd.hasOption("c")) || (!cmd.hasOption("z") && !cmd.hasOption("c")) ){
+
+            System.err.println("One (only) of zookeeper-hosts or config-location is required");
+            final HelpFormatter usageFormatter = new HelpFormatter();
+            usageFormatter.printHelp("ElasticsearchDataPrunerRunner", null, options, null, true);
+            throw new RuntimeException("Must specify zookeeper-hosts or config-location, but not both");
+
+        }
+
+        return cmd;
+    }
+
+    public static Options buildOptions(){
+
+        Options options = new Options();
+
+        Option o = new Option("s", "start-date", true, "Starting Date (MM/DD/YYYY)");
+        o.setArgName("START_DATE");
+        o.setRequired(true);
+        options.addOption(o);
+
+        o = new Option("n", "numdays", true, "Number of days back to purge");
+        o.setArgName("NUMDAYS");
+        o.setRequired(true);
+        options.addOption(o);
+
+        o = new Option("p", "index-prefix", true, "Index prefix  - e.g. bro_index_");
+        o.setArgName("PREFIX");
+        o.setRequired(true);
+        options.addOption(o);
+
+        o = new Option("c", "config-location", true, "Directory Path - e.g. /path/to/config/dir");
+        o.setArgName("CONFIG");
+        o.setRequired(false);
+        options.addOption(o);
+
+        o = new Option("z", "zookeeper-hosts", true, "Zookeeper URL - e.g. zkhost1:2181,zkhost2:2181,zkhost3:2181");
+        o.setArgName("PREFIX");
+        o.setRequired(false);
+        options.addOption(o);
+
+        return options;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/HDFSDataPruner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/HDFSDataPruner.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/HDFSDataPruner.java
new file mode 100644
index 0000000..097253c
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/HDFSDataPruner.java
@@ -0,0 +1,226 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+
+import org.apache.commons.cli.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class HDFSDataPruner extends DataPruner {
+
+
+    private Path globPath;
+    protected FileSystem fileSystem;
+    protected static final Logger LOG = LoggerFactory.getLogger(HDFSDataPruner.class);
+
+    HDFSDataPruner(Date startDate, Integer numDays, String fsUri, String globPath) throws IOException, StartDateException {
+
+        super(startDate,numDays,globPath);
+        this.globPath = new Path(wildCard);
+        Configuration conf = new Configuration();
+        conf.set("fs.defaultFS", fsUri);
+        this.fileSystem = FileSystem.get(conf);
+
+    }
+
+
+    public static void main(String... argv) throws IOException, java.text.ParseException, ClassNotFoundException, InterruptedException {
+
+        /**
+         * Example
+         * start=$(date -d '30 days ago' +%m/%d/%Y)
+         * yarn jar Metron-DataLoads-0.1BETA.jar org.apache.metron.dataloads.bulk.HDFSDataPruner -f hdfs://ec2-52-36-25-217.us-west-2.compute.amazonaws.com:8020 -g '/apps/metron/enrichment/indexed/bro_doc/*enrichment-*' -s $(date -d '30 days ago' +%m/%d/%Y) -n 1;
+         * echo ${start}
+         **/
+
+        Options options = new Options();
+        Options help = new Options();
+
+        {
+            Option o = new Option("h", "help", false, "This screen");
+            o.setRequired(false);
+            help.addOption(o);
+        }
+        {
+            Option o = new Option("s", "start-date", true, "Starting Date (MM/DD/YYYY)");
+            o.setArgName("START_DATE");
+            o.setRequired(true);
+            options.addOption(o);
+        }
+        {
+            Option o = new Option("f", "filesystem", true, "Filesystem uri - e.g. hdfs://host:8020 or file:///");
+            o.setArgName("FILESYSTEM");
+            o.setRequired(true);
+            options.addOption(o);
+        }
+        {
+            Option o = new Option("n", "numdays", true, "Number of days back to purge");
+            o.setArgName("NUMDAYS");
+            o.setRequired(true);
+            options.addOption(o);
+        }
+        {
+            Option o = new Option("g", "glob-string", true, "Glob filemask for files to delete - e.g. /apps/metron/enrichment/bro_doc/file-*");
+            o.setArgName("GLOBSTRING");
+            o.setRequired(true);
+            options.addOption(o);
+        }
+
+        try {
+
+            CommandLineParser parser = new PosixParser();
+            CommandLine cmd = null;
+
+            try {
+
+                cmd = parser.parse(help,argv,true);
+                if( cmd.getOptions().length > 0){
+                    final HelpFormatter usageFormatter = new HelpFormatter();
+                    usageFormatter.printHelp("HDFSDataPruner", null, options, null, true);
+                    System.exit(0);
+                }
+
+                cmd = parser.parse(options, argv);
+
+            } catch (ParseException pe) {
+
+                final HelpFormatter usageFormatter = new HelpFormatter();
+                usageFormatter.printHelp("HDFSDataPruner", null, options, null, true);
+                System.exit(-1);
+
+            }
+
+            String start = cmd.getOptionValue("s");
+            Date startDate = new SimpleDateFormat("MM/dd/yyyy").parse(start);
+            String fileSystemUri = cmd.getOptionValue("f");
+            Integer numDays = Integer.parseInt(cmd.getOptionValue("n"));
+            String globString = cmd.getOptionValue("g");
+
+            if(LOG.isDebugEnabled()) {
+                LOG.debug("Running prune with args: " + startDate + " " + numDays + " " + fileSystemUri + " " + globString);
+            }
+
+            DataPruner pruner = new HDFSDataPruner(startDate, numDays, fileSystemUri, globString);
+
+            LOG.info("Pruned " + pruner.prune() + " files from " + fileSystemUri + globString);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            System.exit(-1);
+        }
+
+    }
+
+    public Long prune() throws IOException {
+
+        Long filesPruned = new Long(0);
+
+        FileStatus[] filesToDelete = fileSystem.globStatus(globPath, new HDFSDataPruner.DateFileFilter(this));
+
+        for (FileStatus fileStatus : filesToDelete) {
+
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Deleting File: " + fileStatus.getPath());
+            }
+
+            fileSystem.delete(fileStatus.getPath(), false);
+
+            filesPruned++;
+        }
+
+        return filesPruned;
+    }
+
+    class DateFileFilter extends Configured implements PathFilter {
+
+        HDFSDataPruner pruner;
+        Boolean failOnError = false;
+
+        DateFileFilter(HDFSDataPruner pruner) {
+            this.pruner = pruner;
+        }
+
+        DateFileFilter(HDFSDataPruner pruner, Boolean failOnError) {
+
+            this(pruner);
+            this.failOnError = failOnError;
+
+        }
+
+        @Override
+        public boolean accept(Path path) {
+            try {
+
+                if(pruner.LOG.isDebugEnabled()) {
+                    pruner.LOG.debug("ACCEPT - working with file: " + path);
+                }
+
+                if (pruner.fileSystem.isDirectory(path)) {
+                    return false;
+
+                }
+            } catch (IOException e) {
+
+                pruner.LOG.error("IOException", e);
+
+                if (failOnError) {
+                    throw new RuntimeException(e);
+                }
+
+                return false;
+            }
+
+            try {
+
+                FileStatus file = pruner.fileSystem.getFileStatus(path);
+                long fileModificationTime = file.getModificationTime();
+                boolean accept = false;
+
+                if (fileModificationTime >= pruner.firstTimeMillis && fileModificationTime < pruner.lastTimeMillis) {
+
+                    accept = true;
+                }
+
+                return accept;
+
+            } catch (IOException e) {
+
+                pruner.LOG.error("IOException", e);
+
+                if (failOnError) {
+                    throw new RuntimeException(e);
+                }
+
+                return false;
+            }
+
+        }
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/LeastRecentlyUsedPruner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/LeastRecentlyUsedPruner.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/LeastRecentlyUsedPruner.java
new file mode 100644
index 0000000..7acc96c
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/LeastRecentlyUsedPruner.java
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import org.apache.commons.cli.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.metron.dataloads.hbase.mr.PrunerMapper;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class LeastRecentlyUsedPruner {
+    private static abstract class OptionHandler implements Function<String, Option> {}
+    private enum BulkLoadOptions {
+        HELP("h", new OptionHandler() {
+
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                return new Option(s, "help", false, "Generate Help screen");
+            }
+        }), TABLE("t", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                Option o = new Option(s, "table", true, "HBase table to prune");
+                o.setRequired(true);
+                o.setArgName("HBASE_TABLE");
+                return o;
+            }
+        }), COLUMN_FAMILY("f", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                Option o = new Option(s, "column_family", true, "Column family of the HBase table to prune");
+                o.setRequired(false);
+                o.setArgName("CF_NAME");
+                return o;
+            }
+        })
+        ,AS_OF_TIME("a", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                Option o = new Option(s, "as_of", true, "The earliest access tracker you want to use.");
+                o.setArgName("datetime");
+                o.setRequired(true);
+                return o;
+            }
+        })
+        ,AS_OF_TIME_FORMAT("t", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                String defaultFormat = new SimpleDateFormat().toLocalizedPattern();
+                Option o = new Option(s, "as_of_format", true, "The format of the as_of time (only used in conjunction with the as_of option) (Default is: " + defaultFormat + ")");
+                o.setArgName("format");
+                o.setRequired(false);
+                return o;
+            }
+        })
+        ,ACCESS_TABLE("u", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                Option o = new Option(s, "access_table", true, "HBase table containing the access trackers.");
+                o.setRequired(true);
+                o.setArgName("HBASE_TABLE");
+                return o;
+            }
+        }), ACCESS_COLUMN_FAMILY("z", new OptionHandler() {
+            @Nullable
+            @Override
+            public Option apply(@Nullable String s) {
+                Option o = new Option(s, "access_column_family", true, "Column family of the HBase table containing the access trackers");
+                o.setRequired(true);
+                o.setArgName("CF_NAME");
+                return o;
+            }
+        });
+        Option option;
+        String shortCode;
+        BulkLoadOptions(String shortCode, OptionHandler optionHandler) {
+            this.shortCode = shortCode;
+            this.option = optionHandler.apply(shortCode);
+        }
+
+        public boolean has(CommandLine cli) {
+            return cli.hasOption(shortCode);
+        }
+
+        public String get(CommandLine cli) {
+            return cli.getOptionValue(shortCode);
+        }
+        private static long getTimestamp(CommandLine cli) throws java.text.ParseException {
+            Date d = getFormat(cli).parse(BulkLoadOptions.AS_OF_TIME.get(cli));
+            return d.getTime();
+        }
+
+        private static DateFormat getFormat(CommandLine cli) {
+            DateFormat format = new SimpleDateFormat();
+            if (BulkLoadOptions.AS_OF_TIME_FORMAT.has(cli)) {
+                 format = new SimpleDateFormat(BulkLoadOptions.AS_OF_TIME_FORMAT.get(cli));
+            }
+            return format;
+        }
+
+        public static CommandLine parse(CommandLineParser parser, String[] args) {
+            try {
+                CommandLine cli = parser.parse(getOptions(), args);
+                if(BulkLoadOptions.HELP.has(cli)) {
+                    printHelp();
+                    System.exit(0);
+                }
+                return cli;
+            } catch (ParseException e) {
+                System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+                e.printStackTrace(System.err);
+                printHelp();
+                System.exit(-1);
+                return null;
+            }
+        }
+
+        public static void printHelp() {
+            HelpFormatter formatter = new HelpFormatter();
+            formatter.printHelp( "LeastRecentlyUsedPruner", getOptions());
+        }
+
+        public static Options getOptions() {
+            Options ret = new Options();
+            for(BulkLoadOptions o : BulkLoadOptions.values()) {
+               ret.addOption(o.option);
+            }
+            return ret;
+        }
+    }
+
+    public static void setupHBaseJob(Job job, String sourceTable, String cf) throws IOException {
+        Scan scan = new Scan();
+        if(cf != null) {
+            scan.addFamily(Bytes.toBytes(cf));
+        }
+        scan.setCaching(500);        // 1 is the default in Scan, which will be bad for MapReduce jobs
+        scan.setCacheBlocks(false);  // don't set to true for MR jobs
+// set other scan attrs
+
+        TableMapReduceUtil.initTableMapperJob(
+                sourceTable,      // input table
+                scan,	          // Scan instance to control CF and attribute selection
+                PrunerMapper.class,   // mapper class
+                null,	          // mapper output key
+                null,	          // mapper output value
+                job);
+        TableMapReduceUtil.initTableReducerJob(
+                sourceTable,      // output table
+                null,             // reducer class
+                job);
+    }
+
+    public static Job createJob( Configuration conf
+                               , String table
+                               , String cf
+                               , String accessTrackerTable
+                               , String accessTrackerColumnFamily
+                               , Long ts
+                               ) throws IOException
+    {
+        Job job = new Job(conf);
+        job.setJobName("LeastRecentlyUsedPruner: Pruning " +  table + ":" + cf + " since " + new SimpleDateFormat().format(new Date(ts)));
+        System.out.println("Configuring " + job.getJobName());
+        job.setJarByClass(LeastRecentlyUsedPruner.class);
+        job.getConfiguration().setLong(PrunerMapper.TIMESTAMP_CONF, ts);
+        job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_NAME_CONF, table);
+        job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_CF_CONF, accessTrackerColumnFamily);
+        job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_TABLE_CONF, accessTrackerTable);
+        setupHBaseJob(job, table, cf);
+        job.setNumReduceTasks(0);
+        return job;
+    }
+
+    public static void main(String... argv) throws IOException, java.text.ParseException, ClassNotFoundException, InterruptedException {
+        Configuration conf = HBaseConfiguration.create();
+        String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+
+        CommandLine cli = BulkLoadOptions.parse(new PosixParser(), otherArgs);
+        Long ts = BulkLoadOptions.getTimestamp(cli);
+        String table = BulkLoadOptions.TABLE.get(cli);
+        String cf = BulkLoadOptions.COLUMN_FAMILY.get(cli);
+        String accessTrackerTable = BulkLoadOptions.ACCESS_TABLE.get(cli);
+        String accessTrackerCF = BulkLoadOptions.ACCESS_COLUMN_FAMILY.get(cli);
+        Job job = createJob(conf, table, cf, accessTrackerTable, accessTrackerCF, ts);
+        System.exit(job.waitForCompletion(true) ? 0 : 1);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/StartDateException.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/StartDateException.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/StartDateException.java
new file mode 100644
index 0000000..d3a0549
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/StartDateException.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+
/**
 * Checked exception thrown when a data pruner is given an unusable start date.
 */
public class StartDateException extends Exception {

    /**
     * @param message description of why the start date was rejected
     */
    public StartDateException(String message) {
        super(message);
    }

    /**
     * @param message description of why the start date was rejected
     * @param t       underlying cause
     */
    public StartDateException(String message, Throwable t) {
        super(message, t);
    }
}



[49/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml b/deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
deleted file mode 100644
index 042b490..0000000
--- a/deployment/roles/ambari_common/templates/metron-hadoop-logrotate.yml
+++ /dev/null
@@ -1,135 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#Hadoop HDFS Logs
-/var/log/hadoop/hdfs/*.log* {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/hadoop/hdfs/*.out {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/hadoop/hdfs/*.audit {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#Hadoop Yarn Logs
-/var/log/hadoop/yarn/*.log {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#Hadoop Mapreduce Logs
-/var/log/hadoop/mapreduce/*.log {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#Storm Logs
-/var/log/storm/*.log {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/storm/*.out {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#Kafka Logs
-/var/log/kafka/*.log {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/kafka/*.err {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#HBase Logs
-/var/log/hbase/*.log* {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/hbase/*.out {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/hbase/*.audit {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-#Zookeeper Logs
-/var/log/zookeeper/*.log {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-
-/var/log/zookeeper/*.out {
-  {{ hadoop_logrotate_frequency }}
-  rotate {{ hadoop_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/vars/main.yml b/deployment/roles/ambari_common/vars/main.yml
deleted file mode 100644
index 699dcf7..0000000
--- a/deployment/roles/ambari_common/vars/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-rhel_ambari_install_url: "http://public-repo-1.hortonworks.com/ambari/centos6/2.x/updates/2.1.2.1/ambari.repo"
-ambari_user: "root"
-local_tmp_keygen_file: "/tmp/id_rsa.tmp"
-dest_tmp_keygen_file: "/tmp/id_rsa.tmp"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/defaults/main.yml b/deployment/roles/ambari_config/defaults/main.yml
deleted file mode 100644
index 507b6e3..0000000
--- a/deployment/roles/ambari_config/defaults/main.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-zookeeper_data_dir: /hadoop/zookeeper
-namenode_checkpoint_dir: /hadoop/hdfs/namesecondary
-namenode_name_dir: /hadoop/hdfs/namenode
-datanode_data_dir: /hadoop/hdfs/data
-journalnode_edits_dir: /hadoop/hdfs/journalnode
-jhs_recovery_store_ldb_path: /hadoop/mapreduce/jhs
-nodemanager_local_dirs: /hadoop/yarn/local
-timeline_ldb_store_path: /hadoop/yarn/timeline
-timeline_ldb_state_path: /hadoop/yarn/timeline
-nodemanager_log_dirs: /hadoop/yarn/log
-storm_local_dir: /hadoop/storm
-kafka_log_dirs: /kafka-log
-cluster_type: small_cluster

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/meta/main.yml b/deployment/roles/ambari_config/meta/main.yml
deleted file mode 100644
index 61197e3..0000000
--- a/deployment/roles/ambari_config/meta/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - epel
-  - python-pip
-  - httplib2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/tasks/main.yml b/deployment/roles/ambari_config/tasks/main.yml
deleted file mode 100644
index f44f929..0000000
--- a/deployment/roles/ambari_config/tasks/main.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include_vars: "{{ cluster_type }}.yml"
-
-- name: Install python-requests
-  yum:
-    name: python-requests
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Deploy cluster with Ambari; http://{{ groups.ambari_master[0] }}:{{ ambari_port }}
-  ambari_cluster_state:
-    host: "{{ groups.ambari_master[0] }}"
-    port: "{{ ambari_port }}"
-    username: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    cluster_name: "{{ cluster_name }}"
-    cluster_state: present
-    blueprint_name: "{{ blueprint_name }}"
-    configurations: "{{ configurations }}"
-    wait_for_complete: True
-    blueprint_var: "{{ blueprint }}"
-
-- include: start_services.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/tasks/start_services.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/tasks/start_services.yml b/deployment/roles/ambari_config/tasks/start_services.yml
deleted file mode 100644
index 7c6e0a9..0000000
--- a/deployment/roles/ambari_config/tasks/start_services.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Start All Hadoop Services {{ inventory_hostname }}
-  uri:
-    url: http://{{ inventory_hostname}}:{{ ambari_port}}/api/v1/clusters/{{ cluster_name }}/services/{{ item }}
-    HEADER_X-Requested-By: "{{ ambari_user }}"
-    method: PUT
-    body: "{ \"RequestInfo\": { \"context\": \"Start service via REST\" }, \"Body\": { \"ServiceInfo\": { \"state\": \"STARTED\" }}}"
-    body_format: json
-    status_code: 200,202
-    force_basic_auth: yes
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-  with_items:
-    - "{{ metron_services }}"
-
-- name: Wait for Service Start
-  uri:
-    url: http://{{ inventory_hostname}}:{{ ambari_port}}/api/v1/clusters/{{ cluster_name }}/services/{{ item }}
-    HEADER_X-Requested-By: "{{ ambari_user }}"
-    method: GET
-    status_code: 200
-    force_basic_auth: yes
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    return_content: yes
-  with_items:
-    - "{{ metron_services }}"
-  register: result
-  until: result.content.find("STARTED") != -1
-  retries: 10
-  delay: 60
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml b/deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
deleted file mode 100644
index 526661b..0000000
--- a/deployment/roles/ambari_config/vars/multi_vagrant_cluster.yml
+++ /dev/null
@@ -1,99 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-
-hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
-hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, NODEMANAGER]
-spark_master: [SPARK_JOBHISTORYSERVER]
-storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
-storm_slave: [SUPERVISOR]
-kafka_broker: [KAFKA_BROKER]
-zookeeper_master: [ZOOKEEPER_SERVER]
-hbase_master: [HBASE_MASTER]
-hbase_slave: [HBASE_REGIONSERVER]
-hadoop_clients: [HDFS_CLIENT, YARN_CLIENT, MAPREDUCE2_CLIENT, SPARK_CLIENT, ZOOKEEPER_CLIENT, HBASE_CLIENT]
-
-metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
-
-master_1_components: "{{ hadoop_master | union(hadoop_clients) }}"
-master_1_host:
-  - "{{groups.ambari_slave[0]}}"
-master_2_components: "{{ zookeeper_master | union(storm_master) | union(spark_master) | union(hbase_master) | union(hadoop_clients) }}"
-master_2_host:
-  - "{{groups.ambari_slave[1]}}"
-slave_components: "{{ hadoop_slave | union(storm_slave) | union(kafka_broker) | union(hbase_slave) | union(hadoop_clients) }}"
-
-cluster_name: "metron"
-blueprint_name: "metron_blueprint"
-
-configurations:
-  - zoo.cfg:
-      dataDir: '{{ zookeeper_data_dir | default("/hadoop/zookeeper") }}'
-  - hdfs-site:
-      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir | default("/hadoop/hdfs/namesecondary") }}'
-      dfs.namenode.name.dir: '{{ namenode_name_dir | default("/hadoop/hdfs/namenode") }}'
-      dfs.datanode.data.dir: '{{ datanode_data_dir | default("/hadoop/hdfs/data" ) }}'
-      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir | default("/hadoop/hdfs/journalnode") }}'
-  - hadoop-env:
-      namenode_heapsize: 1024
-      dtnode_heapsize: 1024
-  - hbase-env:
-      hbase_regionserver_heapsize: 1024
-      hbase_master_heapsize: 1024
-  - yarn-env:
-      nodemanager_heapsize: 512
-      yarn_heapsize: 512
-      apptimelineserver_heapsize : 512
-  - mapred-env:
-      jobhistory_heapsize: 256
-  - yarn-site:
-      yarn.nodemanager.resource.memory-mb: 1024
-      yarn.scheduler.maximum-allocation-mb: 1024
-      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs| default("/hadoop/yarn/local") }}'
-      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path | default("/hadoop/yarn/timeline") }}'
-      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path| default("/hadoop/yarn/timeline") }}'
-      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs| default("/hadoop/yarn/log") }}'
-
-  - mapred-site:
-      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path | default("/hadoop/mapreduce/jhs") }}'
-  - storm-site:
-      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
-      storm.local.dir: '{{ storm_local_dir | default("/hadoop/storm") }}'
-  - kafka-env:
-      content: "{% raw %}\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport KAFKA_HEAP_OPTS=\"-Xms256M -Xmx256M\"\nexport KAFKA_JVM_PERFORMANCE_OPTS=\"-server -XX:+UseG1GC -XX:+DisableExplicitGC -Djava.awt.headless=true\"\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport PID_DIR={{kafka_pid_dir}}\nexport LOG_DIR={{kafka_log_dir}}\nexport KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}\n# Add kafka sink to classpath and related depenencies\nif [ -e \"/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\" ]; then\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/lib/*\nfi\nif [ -f /etc/kafka/conf/kafka-ranger-env.sh ]; then\n   . /etc/kafka/conf/kafka-ranger-env.sh\nfi{% endraw %}"
-  - kafka-broker:
-      log.dirs: '{{ kafka_log_dirs | default("/kafka-log") }}'
-
-blueprint:
-  stack_name: HDP
-  stack_version: 2.3
-  groups:
-    - name : master_1
-      cardinality: 1
-      configuration: []  # configuration not yet implemented
-      components: "{{ master_1_components }}"
-      hosts: "{{ master_1_host }}"
-    - name : master_2
-      cardinality: 1
-      configuration: []  # configuration not yet implemented
-      components: "{{ master_2_components }}"
-      hosts: "{{ master_2_host }}"
-    - name: slaves
-      cardinality: 1+
-      configuration: []  # configuration not yet implemented
-      components: "{{ slave_components }}"
-      hosts: "{{ groups.ambari_slave | difference(groups.ambari_slave[0]) | difference(groups.ambari_slave[1]) }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/vars/single_node_vm.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/vars/single_node_vm.yml b/deployment/roles/ambari_config/vars/single_node_vm.yml
deleted file mode 100644
index cb6fe4a..0000000
--- a/deployment/roles/ambari_config/vars/single_node_vm.yml
+++ /dev/null
@@ -1,85 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-# vars file for single_node_vm blueprint
-
-hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
-hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, HDFS_CLIENT, NODEMANAGER, YARN_CLIENT, MAPREDUCE2_CLIENT]
-spark_master: [SPARK_JOBHISTORYSERVER]
-spark_slave: [SPARK_CLIENT]
-storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
-storm_slave: [SUPERVISOR]
-kafka_broker: [KAFKA_BROKER]
-zookeeper_master: [ZOOKEEPER_SERVER]
-zookeeper_slave: [ZOOKEEPER_CLIENT]
-hbase_master: [HBASE_MASTER, HBASE_CLIENT]
-hbase_slave: [HBASE_REGIONSERVER]
-
-metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
-metron_components: "{{ hadoop_master | union(zookeeper_master) | union(storm_master) | union(hbase_master) | union(hadoop_slave) | union(zookeeper_slave) | union(storm_slave) | union(kafka_broker) | union(hbase_slave) }}"
-
-cluster_name: "metron_cluster"
-blueprint_name: "metron_blueprint"
-
-configurations:
-  - zoo.cfg:
-      dataDir: '{{ zookeeper_data_dir }}'
-  - hadoop-env:
-      hadoop_heapsize: 1024
-      namenode_heapsize: 512
-      dtnode_heapsize: 512
-      namenode_opt_permsize: 128m
-  - hbase-env:
-      hbase_regionserver_heapsize: 512
-      hbase_master_heapsize: 512
-      hbase_regionserver_xmn_max: 512
-  - hdfs-site:
-      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir  }}'
-      dfs.namenode.name.dir: '{{ namenode_name_dir }}'
-      dfs.datanode.data.dir: '{{ datanode_data_dir }}'
-      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir }}'
-  - yarn-env:
-      nodemanager_heapsize: 512
-      yarn_heapsize: 512
-      apptimelineserver_heapsize : 512
-      resourcemanager_heapsize: 1024
-  - mapred-env:
-      jobhistory_heapsize: 256
-  - mapred-site:
-      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path }}'
-  - yarn-site:
-      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs }}'
-      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path }}'
-      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path }}'
-      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs }}'
-  - storm-site:
-      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
-      storm.local.dir: '{{ storm_local_dir }}'
-  - kafka-env:
-      content: "{% raw %}\n#!/bin/bash\n\n# Set KAFKA specific environment variables here.\n\n# The java implementation to use.\nexport KAFKA_HEAP_OPTS=\"-Xms256M -Xmx256M\"\nexport KAFKA_JVM_PERFORMANCE_OPTS=\"-server -XX:+UseG1GC -XX:+DisableExplicitGC -Djava.awt.headless=true\"\nexport JAVA_HOME={{java64_home}}\nexport PATH=$PATH:$JAVA_HOME/bin\nexport PID_DIR={{kafka_pid_dir}}\nexport LOG_DIR={{kafka_log_dir}}\nexport KAFKA_KERBEROS_PARAMS={{kafka_kerberos_params}}\n# Add kafka sink to classpath and related depenencies\nif [ -e \"/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\" ]; then\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/ambari-metrics-kafka-sink.jar\n  export CLASSPATH=$CLASSPATH:/usr/lib/ambari-metrics-kafka-sink/lib/*\nfi\nif [ -f /etc/kafka/conf/kafka-ranger-env.sh ]; then\n   . /etc/kafka/conf/kafka-ranger-env.sh\nfi{% endraw %}"
-  - kafka-broker:
-      log.dirs: '{{ kafka_log_dirs }}'
-
-blueprint:
-  stack_name: HDP
-  stack_version: 2.3
-  groups:
-    - name : host_group_1
-      cardinality: 1
-      configurations: []
-      components: "{{ metron_components }}"
-      hosts: "{{ hdp_host_group }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_config/vars/small_cluster.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_config/vars/small_cluster.yml b/deployment/roles/ambari_config/vars/small_cluster.yml
deleted file mode 100644
index a3792e1..0000000
--- a/deployment/roles/ambari_config/vars/small_cluster.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-
-hadoop_master: [NAMENODE, SECONDARY_NAMENODE, RESOURCEMANAGER, HISTORYSERVER]
-hadoop_slave: [APP_TIMELINE_SERVER, DATANODE, NODEMANAGER]
-spark_master: [SPARK_JOBHISTORYSERVER]
-storm_master: [NIMBUS, STORM_UI_SERVER, DRPC_SERVER]
-storm_slave: [SUPERVISOR]
-kafka_broker: [KAFKA_BROKER]
-zookeeper_master: [ZOOKEEPER_SERVER]
-hbase_master: [HBASE_MASTER]
-hbase_slave: [HBASE_REGIONSERVER]
-hadoop_clients: [HDFS_CLIENT, YARN_CLIENT, MAPREDUCE2_CLIENT, SPARK_CLIENT, ZOOKEEPER_CLIENT, HBASE_CLIENT]
-
-metron_services: ["HDFS","YARN","MAPREDUCE2","ZOOKEEPER", "HBASE", "STORM", "KAFKA"]
-
-master_1_components: "{{ hadoop_master | union(hadoop_clients) }}"
-master_1_host:
-  - "{{groups.ambari_slave[0]}}"
-master_2_components: "{{ zookeeper_master | union(storm_master) | union(spark_master) | union(hbase_master) | union(hadoop_clients) }}"
-master_2_host:
-  - "{{groups.ambari_slave[1]}}"
-slave_components: "{{ hadoop_slave | union(storm_slave) | union(kafka_broker) | union(hbase_slave) | union(hadoop_clients) }}"
-
-cluster_name: "metron"
-blueprint_name: "metron_blueprint"
-
-configurations:
-  - zoo.cfg:
-      dataDir: '{{ zookeeper_data_dir | default("/hadoop/zookeeper") }}'
-  - hadoop-env:
-      namenode_heapsize: 1024
-      dtnode_heapsize: 1024
-  - hbase-env:
-      hbase_regionserver_heapsize: 1024
-      hbase_master_heapsize: 1024
-  - hdfs-site:
-      dfs.namenode.checkpoint.dir: '{{ namenode_checkpoint_dir | default("/hadoop/hdfs/namesecondary") }}'
-      dfs.namenode.name.dir: '{{ namenode_name_dir | default("/hadoop/hdfs/namenode") }}'
-      dfs.datanode.data.dir: '{{ datanode_data_dir | default("/hadoop/hdfs/data" ) }}'
-      dfs.journalnode.edits.dir: '{{ journalnode_edits_dir | default("/hadoop/hdfs/journalnode") }}'
-  - mapred-site:
-      mapreduce.jobhistory.recovery.store.leveldb.path : '{{ jhs_recovery_store_ldb_path | default("/hadoop/mapreduce/jhs") }}'
-  - yarn-site:
-      yarn.nodemanager.local-dirs : '{{ nodemanager_local_dirs| default("/hadoop/yarn/local") }}'
-      yarn.timeline-service.leveldb-timeline-store.path: '{{ timeline_ldb_store_path | default("/hadoop/yarn/timeline") }}'
-      yarn.timeline-service.leveldb-state-store.path: '{{ timeline_ldb_state_path| default("/hadoop/yarn/timeline") }}'
-      yarn.nodemanager.log-dirs: '{{ nodemanager_log_dirs| default("/hadoop/yarn/log") }}'
-  - storm-site:
-      supervisor.slots.ports: "[6700, 6701, 6702, 6703]"
-      storm.local.dir: '{{ storm_local_dir | default("/hadoop/storm") }}'
-  - kafka-broker:
-      log.dirs: '{{ kafka_log_dirs | default("/kafka-log") }}'
-
-blueprint:
-  stack_name: HDP
-  stack_version: 2.3
-  groups:
-    - name : master_1
-      cardinality: 1
-      configuration: []  # configuration not yet implemented
-      components: "{{ master_1_components }}"
-      hosts: "{{ master_1_host }}"
-    - name : master_2
-      cardinality: 1
-      configuration: []  # configuration not yet implemented
-      components: "{{ master_2_components }}"
-      hosts: "{{ master_2_host }}"
-    - name: slaves
-      cardinality: 1+
-      configuration: []  # configuration not yet implemented
-      components: "{{ slave_components }}"
-      hosts: "{{ groups.ambari_slave | difference(groups.ambari_slave[0]) | difference(groups.ambari_slave[1]) }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_gather_facts/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_gather_facts/meta/main.yml b/deployment/roles/ambari_gather_facts/meta/main.yml
deleted file mode 100644
index 61197e3..0000000
--- a/deployment/roles/ambari_gather_facts/meta/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - epel
-  - python-pip
-  - httplib2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_gather_facts/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_gather_facts/tasks/main.yml b/deployment/roles/ambari_gather_facts/tasks/main.yml
deleted file mode 100644
index db4927d..0000000
--- a/deployment/roles/ambari_gather_facts/tasks/main.yml
+++ /dev/null
@@ -1,151 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Ambari rest get cluster name
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: cluster_name_response
-
-- set_fact:
-    cluster_name: "{{ (cluster_name_response.content | from_json)['items'][0].Clusters.cluster_name }}"
-
-- name: Ambari rest get namenode hosts
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/HDFS/components/NAMENODE"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: namenode_hosts_response
-
-- set_fact:
-    namenode_host: "{{ (namenode_hosts_response.content | from_json).host_components[0].HostRoles.host_name }}"
-
-- name: Ambari rest get namenode core-site tag
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ namenode_host }}/host_components/NAMENODE"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: core_site_tag_response
-
-- set_fact:
-    core_site_tag: "{{ (core_site_tag_response.content | from_json).HostRoles.actual_configs['core-site'].default }}"
-
-- name: Ambari rest get namenode core-site properties
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=core-site&tag={{ core_site_tag }}"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: core_site_response
-
-- set_fact:
-    hdfs_url: "{{ (core_site_response.content | from_json)['items'][0].properties['fs.defaultFS'] }}"
-
-- name: Ambari rest get kafka broker hosts
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/KAFKA/components/KAFKA_BROKER"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: kafka_broker_hosts_response
-
-- set_fact:
-    kafka_broker_hosts: "{{ (kafka_broker_hosts_response.content | from_json).host_components | map(attribute='HostRoles.host_name') | list }}"
-
-- name: Ambari rest get kafka kafka-broker tag
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ kafka_broker_hosts[0] }}/host_components/KAFKA_BROKER"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: kafka_broker_tag_response
-
-- set_fact:
-    kafka_broker_tag: "{{ (kafka_broker_tag_response.content | from_json).HostRoles.actual_configs['kafka-broker'].default }}"
-
-- name: Ambari rest get kafka kafka-broker properties
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=kafka-broker&tag={{ kafka_broker_tag }}"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: kafka_broker_properties_response
-
-- set_fact:
-    kafka_broker_port: "{{ (kafka_broker_properties_response.content | from_json)['items'][0].properties['listeners'] | replace('PLAINTEXT://localhost:', '')}}"
-
-- set_fact:
-    kafka_broker_url: "{% for host in kafka_broker_hosts %}{% if loop.index != 1 %},{% endif %}{{ host }}:{{ kafka_broker_port }}{% endfor %}"
-
-- name: Ambari rest get zookeeper hosts
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/services/ZOOKEEPER/components/ZOOKEEPER_SERVER"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: zookeeper_hosts_response
-
-- set_fact:
-    zookeeper_hosts: "{{ (zookeeper_hosts_response.content | from_json).host_components | map(attribute='HostRoles.host_name') | list }}"
-
-- name: Ambari rest get zookeeper zoo.cfg tag
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/hosts/{{ zookeeper_hosts[0] }}/host_components/ZOOKEEPER_SERVER"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: zookeeper_tag_response
-
-- set_fact:
-    zookeeper_tag: "{{ (zookeeper_tag_response.content | from_json).HostRoles.actual_configs['zoo.cfg'].default }}"
-
-- name: Ambari rest get kafka kafka-broker properties
-  uri:
-    url: "http://{{ groups.ambari_master[0] }}:{{ ambari_port }}/api/v1/clusters/{{ cluster_name }}/configurations?type=zoo.cfg&tag={{ zookeeper_tag }}"
-    user: "{{ ambari_user }}"
-    password: "{{ ambari_password }}"
-    force_basic_auth: yes
-    return_content: yes
-  register: zookeeper_properties_response
-
-- set_fact:
-    zookeeper_port: "{{ (zookeeper_properties_response.content | from_json)['items'][0].properties['clientPort'] }}"
-
-- set_fact:
-    zookeeper_url: "{% for host in zookeeper_hosts %}{% if loop.index != 1 %},{% endif %}{{ host }}:{{ zookeeper_port }}{% endfor %}"
-
-- name: debug
-  debug:
-    msg: "zookeeper_port = {{ zookeeper_port }},
-          zookeeper_hosts = {{ zookeeper_hosts }},
-          zookeeper_url = {{ zookeeper_url }},
-          kafka_broker_port = {{ kafka_broker_port }},
-          kafka_broker_hosts = {{ kafka_broker_hosts }},
-          kafka_broker_url = {{ kafka_broker_url }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_master/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_master/defaults/main.yml b/deployment/roles/ambari_master/defaults/main.yml
deleted file mode 100644
index 3b8cc73..0000000
--- a/deployment/roles/ambari_master/defaults/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-ambari_server_mem: 2048
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_master/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_master/tasks/main.yml b/deployment/roles/ambari_master/tasks/main.yml
deleted file mode 100644
index 25c3784..0000000
--- a/deployment/roles/ambari_master/tasks/main.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-# tasks file for ambari_master
-- name: Install ambari server
-  yum:
-    name: ambari-server
-    state: present
-    update_cache: yes
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Set Ambari Server Max Memory
-  replace:
-    dest: /var/lib/ambari-server/ambari-env.sh
-    regexp:  "\ -Xmx2048m\ "
-    replace: " -Xmx{{ ambari_server_mem }}m "
-    backup: no
-
-- name: Setup ambari server
-  shell: ambari-server setup -s && touch /etc/ambari-server/configured creates=/etc/ambari-server/configured
-  register: ambari_server_setup
-  failed_when: ambari_server_setup.stderr
-
-- name: start ambari server
-  service:
-    name: ambari-server
-    state: restarted
-
-- name : check if ambari-server is up on {{ ambari_host }}:{{ambari_port}}
-  wait_for :
-    host: "{{ ambari_host }}"
-    port: "{{ ambari_port }}"
-    delay: 120
-    timeout: 300

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_slave/files/hostname.sh
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_slave/files/hostname.sh b/deployment/roles/ambari_slave/files/hostname.sh
deleted file mode 100644
index cc8c1cd..0000000
--- a/deployment/roles/ambari_slave/files/hostname.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-echo {{ inventory_hostname }}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_slave/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_slave/tasks/main.yml b/deployment/roles/ambari_slave/tasks/main.yml
deleted file mode 100644
index fc068da..0000000
--- a/deployment/roles/ambari_slave/tasks/main.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-# tasks file for ambari_slave
-- name: Install ambari-agent
-  yum:
-    name: ambari-agent
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Create ambari-agent hostname script
-  template:
-    src: "../roles/ambari_slave/files/hostname.sh"
-    dest: "/var/lib/ambari-agent/hostname.sh"
-    mode: 0744
-    owner: "{{ ambari_user }}"
-    group: "{{ ambari_user }}"
-
-- name: Configure ambari-server hostname in ambari-agent configuration
-  lineinfile:
-    dest: /etc/ambari-agent/conf/ambari-agent.ini
-    regexp: "{{ item.regexp }}"
-    line: "{{ item.line }}"
-    insertafter: "{{ item.insertafter }}"
-    backup: yes
-  with_items:
-    - { regexp: "^.*hostname=.*$", line: "hostname={{ groups.ambari_master[0] }}", insertafter: '\[server\]' }
-    - { regexp: "^hostname_script=.*$", line: "hostname_script=/var/lib/ambari-agent/hostname.sh", insertafter: '\[agent\]'}
-
-- name: Ensure ambari-agent is running
-  service: name=ambari-agent state=restarted
-
-- name : Wait for agent to register
-  command : sleep 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_slave/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_slave/vars/main.yml b/deployment/roles/ambari_slave/vars/main.yml
deleted file mode 100644
index 600b3fc..0000000
--- a/deployment/roles/ambari_slave/vars/main.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-#
-# TODO: duplicates from ambari-common.  need all of these moved to group_vars
-#
-rhel_ambari_install_url: "http://public-repo-1.hortonworks.com/ambari/centos6/2.x/updates/2.1.2.1/ambari.repo"
-ambari_user: "root"
-local_tmp_keygen_file: "/tmp/id_rsa.tmp"
-dest_tmp_keygen_file: "/tmp/id_rsa.tmp"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/meta/main.yml b/deployment/roles/bro/meta/main.yml
deleted file mode 100644
index 9c9286f..0000000
--- a/deployment/roles/bro/meta/main.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - libselinux-python
-  - ambari_gather_facts
-  - build-tools
-  - kafka-client
-  - librdkafka

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/bro-plugin-kafka.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/bro-plugin-kafka.yml b/deployment/roles/bro/tasks/bro-plugin-kafka.yml
deleted file mode 100644
index d8e887d..0000000
--- a/deployment/roles/bro/tasks/bro-plugin-kafka.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Distribute bro-kafka plugin
-  copy: src=../../../metron-sensors/bro-plugin-kafka dest=/tmp/ mode=0755
-
-- name: Compile and install the plugin
-  shell: "{{ item }}"
-  args:
-    chdir: "/tmp/bro-plugin-kafka"
-    creates: "{{ bro_home }}/lib/bro/plugins/BRO_KAFKA"
-  with_items:
-    - rm -rf build/
-    - "./configure --bro-dist=/tmp/bro-{{ bro_version }} --install-root={{ bro_home }}/lib/bro/plugins/ --with-librdkafka={{ librdkafka_home }}"
-    - make
-    - make install
-
-- name: Configure bro-kafka plugin
-  lineinfile:
-    dest: "{{ bro_home }}/share/bro/site/local.bro"
-    line: "{{ item }}"
-  with_items:
-    - "@load Bro/Kafka/logs-to-kafka.bro"
-    - "redef Kafka::logs_to_send = set(HTTP::LOG, DNS::LOG);"
-    - "redef Kafka::topic_name = \"{{ bro_topic }}\";"
-    - "redef Kafka::tag_json = T;"
-    - "redef Kafka::kafka_conf = table([\"metadata.broker.list\"] = \"{{ kafka_broker_url }}\");"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/bro.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/bro.yml b/deployment/roles/bro/tasks/bro.yml
deleted file mode 100644
index fb27ef9..0000000
--- a/deployment/roles/bro/tasks/bro.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download bro
-  get_url:
-    url: "https://www.bro.org/downloads/release/bro-{{ bro_version }}.tar.gz"
-    dest: "/tmp/bro-{{ bro_version }}.tar.gz"
-
-- name: Extract bro tarball
-  unarchive:
-    src: "/tmp/bro-{{ bro_version }}.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: "/tmp/bro-{{ bro_version }}"
-
-- name: Compile and Install bro
-  shell: "{{ item }}"
-  args:
-    chdir: "/tmp/bro-{{ bro_version }}"
-    creates: "{{ bro_home }}/bin/bro"
-  with_items:
-    - "./configure --prefix={{ bro_home }}"
-    - make
-    - make install
-
-- name: Configure bro
-  lineinfile:
-    dest: "{{ bro_home }}/etc/node.cfg"
-    regexp: '^interface=.*$'
-    line: 'interface={{ sniff_interface }}'

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/dependencies.yml b/deployment/roles/bro/tasks/dependencies.yml
deleted file mode 100644
index 431e861..0000000
--- a/deployment/roles/bro/tasks/dependencies.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install prerequisites
-  yum: name={{ item }}
-  with_items:
-    - cmake
-    - make
-    - gcc
-    - gcc-c++
-    - flex
-    - bison
-    - libpcap
-    - libpcap-devel
-    - openssl-devel
-    - python-devel
-    - swig
-    - zlib-devel
-    - perl
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/librdkafka.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/librdkafka.yml b/deployment/roles/bro/tasks/librdkafka.yml
deleted file mode 100644
index 652d319..0000000
--- a/deployment/roles/bro/tasks/librdkafka.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download librdkafka
-  get_url:
-    url: "{{ librdkafka_url }}"
-    dest: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
-
-- name: Extract librdkafka tarball
-  unarchive:
-    src: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: "/tmp/librdkafka-{{ librdkafka_version }}"
-
-- name: Compile and install librdkafka
-  shell: "{{ item }}"
-  args:
-    chdir: "/tmp/librdkafka-{{ librdkafka_version }}"
-    creates: "{{ librdkafka_home }}/lib/librdkafka.so"
-  with_items:
-    - rm -rf build/
-    - "./configure --prefix={{ librdkafka_home }}"
-    - make
-    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/main.yml b/deployment/roles/bro/tasks/main.yml
deleted file mode 100644
index 14426d6..0000000
--- a/deployment/roles/bro/tasks/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: dependencies.yml
-- include: librdkafka.yml
-- include: bro.yml
-- include: bro-plugin-kafka.yml
-- include: start-bro.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/tasks/start-bro.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/tasks/start-bro.yml b/deployment/roles/bro/tasks/start-bro.yml
deleted file mode 100644
index 1a0b938..0000000
--- a/deployment/roles/bro/tasks/start-bro.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Turn on promiscuous mode for {{ sniff_interface }}
-  shell: "ip link set {{ sniff_interface }} promisc on"
-
-- name: Start bro
-  shell: "{{ bro_home }}/bin/broctl deploy"
-
-- name: Bro Cronjob
-  cron:
-    name: Bro Cron
-    minute: "{{ bro_crontab_minutes }}"
-    job: "{{ item }}"
-  with_items:
-    - "{{ bro_crontab_job }}"
-    - "{{ bro_clean_job }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/bro/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/bro/vars/main.yml b/deployment/roles/bro/vars/main.yml
deleted file mode 100644
index 2ff5177..0000000
--- a/deployment/roles/bro/vars/main.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-bro_home: /usr/local/bro
-bro_version: 2.4.1
-bro_daemon_log: /var/log/bro.log
-bro_topic: bro
-
-# bro cronjob
-bro_crontab_minutes: 0-59/5
-bro_crontab_job: "{{ bro_home }}/bin/broctl cron"
-bro_clean_job: "rm -rf {{ bro_home }}/spool/tmp/*"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/build-tools/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/build-tools/meta/main.yml b/deployment/roles/build-tools/meta/main.yml
deleted file mode 100644
index ddf6aa9..0000000
--- a/deployment/roles/build-tools/meta/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/build-tools/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/build-tools/tasks/main.yml b/deployment/roles/build-tools/tasks/main.yml
deleted file mode 100644
index c47ef43..0000000
--- a/deployment/roles/build-tools/tasks/main.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install Build Tools
-  yum: name={{ item }}
-  with_items:
-    - "@Development tools"
-    - libdnet-devel
-    - rpm-build
-    - libpcap
-    - libpcap-devel
-    - pcre
-    - pcre-devel
-    - zlib
-    - zlib-devel
-    - glib2-devel
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/defaults/main.yml b/deployment/roles/elasticsearch/defaults/main.yml
deleted file mode 100644
index 0026717..0000000
--- a/deployment/roles/elasticsearch/defaults/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-elasticsearch_data_dir: /var/lib/elasticsearch
-elasticsearch_network_interface: eth0
-elasticsearch_logrotate_frequency: daily
-elasticsearch_logrotate_retention: 30
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/files/elasticsearch.repo
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/files/elasticsearch.repo b/deployment/roles/elasticsearch/files/elasticsearch.repo
deleted file mode 100644
index f033ced..0000000
--- a/deployment/roles/elasticsearch/files/elasticsearch.repo
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-[elasticsearch-1.7]
-name=Elasticsearch repository for 1.7.x packages
-baseurl=http://packages.elastic.co/elasticsearch/1.7/centos
-gpgcheck=1
-gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
-enabled=1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/files/yaf_index.template
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/files/yaf_index.template b/deployment/roles/elasticsearch/files/yaf_index.template
deleted file mode 100644
index 15ff6f8..0000000
--- a/deployment/roles/elasticsearch/files/yaf_index.template
+++ /dev/null
@@ -1,82 +0,0 @@
-{
-   "template": "yaf_index*",
-   "mappings": {
-      "yaf_doc": {
-         "properties": {
-            "timestamp": {
-               "type": "date",
-               "format": "dateOptionalTime"
-            },
-            "location_point": {
-               "type": "geo_point"
-            },
-            "end-time": {
-               "type": "string"
-            },
-            "duration": {
-               "type": "string"
-            },
-            "rtt": {
-               "type": "string"
-            },
-            "proto": {
-               "type": "string"
-            },
-            "sip": {
-               "type": "string"
-            },
-            "sp": {
-               "type": "string"
-            },
-            "dip": {
-               "type": "string"
-            },
-            "dp": {
-               "type": "string"
-            },
-            "iflags": {
-               "type": "string"
-            },
-            "uflags": {
-               "type": "string"
-            },
-            "riflags": {
-               "type": "string"
-            },
-            "ruflags": {
-               "type": "string"
-            },
-            "isn": {
-               "type": "string"
-            },
-            "risn": {
-               "type": "string"
-            },
-            "tag": {
-               "type": "string"
-            },
-            "rtag": {
-               "type": "string"
-            },
-            "pkt": {
-               "type": "string"
-            },
-            "oct": {
-               "type": "string"
-            },
-            "rpkt": {
-               "type": "string"
-            },
-            "roct": {
-               "type": "string"
-            },
-            "app": {
-               "type": "string"
-            },
-            "end-reason": {
-               "type": "string"
-            }
-         }
-      }
-   }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/meta/main.yml b/deployment/roles/elasticsearch/meta/main.yml
deleted file mode 100644
index f5f059a..0000000
--- a/deployment/roles/elasticsearch/meta/main.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - java_jdk
-  - epel
-  - python-pip
-  - httplib2
-  - libselinux-python
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/tasks/configure_index.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/tasks/configure_index.yml b/deployment/roles/elasticsearch/tasks/configure_index.yml
deleted file mode 100644
index 09739be..0000000
--- a/deployment/roles/elasticsearch/tasks/configure_index.yml
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name : Wait for Elasticsearch Host to Start
-  wait_for:
-    host: "{{ groups.search[0] }}"
-    port: "{{ elasticsearch_web_port }}"
-    delay: 10
-    timeout: 300
-
-- name: Wait for Green Index Status
-  uri:
-    url: "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}/_cat/health"
-    method: GET
-    status_code: 200
-    return_content: yes
-  register: result
-  until: result.content.find("green") != -1
-  retries: 10
-  delay: 60
-  run_once: yes
-
-- name: Add Elasticsearch templates for topologies
-  uri:
-    url: "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}/_template/template_yaf"
-    method: POST
-    body: "{{ lookup('file','yaf_index.template') }}"
-    status_code: 200
-    body_format: json
-  run_once: yes

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/tasks/main.yml b/deployment/roles/elasticsearch/tasks/main.yml
deleted file mode 100644
index 26554aa..0000000
--- a/deployment/roles/elasticsearch/tasks/main.yml
+++ /dev/null
@@ -1,73 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Add Elasticsearch GPG key.
-  rpm_key:
-    key: https://packages.elastic.co/GPG-KEY-elasticsearch
-    state: present
-
-- name: Add Elasticsearch repository.
-  copy:
-    src: elasticsearch.repo
-    dest: /etc/yum.repos.d/elasticsearch.repo
-    mode: 0644
-
-- name: Install Elasticsearch.
-  yum:
-    name: elasticsearch
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Create Data Directories
-  file:
-    path: "{{ item }}"
-    state: directory
-    mode: 0755
-    owner: elasticsearch
-    group: elasticsearch
-  when: elasticsearch_data_dir is defined
-  with_items:
-     - '{{ elasticsearch_data_dir.split(",") }}'
-
-- name: Configure Elasticsearch.
-  lineinfile: >
-    dest=/etc/elasticsearch/elasticsearch.yml
-    regexp="{{ item.regexp }}"
-    line="{{ item.line }}"
-    state=present
-  with_items:
-    - { regexp: '#cluster\.name', line: 'cluster.name: metron' }
-    - { regexp: '#network\.host:', line: 'network.host: _{{
-    elasticsearch_network_interface  }}:ipv4_' }
-    - { regexp: '#discovery\.zen\.ping\.unicast\.hosts',
-    line: 'discovery.zen.ping.unicast.hosts: [ {{ es_hosts }} ]'}
-    - { regexp: '#path\.data', line: 'path.data: {{     elasticsearch_data_dir }}' }
-
-- name: Start Elasticsearch.
-  service: name=elasticsearch state=started enabled=yes
-
-- include: configure_index.yml
-
-- name: Create Logrotate Script for Elasticsearch
-  template:
-    src: "metron-elasticsearch-logrotate.yml"
-    dest: "/etc/logrotate.d/metron-elasticsearch"
-    mode: 0644
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml b/deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
deleted file mode 100644
index 5504ce1..0000000
--- a/deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#Elasticsearch
-/var/log/elasticsearch/*.log {
-  {{ elasticsearch_logrotate_frequency }}
-  rotate {{ elasticsearch_logrotate_retention }}
-  missingok
-  notifempty
-  copytruncate
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/epel/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/epel/tasks/main.yml b/deployment/roles/epel/tasks/main.yml
deleted file mode 100644
index db4e70b..0000000
--- a/deployment/roles/epel/tasks/main.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Get epel-repo rpm
-  get_url:
-    dest: /tmp/epel-release.rpm
-    url: http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
-
-- name: Install epel-repo rpm
-  yum:
-    pkg: /tmp/epel-release.rpm
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/flume/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/flume/meta/main.yml b/deployment/roles/flume/meta/main.yml
deleted file mode 100644
index ff35a5a..0000000
--- a/deployment/roles/flume/meta/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - java_jdk
-  - libselinux-python

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/flume/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/flume/tasks/main.yml b/deployment/roles/flume/tasks/main.yml
deleted file mode 100644
index 8576c3c..0000000
--- a/deployment/roles/flume/tasks/main.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Retrieve HDP repository definition
-  get_url:
-    url: "{{ hdp_repo_def }}"
-    dest: /etc/yum.repos.d/hdp.repo
-    mode: 0644
-
-- name: Install flume
-  yum: name={{item}}
-  with_items:
-    - flume
-    - flume-agent
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Create flume-env.sh
-  shell: cp /etc/flume/conf/flume-env.sh.template /etc/flume/conf/flume-env.sh
-
-- name: Configure flume-env.sh
-  lineinfile: >
-    dest=/etc/flume/conf/flume-env.sh
-    regexp="{{ item.regexp }}"
-    line="{{ item.line }}"
-    state=present
-  with_items:
-    - { regexp: '^.*export JAVA_HOME=.*$', line: 'export JAVA_HOME={{ java_home }}' }
-
-- name: Create flume service
-  shell: "{{item}}"
-  with_items:
-    - cp /usr/hdp/current/flume-server/etc/rc.d/init.d/flume-agent /etc/init.d/
-
-- name: Remove default flume configuration
-  file: path=/etc/flume/conf/flume.conf state=absent

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/flume/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/flume/vars/main.yml b/deployment/roles/flume/vars/main.yml
deleted file mode 100644
index 351d125..0000000
--- a/deployment/roles/flume/vars/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/hadoop_setup/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/hadoop_setup/defaults/main.yml b/deployment/roles/hadoop_setup/defaults/main.yml
deleted file mode 100644
index 99a55f6..0000000
--- a/deployment/roles/hadoop_setup/defaults/main.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-num_partitions: 1
-retention_in_gb: 10
-pycapa_topic: pcap
-bro_topic: bro
-yaf_topic: yaf
-snort_topic: snort
-enrichments_topic: enrichments
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/hadoop_setup/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/hadoop_setup/meta/main.yml b/deployment/roles/hadoop_setup/meta/main.yml
deleted file mode 100644
index 8f0bf2b..0000000
--- a/deployment/roles/hadoop_setup/meta/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/hadoop_setup/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/hadoop_setup/tasks/main.yml b/deployment/roles/hadoop_setup/tasks/main.yml
deleted file mode 100644
index de01abf..0000000
--- a/deployment/roles/hadoop_setup/tasks/main.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-#must run on hadoop host
-- name: Create HBase tables
-  shell: echo "create '{{ item }}','t'" | hbase shell -n
-  ignore_errors: yes
-  with_items:
-    - "{{ pcap_hbase_table }}"
-    - "{{ tracker_hbase_table }}"
-    - "{{ threatintel_hbase_table }}"
-    - "{{ enrichment_hbase_table }}"
-
-#if kafka topic
-- name: Create Kafka topics
-  shell: "{{ kafka_home }}/bin/kafka-topics.sh --zookeeper {{ zookeeper_url }} --create --topic {{ item }} --partitions {{ num_partitions }} --replication-factor 1 --config retention.bytes={{ retention_in_gb * 1024 * 1024 * 1024}}"
-  ignore_errors: yes
-  with_items:
-    - "{{ pycapa_topic }}"
-    - "{{ bro_topic }}"
-    - "{{ yaf_topic }}"
-    - "{{ snort_topic }}"
-    - "{{ enrichments_topic }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/hadoop_setup/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/hadoop_setup/vars/main.yml b/deployment/roles/hadoop_setup/vars/main.yml
deleted file mode 100644
index 9747044..0000000
--- a/deployment/roles/hadoop_setup/vars/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-kafka_home: /usr/hdp/current/kafka-broker/

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/httplib2/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/httplib2/tasks/main.yml b/deployment/roles/httplib2/tasks/main.yml
deleted file mode 100644
index 5502cf4..0000000
--- a/deployment/roles/httplib2/tasks/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install python httplib2 dependency
-  pip:
-    name: httplib2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/java_jdk/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/java_jdk/defaults/main.yml b/deployment/roles/java_jdk/defaults/main.yml
deleted file mode 100644
index 28f6c71..0000000
--- a/deployment/roles/java_jdk/defaults/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-java_home: /usr/jdk64/jdk1.8.0_40



[30/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/resources/taxii-messages/messages.poll
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/resources/taxii-messages/messages.poll b/metron-platform/metron-data-management/src/test/resources/taxii-messages/messages.poll
new file mode 100644
index 0000000..1c9d529
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/resources/taxii-messages/messages.poll
@@ -0,0 +1,2914 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<taxii_11:Poll_Response collection_name="guest.Abuse_ch" more="false" in_response_to="urn:uuid:8bb2bae7-cc8a-43ae-ab81-f581e6e97a7e" message_id="36900" xmlns:xmldsig="http://www.w3.org/2000/09/xmldsig#" xmlns:taxii_11="http://taxii.mitre.org/messages/taxii_xml_binding-1.1">
+    <taxii_11:Inclusive_End_Timestamp>2016-02-22T15:24:02.950562Z</taxii_11:Inclusive_End_Timestamp>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-16853623-bf9e-4691-a602-c9e0a9b4777a" timestamp="2016-02-22T15:24:02.958672+00:00" version="1.1.1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:indicator="http://stix.mitre.org/Indicator-2" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCom
 mon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Indicators>
+                    <stix:Indicator id="opensource:indicator-e04fe4b7-82ae-4586-99bc-40b8c1d99304" timestamp="2014-10-31T16:44:24.973043+00:00" version="2.1.1" xsi:type="indicator:IndicatorType">
+                        <indicator:Title>ZeuS Tracker (offline)| www.office-112.com/wp-blog/offi.bin (2014-10-13) | This domain has been identified as malicious by zeustracker.abuse.ch</indicator:Title>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">Domain Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">URL Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">File Hash Watchlist</indicator:Type>
+                        <indicator:Description>This domain www.office-112.com has been identified as malicious by zeustracker.abuse.ch. For more detailed infomation about this indicator go to [CAUTION!!Read-URL-Before-Click] [https://zeustracker.abuse.ch/monitor.php?host=www.office-112.com].</indicator:Description>
+                        <indicator:Observable idref="opensource:Observable-c374c3ab-5c34-46bb-aa56-2e2d33de8c18">
+            </indicator:Observable>
+                        <indicator:Indicated_TTP>
+                            <stixCommon:TTP idref="opensource:ttp-48eaa91e-c331-4e1d-89e3-254b440cd927" xsi:type="ttp:TTPType"/>
+            </indicator:Indicated_TTP>
+                        <indicator:Producer>
+                            <stixCommon:Identity id="opensource:Identity-cae1a346-9f33-488f-af6e-1109692473ee">
+                                <stixCommon:Name>zeustracker.abuse.ch</stixCommon:Name>
+                </stixCommon:Identity>
+                            <stixCommon:Time>
+                                <cyboxCommon:Produced_Time>2014-10-13T00:00:00+00:00</cyboxCommon:Produced_Time>
+                                <cyboxCommon:Received_Time>2014-10-20T19:29:30+00:00</cyboxCommon:Received_Time>
+                </stixCommon:Time>
+            </indicator:Producer>
+        </stix:Indicator>
+    </stix:Indicators>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.960198Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-b1c62719-4936-4c1e-822f-43596dc85a98" timestamp="2016-02-22T15:24:02.960865+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="h
 ttp://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:TTPs>
+                    <stix:TTP id="opensource:ttp-48eaa91e-c331-4e1d-89e3-254b440cd927" timestamp="2014-10-31T16:44:24.974454+00:00" version="1.1.1" xsi:type="ttp:TTPType">
+                        <ttp:Title>ZeuS</ttp:Title>
+                        <ttp:Behavior>
+                            <ttp:Malware>
+                                <ttp:Malware_Instance id="opensource:malware-4eafc7e8-833c-4807-986c-bfbca8a9e86e">
+                                    <ttp:Type xsi:type="stixVocabs:MalwareTypeVocab-1.0">Remote Access Trojan</ttp:Type>
+                                    <ttp:Name>ZeuS</ttp:Name>
+                                    <ttp:Name>Zbot</ttp:Name>
+                                    <ttp:Name>Zeus</ttp:Name>
+                                    <ttp:Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware that runs on computers running under versions of the Microsoft Windows operating system. While it is capable of being used to carry out many malicious and criminal tasks, it is often used to steal banking information by man-in-the-browser keystroke logging and form grabbing. It is also used to install the CryptoLocker ransomware.[1] Zeus is spread mainly through drive-by downloads and phishing schemes. (2014(http://en.wikipedia.org/wiki/Zeus_%28Trojan_horse%29))</ttp:Description>
+                                    <ttp:Short_Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware effects Microsoft Windows operating system</ttp:Short_Description>
+                    </ttp:Malware_Instance>
+                </ttp:Malware>
+            </ttp:Behavior>
+        </stix:TTP>
+    </stix:TTPs>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.961541Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-466aa3c2-3c73-4e17-856a-299c0ce8e53b" timestamp="2016-02-22T15:24:02.962045+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi
 ="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-c374c3ab-5c34-46bb-aa56-2e2d33de8c18">
+                        <cybox:Observable_Composition operator="OR">
+                            <cybox:Observable idref="opensource:Observable-b2084701-ab03-4233-b3ee-4cf96e3b9131">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-84495ed8-634c-4ef8-aeb0-5968e1ef065b">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-61b0fce7-df20-4f5f-a410-a2e1f80debc4">
+                </cybox:Observable>
+            </cybox:Observable_Composition>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.962573Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-4aac2ef7-e5ae-413b-9da8-80457a3a5a1c" timestamp="2016-02-22T15:24:02.963049+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:FileObj="http://cybox.mitre.org/objects#FileObject-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/comm
 on-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-61b0fce7-df20-4f5f-a410-a2e1f80debc4" sighting_count="1">
+                        <cybox:Title>File: offi.bin</cybox:Title>
+                        <cybox:Description>FileName: offi.bin | FileHash: f4004af2ad5e52fc9a67c5950978b141 | </cybox:Description>
+                        <cybox:Object id="opensource:File-15a9628d-42c7-43c8-ac4c-fc1237db2cdb">
+                            <cybox:Properties xsi:type="FileObj:FileObjectType">
+                                <FileObj:File_Name>offi.bin</FileObj:File_Name>
+                                <FileObj:File_Format>bin</FileObj:File_Format>
+                                <FileObj:Hashes>
+                                    <cyboxCommon:Hash>
+                                        <cyboxCommon:Type xsi:type="cyboxVocabs:HashNameVocab-1.0">MD5</cyboxCommon:Type>
+                                        <cyboxCommon:Simple_Hash_Value condition="Equals">f4004af2ad5e52fc9a67c5950978b141</cyboxCommon:Simple_Hash_Value>
+                        </cyboxCommon:Hash>
+                    </FileObj:Hashes>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.964187Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-7724b0a5-e820-4320-b1aa-6bee89070ea9" timestamp="2016-02-22T15:24:02.964677+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:URIObj="http://cybox.mitre.org/objects#URIObject
 -2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-84495ed8-634c-4ef8-aeb0-5968e1ef065b" sighting_count="1">
+                        <cybox:Title>URI: http://www.office-112.com/wp-blog/offi.bin</cybox:Title>
+                        <cybox:Description>URI: http://www.office-112.com/wp-blog/offi.bin | Type: URL | </cybox:Description>
+                        <cybox:Object id="opensource:URI-a56683cd-0553-46ee-b197-431928b184b1">
+                            <cybox:Properties type="URL" xsi:type="URIObj:URIObjectType">
+                                <URIObj:Value condition="Equals">http://www.office-112.com/wp-blog/offi.bin</URIObj:Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.965365Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-ee87f913-a31c-4fa8-84c9-d7b5212f3483" timestamp="2016-02-22T15:24:02.965950+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:DomainNameObj="http://cybox.mitre.org/objects#DomainNameObject-1" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mi
 tre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-b2084701-ab03-4233-b3ee-4cf96e3b9131" sighting_count="1">
+                        <cybox:Title>Domain: www.office-112.com</cybox:Title>
+                        <cybox:Description>Domain: www.office-112.com | isFQDN: True | </cybox:Description>
+                        <cybox:Object id="opensource:DomainName-045c58ac-5fb4-4b84-95a2-19cf91c1e166">
+                            <cybox:Properties xsi:type="DomainNameObj:DomainNameObjectType">
+                                <DomainNameObj:Value condition="Equals">www.office-112.com</DomainNameObj:Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.966541Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-faf794a7-b20f-4ef2-8b81-08d706799b9d" timestamp="2016-02-22T15:24:02.966875+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:indicator="http://stix.mitre.org/Indicator-2" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCom
 mon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Indicators>
+                    <stix:Indicator id="opensource:indicator-5c797902-13ea-4b3f-abb4-867eab337185" timestamp="2014-10-31T16:44:24.723668+00:00" version="2.1.1" xsi:type="indicator:IndicatorType">
+                        <indicator:Title>ZeuS Tracker (offline)| 94.102.53.142/~zadmin/find/http.bin (2014-10-14) | This IP address has been identified as malicious by zeustracker.abuse.ch</indicator:Title>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">IP Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">URL Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">File Hash Watchlist</indicator:Type>
+                        <indicator:Description>This IP address 94.102.53.142 has been identified as malicious by zeustracker.abuse.ch. For more detailed infomation about this indicator go to [CAUTION!!Read-URL-Before-Click] [https://zeustracker.abuse.ch/monitor.php?host=94.102.53.142].</indicator:Description>
+                        <indicator:Observable idref="opensource:Observable-87fd80af-647f-43ab-8992-3fe478593793">
+            </indicator:Observable>
+                        <indicator:Indicated_TTP>
+                            <stixCommon:TTP idref="opensource:ttp-08b96668-60fe-4a85-b28e-31fc9fe917c2" xsi:type="ttp:TTPType"/>
+            </indicator:Indicated_TTP>
+                        <indicator:Producer>
+                            <stixCommon:Identity id="opensource:Identity-6c77d05c-9929-4d50-bcc7-edd06125ca38">
+                                <stixCommon:Name>zeustracker.abuse.ch</stixCommon:Name>
+                </stixCommon:Identity>
+                            <stixCommon:Time>
+                                <cyboxCommon:Produced_Time>2014-10-14T00:00:00+00:00</cyboxCommon:Produced_Time>
+                                <cyboxCommon:Received_Time>2014-10-20T19:29:30+00:00</cyboxCommon:Received_Time>
+                </stixCommon:Time>
+            </indicator:Producer>
+        </stix:Indicator>
+    </stix:Indicators>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.968141Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-61200a02-db0c-42dd-a156-0d8c230ffcde" timestamp="2016-02-22T15:24:02.968669+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi
 ="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-87fd80af-647f-43ab-8992-3fe478593793">
+                        <cybox:Observable_Composition operator="OR">
+                            <cybox:Observable idref="opensource:Observable-2aa6d9ec-633a-4dbf-9410-64efce2422ff">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-010363d5-cca4-47ac-8538-23a1151fbcfd">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-e49fe4fb-6aef-486d-be21-0ec8a371ddb1">
+                </cybox:Observable>
+            </cybox:Observable_Composition>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.969150Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-c56b73b0-9e74-4f98-adfe-4d3fbe49fd66" timestamp="2016-02-22T15:24:02.969617+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:URIObj="http://cybox.mitre.org/objects#URIObject
 -2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-010363d5-cca4-47ac-8538-23a1151fbcfd" sighting_count="1">
+                        <cybox:Title>URI: http://94.102.53.142/~zadmin/find/http.bin</cybox:Title>
+                        <cybox:Description>URI: http://94.102.53.142/~zadmin/find/http.bin | Type: URL | </cybox:Description>
+                        <cybox:Object id="opensource:URI-38f2db8f-41d6-4876-850b-5283e1987270">
+                            <cybox:Properties type="URL" xsi:type="URIObj:URIObjectType">
+                                <URIObj:Value condition="Equals">http://94.102.53.142/~zadmin/find/http.bin</URIObj:Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.970273Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-aba08109-b0aa-4852-9e95-05463debcdc0" timestamp="2016-02-22T15:24:02.970755+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:AddressObj="http://cybox.mitre.org/objects#AddressObject-2" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.or
 g/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-2aa6d9ec-633a-4dbf-9410-64efce2422ff" sighting_count="1">
+                        <cybox:Title>IP: 94.102.53.142</cybox:Title>
+                        <cybox:Description>IPv4: 94.102.53.142 | isSource: True | </cybox:Description>
+                        <cybox:Object id="opensource:Address-cf5489ab-a097-46df-9ec9-91ab3bbcc477">
+                            <cybox:Properties category="ipv4-addr" is_source="true" xsi:type="AddressObj:AddressObjectType">
+                                <AddressObj:Address_Value condition="Equal">94.102.53.142</AddressObj:Address_Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.971398Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-77c7b736-eb95-4055-8bae-61c5e2bc4310" timestamp="2016-02-22T15:24:02.971847+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:FileObj="http://cybox.mitre.org/objects#FileObject-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/comm
 on-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-e49fe4fb-6aef-486d-be21-0ec8a371ddb1" sighting_count="1">
+                        <cybox:Title>File: http.bin</cybox:Title>
+                        <cybox:Description>FileName: http.bin | FileHash: c88a8635a7eed7ff6641868b697650db | </cybox:Description>
+                        <cybox:Object id="opensource:File-546da1e6-6c00-4c0b-90f3-d56d4aa7f855">
+                            <cybox:Properties xsi:type="FileObj:FileObjectType">
+                                <FileObj:File_Name>http.bin</FileObj:File_Name>
+                                <FileObj:File_Format>bin</FileObj:File_Format>
+                                <FileObj:Hashes>
+                                    <cyboxCommon:Hash>
+                                        <cyboxCommon:Type xsi:type="cyboxVocabs:HashNameVocab-1.0">MD5</cyboxCommon:Type>
+                                        <cyboxCommon:Simple_Hash_Value condition="Equals">c88a8635a7eed7ff6641868b697650db</cyboxCommon:Simple_Hash_Value>
+                        </cyboxCommon:Hash>
+                    </FileObj:Hashes>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.972941Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-c5daa87d-9dbe-4cbe-b18b-2d01589c1f55" timestamp="2016-02-22T15:24:02.973486+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="h
 ttp://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:TTPs>
+                    <stix:TTP id="opensource:ttp-08b96668-60fe-4a85-b28e-31fc9fe917c2" timestamp="2014-10-31T16:44:24.724411+00:00" version="1.1.1" xsi:type="ttp:TTPType">
+                        <ttp:Title>ZeuS</ttp:Title>
+                        <ttp:Behavior>
+                            <ttp:Malware>
+                                <ttp:Malware_Instance id="opensource:malware-c3b59946-8e31-4f50-bbd9-132d418ecb7a">
+                                    <ttp:Type xsi:type="stixVocabs:MalwareTypeVocab-1.0">Remote Access Trojan</ttp:Type>
+                                    <ttp:Name>ZeuS</ttp:Name>
+                                    <ttp:Name>Zbot</ttp:Name>
+                                    <ttp:Name>Zeus</ttp:Name>
+                                    <ttp:Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware that runs on computers running under versions of the Microsoft Windows operating system. While it is capable of being used to carry out many malicious and criminal tasks, it is often used to steal banking information by man-in-the-browser keystroke logging and form grabbing. It is also used to install the CryptoLocker ransomware.[1] Zeus is spread mainly through drive-by downloads and phishing schemes. (2014(http://en.wikipedia.org/wiki/Zeus_%28Trojan_horse%29))</ttp:Description>
+                                    <ttp:Short_Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware effects Microsoft Windows operating system</ttp:Short_Description>
+                    </ttp:Malware_Instance>
+                </ttp:Malware>
+            </ttp:Behavior>
+        </stix:TTP>
+    </stix:TTPs>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.974363Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-926c768a-4e31-43cd-9390-37229e3fa2f7" timestamp="2016-02-22T15:24:02.974720+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:indicator="http://stix.mitre.org/Indicator-2" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCom
 mon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Indicators>
+                    <stix:Indicator id="opensource:indicator-f93507d6-dad1-4299-8617-dff154b5ac62" timestamp="2014-10-31T16:44:24.911510+00:00" version="2.1.1" xsi:type="indicator:IndicatorType">
+                        <indicator:Title>ZeuS Tracker (online)| krlsma.com/wp-includes/Text/dom/php/file.php (2014-10-15) | This domain has been identified as malicious by zeustracker.abuse.ch</indicator:Title>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">Domain Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">URL Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">File Hash Watchlist</indicator:Type>
+                        <indicator:Description>This domain krlsma.com has been identified as malicious by zeustracker.abuse.ch. For more detailed infomation about this indicator go to [CAUTION!!Read-URL-Before-Click] [https://zeustracker.abuse.ch/monitor.php?host=krlsma.com].</indicator:Description>
+                        <indicator:Observable idref="opensource:Observable-24efd7b7-3474-4b22-8ad7-285f298dad41">
+            </indicator:Observable>
+                        <indicator:Indicated_TTP>
+                            <stixCommon:TTP idref="opensource:ttp-8f601fc3-bd6d-4c4e-94c9-b5dbad93ed0b" xsi:type="ttp:TTPType"/>
+            </indicator:Indicated_TTP>
+                        <indicator:Producer>
+                            <stixCommon:Identity id="opensource:Identity-740330e4-9f30-4710-9336-ff9d71492984">
+                                <stixCommon:Name>zeustracker.abuse.ch</stixCommon:Name>
+                </stixCommon:Identity>
+                            <stixCommon:Time>
+                                <cyboxCommon:Produced_Time>2014-10-15T00:00:00+00:00</cyboxCommon:Produced_Time>
+                                <cyboxCommon:Received_Time>2014-10-20T19:29:30+00:00</cyboxCommon:Received_Time>
+                </stixCommon:Time>
+            </indicator:Producer>
+        </stix:Indicator>
+    </stix:Indicators>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.975911Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-6225344f-12c1-49f8-b6e2-862d804044b1" timestamp="2016-02-22T15:24:02.976445+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="h
 ttp://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:TTPs>
+                    <stix:TTP id="opensource:ttp-8f601fc3-bd6d-4c4e-94c9-b5dbad93ed0b" timestamp="2014-10-31T16:44:24.912948+00:00" version="1.1.1" xsi:type="ttp:TTPType">
+                        <ttp:Title>ZeuS</ttp:Title>
+                        <ttp:Behavior>
+                            <ttp:Malware>
+                                <ttp:Malware_Instance id="opensource:malware-a908f65f-1a21-4658-b70c-ea1acd6ceca1">
+                                    <ttp:Type xsi:type="stixVocabs:MalwareTypeVocab-1.0">Remote Access Trojan</ttp:Type>
+                                    <ttp:Name>ZeuS</ttp:Name>
+                                    <ttp:Name>Zbot</ttp:Name>
+                                    <ttp:Name>Zeus</ttp:Name>
+                                    <ttp:Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware that runs on computers running under versions of the Microsoft Windows operating system. While it is capable of being used to carry out many malicious and criminal tasks, it is often used to steal banking information by man-in-the-browser keystroke logging and form grabbing. It is also used to install the CryptoLocker ransomware.[1] Zeus is spread mainly through drive-by downloads and phishing schemes. (2014(http://en.wikipedia.org/wiki/Zeus_%28Trojan_horse%29))</ttp:Description>
+                                    <ttp:Short_Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware effects Microsoft Windows operating system</ttp:Short_Description>
+                    </ttp:Malware_Instance>
+                </ttp:Malware>
+            </ttp:Behavior>
+        </stix:TTP>
+    </stix:TTPs>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.977154Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-3d34d4a5-9c0f-4cbb-9751-ced4d77dfc89" timestamp="2016-02-22T15:24:02.977643+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi
 ="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-24efd7b7-3474-4b22-8ad7-285f298dad41">
+                        <cybox:Observable_Composition operator="OR">
+                            <cybox:Observable idref="opensource:Observable-cf7403a3-6c63-49e1-ab97-ed0861f71ba9">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-5dade958-25b7-4eec-b3de-c25b580d6d6c">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-398c91f0-d12b-499b-844c-67c7a0a9dbe1">
+                </cybox:Observable>
+            </cybox:Observable_Composition>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.978111Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-1a737ebf-ee76-4460-9cb2-183577366ab9" timestamp="2016-02-22T15:24:02.978580+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:DomainNameObj="http://cybox.mitre.org/objects#DomainNameObject-1" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mi
 tre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-cf7403a3-6c63-49e1-ab97-ed0861f71ba9" sighting_count="1">
+                        <cybox:Title>Domain: krlsma.com</cybox:Title>
+                        <cybox:Description>Domain: krlsma.com | isFQDN: True | </cybox:Description>
+                        <cybox:Object id="opensource:DomainName-53c73da2-81ef-4242-98b2-1b1215e0f124">
+                            <cybox:Properties xsi:type="DomainNameObj:DomainNameObjectType">
+                                <DomainNameObj:Value condition="Equals">krlsma.com</DomainNameObj:Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.979274Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-8f2994a9-1a61-40fc-be8f-63f7709823d1" timestamp="2016-02-22T15:24:02.979721+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:URIObj="http://cybox.mitre.org/objects#URIObject
 -2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-5dade958-25b7-4eec-b3de-c25b580d6d6c" sighting_count="1">
+                        <cybox:Title>URI: http://krlsma.com/wp-includes/Text/dom/php/file.php</cybox:Title>
+                        <cybox:Description>URI: http://krlsma.com/wp-includes/Text/dom/php/file.php | Type: URL | </cybox:Description>
+                        <cybox:Object id="opensource:URI-44fa3e9f-6599-4dda-bbe6-a03b06930084">
+                            <cybox:Properties type="URL" xsi:type="URIObj:URIObjectType">
+                                <URIObj:Value condition="Equals">http://krlsma.com/wp-includes/Text/dom/php/file.php</URIObj:Value>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.980513Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-935dbc5c-5eb5-4022-ba49-95d30b845160" timestamp="2016-02-22T15:24:02.981129+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:FileObj="http://cybox.mitre.org/objects#FileObject-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/comm
 on-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-398c91f0-d12b-499b-844c-67c7a0a9dbe1" sighting_count="1">
+                        <cybox:Title>File: file.php</cybox:Title>
+                        <cybox:Description>FileName: file.php | FileHash: cccc3d971cc7f2814229e836076664a1 | </cybox:Description>
+                        <cybox:Object id="opensource:File-eb908acc-cf77-4cd4-9875-eb6408c4c726">
+                            <cybox:Properties xsi:type="FileObj:FileObjectType">
+                                <FileObj:File_Name>file.php</FileObj:File_Name>
+                                <FileObj:File_Format>php</FileObj:File_Format>
+                                <FileObj:Hashes>
+                                    <cyboxCommon:Hash>
+                                        <cyboxCommon:Type xsi:type="cyboxVocabs:HashNameVocab-1.0">MD5</cyboxCommon:Type>
+                                        <cyboxCommon:Simple_Hash_Value condition="Equals">cccc3d971cc7f2814229e836076664a1</cyboxCommon:Simple_Hash_Value>
+                        </cyboxCommon:Hash>
+                    </FileObj:Hashes>
+                </cybox:Properties>
+            </cybox:Object>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.982272Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-28ffd222-1203-4609-86c3-efa5cfac9b41" timestamp="2016-02-22T15:24:02.982590+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:indicator="http://stix.mitre.org/Indicator-2" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCom
 mon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Indicators>
+                    <stix:Indicator id="opensource:indicator-0324e19d-9a5a-4cc0-bc74-5a04b6de8bd3" timestamp="2014-10-31T16:44:24.842915+00:00" version="2.1.1" xsi:type="indicator:IndicatorType">
+                        <indicator:Title>ZeuS Tracker (offline)| goomjav1kaformjavkd.com/neverwind/tmp/pixel.jpg (2014-10-31) | This domain has been identified as malicious by zeustracker.abuse.ch</indicator:Title>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">Domain Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">URL Watchlist</indicator:Type>
+                        <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">File Hash Watchlist</indicator:Type>
+                        <indicator:Description>This domain goomjav1kaformjavkd.com has been identified as malicious by zeustracker.abuse.ch. For more detailed infomation about this indicator go to [CAUTION!!Read-URL-Before-Click] [https://zeustracker.abuse.ch/monitor.php?host=goomjav1kaformjavkd.com].</indicator:Description>
+                        <indicator:Observable idref="opensource:Observable-eae86c81-73ec-4ee6-87b6-31a8fa3fe5ac">
+            </indicator:Observable>
+                        <indicator:Indicated_TTP>
+                            <stixCommon:TTP idref="opensource:ttp-e6a4b409-9e89-4841-b293-f08483efb12f" xsi:type="ttp:TTPType"/>
+            </indicator:Indicated_TTP>
+                        <indicator:Producer>
+                            <stixCommon:Identity id="opensource:Identity-3b2e3f22-c0ae-4f57-aac0-f7da7de9d294">
+                                <stixCommon:Name>zeustracker.abuse.ch</stixCommon:Name>
+                </stixCommon:Identity>
+                            <stixCommon:Time>
+                                <cyboxCommon:Produced_Time>2014-10-31T00:00:00+00:00</cyboxCommon:Produced_Time>
+                                <cyboxCommon:Received_Time>2014-10-31T16:44:24+00:00</cyboxCommon:Received_Time>
+                </stixCommon:Time>
+            </indicator:Producer>
+        </stix:Indicator>
+    </stix:Indicators>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.983991Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-8c7a08a2-8c9d-4761-aebc-0b7a0aac5f68" timestamp="2016-02-22T15:24:02.984608+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:ttp="http://stix.mitre.org/TTP-1" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="h
 ttp://cybox.mitre.org/cybox-2" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:TTPs>
+                    <stix:TTP id="opensource:ttp-e6a4b409-9e89-4841-b293-f08483efb12f" timestamp="2014-10-31T16:44:24.843868+00:00" version="1.1.1" xsi:type="ttp:TTPType">
+                        <ttp:Title>ZeuS</ttp:Title>
+                        <ttp:Behavior>
+                            <ttp:Malware>
+                                <ttp:Malware_Instance id="opensource:malware-d0ef452d-4e25-4770-b560-4e9c01e0de25">
+                                    <ttp:Type xsi:type="stixVocabs:MalwareTypeVocab-1.0">Remote Access Trojan</ttp:Type>
+                                    <ttp:Name>ZeuS</ttp:Name>
+                                    <ttp:Name>Zbot</ttp:Name>
+                                    <ttp:Name>Zeus</ttp:Name>
+                                    <ttp:Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware that runs on computers running under versions of the Microsoft Windows operating system. While it is capable of being used to carry out many malicious and criminal tasks, it is often used to steal banking information by man-in-the-browser keystroke logging and form grabbing. It is also used to install the CryptoLocker ransomware.[1] Zeus is spread mainly through drive-by downloads and phishing schemes. (2014(http://en.wikipedia.org/wiki/Zeus_%28Trojan_horse%29))</ttp:Description>
+                                    <ttp:Short_Description>Zeus, ZeuS, or Zbot is Trojan horse computer malware effects Microsoft Windows operating system</ttp:Short_Description>
+                    </ttp:Malware_Instance>
+                </ttp:Malware>
+            </ttp:Behavior>
+        </stix:TTP>
+    </stix:TTPs>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.985825Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-6042ee98-aa0a-47c0-982c-97fc6c8b65b8" timestamp="2016-02-22T15:24:02.986544+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2" xmlns:taxii="http://taxii.mitre.org/messages/taxii_xml_binding-1" xmlns:TOUMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Terms_Of_Use-1" xmlns:stix="http://stix.mitre.org/stix-1" xmlns:marking="http://data-marking.mitre.org/Marking-1" xmlns:tdq="http://taxii.mitre.org/query/taxii_default_query-1" xmlns:cyboxCommon="http://cybox.mitre.org/common-2" xmlns:cybox="http://cybox.mitre.org/cybox-2" xmlns:xsi
 ="http://www.w3.org/2001/XMLSchema-instance">
+                <stix:STIX_Header>
+                    <stix:Handling>
+                        <marking:Marking>
+                            <marking:Controlled_Structure>../../../../descendant-or-self::node()</marking:Controlled_Structure>
+                            <marking:Marking_Structure color="WHITE" xsi:type="tlpMarking:TLPMarkingStructureType"/>
+                            <marking:Marking_Structure xsi:type="TOUMarking:TermsOfUseMarkingStructureType">
+                                <TOUMarking:Terms_Of_Use>zeustracker.abuse.ch | Abuse source[https://sslbl.abuse.ch/blacklist/] - As for all abuse.ch projects, the use of the SSL Blacklist is free for both commercial and non-commercial usage without any limitation. However, if you are a commercial vendor of security software/services and you want to integrate data from the SSL Blacklist into your products / services, you will have to ask for permission first by contacting me using the contact form [http://www.abuse.ch/?page_id=4727].'
+</TOUMarking:Terms_Of_Use>
+                </marking:Marking_Structure>
+                            <marking:Marking_Structure xsi:type="simpleMarking:SimpleMarkingStructureType">
+                                <simpleMarking:Statement>Unclassified (Public)</simpleMarking:Statement>
+                </marking:Marking_Structure>
+            </marking:Marking>
+        </stix:Handling>
+    </stix:STIX_Header>
+                <stix:Observables cybox_major_version="2" cybox_minor_version="1" cybox_update_version="0">
+                    <cybox:Observable id="opensource:Observable-eae86c81-73ec-4ee6-87b6-31a8fa3fe5ac">
+                        <cybox:Observable_Composition operator="OR">
+                            <cybox:Observable idref="opensource:Observable-013f5351-5e03-4256-8405-ab3342146755">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-3bb01333-456b-4f2c-9d40-d35f08702f74">
+                </cybox:Observable>
+                            <cybox:Observable idref="opensource:Observable-c0974652-6074-4410-aa49-3cf828a39663">
+                </cybox:Observable>
+            </cybox:Observable_Composition>
+        </cybox:Observable>
+    </stix:Observables>
+</stix:STIX_Package>
+        </taxii_11:Content>
+        <taxii_11:Timestamp_Label>2016-02-22T15:24:02.987068Z</taxii_11:Timestamp_Label>
+    </taxii_11:Content_Block>
+    <taxii_11:Content_Block>
+        <taxii_11:Content_Binding binding_id="urn:stix.mitre.org:xml:1.1.1"/>
+        <taxii_11:Content>
+            <stix:STIX_Package id="edge:Package-f74faad4-5796-4f11-955c-fc8d23852fe1" timestamp="2016-02-22T15:24:02.987620+00:00" version="1.1.1" xmlns:stixCommon="http://stix.mitre.org/common-1" xmlns:tlpMarking="http://data-marking.mitre.org/extensions/MarkingStructure#TLP-1" xmlns:simpleMarking="http://data-marking.mitre.org/extensions/MarkingStructure#Simple-1" xmlns:edge="http://soltra.com/" xmlns:DomainNameObj="http://cybox.mitre.org/objects#DomainNameObject-1" xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1" xmlns:opensource="http://hailataxii.com" xmlns:cyboxVocabs="http://cybox.mitre.org

<TRUNCATED>


[21/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/BroExampleOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/BroExampleOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/BroExampleOutput
new file mode 100644
index 0000000..6ded8a8
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/BroExampleOutput
@@ -0,0 +1,23411 @@
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CuJT272SKaJSuqO0Ia","id.orig_h":"10.122.196.204","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"KIRAN","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"KIRAN12312312","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"method":"GET","host":"www.cisco.com","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"10.122.196.204","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"email":"abullis@mail.csuchico.edu","method":"GET","host":"gabacentre.pw","uri":"/","user_agent":"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3","request_body_len":0,"response_body_len":25523,"status_code":200,"status_msg":"OK","tags":[],"resp_fuids":["FJDyMC15lxUn5ngPfd"],"resp_mime_types":["text/html"]}}
+{"dns":{"ts":1402308259609,"uid":"CYbbOHvj","id.orig_h":"93.188.160.43","id.orig_p":33976,"id.resp_h":"144.254.71.184","id.resp_p":53,"proto":"udp","trans_id":62418,"query":"www.cisco.com","qclass":1,"qclass_name":"C_INTERNET","qtype":28,"qtype_name":"AAAA","rcode":0,"rcode_name":"NOERROR","AA":true,"TC":false,"RD":true,"RA":true,"Z":0,"answers":["gabacentre.pw","www.cisco.com.akadns.net","origin-www.cisco.com","2001:420:1201:2::a"],"TTLs":[3600.0,289.0,14.0],"rejected":false}}
+{"http":{"ts":1402307733473,"uid":"CTo78A11g7CYbbOHvj","id.orig_h":"192.249.113.37","id.orig_p":58808,"id.resp_h":"72.163.4.161","id.resp_p":80,"trans_depth":1,"

<TRUNCATED>


[32/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
new file mode 100644
index 0000000..cbd3beb
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
@@ -0,0 +1,261 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import org.apache.commons.cli.*;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.log4j.PropertyConfigurator;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat;
+import org.apache.metron.common.configuration.EnrichmentConfig;
+import org.apache.metron.hbase.HTableProvider;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.common.utils.JSONUtils;
+
+import javax.annotation.Nullable;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Stack;
+
+public class SimpleEnrichmentFlatFileLoader {
+  private static abstract class OptionHandler implements Function<String, Option> {}
+  public static enum LoadOptions {
+    HELP("h", new OptionHandler() {
+
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        return new Option(s, "help", false, "Generate Help screen");
+      }
+    })
+    ,HBASE_TABLE("t", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "hbase_table", true, "HBase table to ingest the data into.");
+        o.setArgName("TABLE");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,HBASE_CF("c", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "hbase_cf", true, "HBase column family to ingest the data into.");
+        o.setArgName("CF");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,EXTRACTOR_CONFIG("e", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
+        o.setArgName("JSON_FILE");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,ENRICHMENT_CONFIG("n", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "enrichment_config", true
+                , "JSON Document describing the enrichment configuration details." +
+                "  This is used to associate an enrichment type with a field type in zookeeper."
+        );
+        o.setArgName("JSON_FILE");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,LOG4J_PROPERTIES("l", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "log4j", true, "The log4j properties file to load");
+        o.setArgName("FILE");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,INPUT("i", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "input", true, "The CSV File to load");
+        o.setArgName("FILE");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ;
+    Option option;
+    String shortCode;
+    LoadOptions(String shortCode, OptionHandler optionHandler) {
+      this.shortCode = shortCode;
+      this.option = optionHandler.apply(shortCode);
+    }
+
+    public boolean has(CommandLine cli) {
+      return cli.hasOption(shortCode);
+    }
+
+    public String get(CommandLine cli) {
+      return cli.getOptionValue(shortCode);
+    }
+
+    public static CommandLine parse(CommandLineParser parser, String[] args) {
+      try {
+        CommandLine cli = parser.parse(getOptions(), args);
+        if(HELP.has(cli)) {
+          printHelp();
+          System.exit(0);
+        }
+        return cli;
+      } catch (ParseException e) {
+        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+        e.printStackTrace(System.err);
+        printHelp();
+        System.exit(-1);
+        return null;
+      }
+    }
+
+    public static void printHelp() {
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp( "SimpleEnrichmentFlatFileLoader", getOptions());
+    }
+
+    public static Options getOptions() {
+      Options ret = new Options();
+      for(LoadOptions o : LoadOptions.values()) {
+        ret.addOption(o.option);
+      }
+      return ret;
+    }
+  }
+  public static List<File> getFiles(File root) {
+    if(!root.isDirectory())  {
+      return ImmutableList.of(root);
+    }
+    List<File> ret = new ArrayList<>();
+    Stack<File> stack = new Stack<File>();
+    stack.push(root);
+    while(!stack.isEmpty()) {
+      File f = stack.pop();
+      if(f.isDirectory()) {
+        for(File child : f.listFiles()) {
+          stack.push(child);
+        }
+      }
+      else {
+        ret.add(f);
+      }
+    }
+    return ret;
+  }
+
+  public HTableProvider getProvider() {
+    return new HTableProvider();
+  }
+
+  public List<Put> extract( String line
+                     , Extractor extractor
+                     , String cf
+                     , HbaseConverter converter
+                     ) throws IOException
+  {
+    List<Put> ret = new ArrayList<>();
+    Iterable<LookupKV> kvs = extractor.extract(line);
+    for(LookupKV kv : kvs) {
+      Put put = converter.toPut(cf, kv.getKey(), kv.getValue());
+      ret.add(put);
+    }
+    return ret;
+  }
+
+
+  public void loadFile( File inputFile
+                      , Extractor extractor
+                      , HTableInterface table
+                      , String cf
+                      , HbaseConverter converter
+                      , boolean lineByLine
+                      ) throws IOException
+  {
+    if(!lineByLine) {
+      table.put(extract(FileUtils.readFileToString(inputFile), extractor, cf, converter));
+    }
+    else {
+      BufferedReader br = new BufferedReader(new FileReader(inputFile));
+      for(String line = null;(line = br.readLine()) != null;) {
+        table.put(extract(line, extractor, cf, converter));
+      }
+    }
+  }
+  public static void main(String... argv) throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+
+    CommandLine cli = LoadOptions.parse(new PosixParser(), otherArgs);
+    if(LoadOptions.LOG4J_PROPERTIES.has(cli)) {
+      PropertyConfigurator.configure(LoadOptions.LOG4J_PROPERTIES.get(cli));
+    }
+    ExtractorHandler handler = ExtractorHandler.load(
+            FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli)))
+    );
+    boolean lineByLine = !handler.getInputFormatHandler().getClass().equals(WholeFileFormat.class);
+    Extractor e = handler.getExtractor();
+    EnrichmentConfig enrichmentConfig = null;
+    if(LoadOptions.ENRICHMENT_CONFIG.has(cli)) {
+      enrichmentConfig = JSONUtils.INSTANCE.load( new File(LoadOptions.ENRICHMENT_CONFIG.get(cli))
+              , EnrichmentConfig.class
+      );
+    }
+    HbaseConverter converter = new EnrichmentConverter();
+    List<File> inputFiles = getFiles(new File(LoadOptions.INPUT.get(cli)));
+    SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
+    HTableInterface table = loader.getProvider()
+            .getTable(conf, LoadOptions.HBASE_TABLE.get(cli));
+
+    for (File f : inputFiles) {
+      loader.loadFile(f, e, table, LoadOptions.HBASE_CF.get(cli), converter, lineByLine);
+    }
+    if(enrichmentConfig != null) {
+      enrichmentConfig.updateSensorConfigs();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/ConnectionType.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/ConnectionType.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/ConnectionType.java
new file mode 100644
index 0000000..77d1698
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/ConnectionType.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+/**
+ * How a TAXII endpoint is resolved before polling.
+ * <p>
+ * {@code POLL} uses the configured endpoint directly as the poll service, while
+ * {@code DISCOVER} first calls the endpoint as a TAXII discovery service and uses the
+ * advertised poll service address (see {@code TaxiiHandler#initializeClient}).
+ */
+public enum ConnectionType {
+   POLL, DISCOVER;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TableInfo.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TableInfo.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TableInfo.java
new file mode 100644
index 0000000..6bbf8e3
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TableInfo.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+
+/**
+ * Value object pairing an HBase table name with a column family, parsed from a
+ * single {@code "table:columnFamily"} specification string.
+ */
+public class TableInfo {
+    private final String tableName;
+    private final String columnFamily;
+
+    /**
+     * Parses a {@code "table:columnFamily"} specification.
+     *
+     * @param s the raw specification; must contain exactly one ':' separator
+     * @throws IllegalStateException if the input does not split into exactly two parts
+     */
+    public TableInfo(String s) {
+        // Negative limit keeps trailing empty strings, matching a strict two-part split
+        // (e.g. "t:" yields ["t", ""], while "t:cf:extra" yields three parts and fails).
+        String[] parts = s.split(":", -1);
+        if (parts.length != 2) {
+            throw new IllegalStateException("Malformed table:cf => " + s);
+        }
+        tableName = parts[0];
+        columnFamily = parts[1];
+    }
+
+    /** @return the HBase table name portion of the specification */
+    public String getTableName() {
+        return tableName;
+    }
+
+    /** @return the column family portion of the specification */
+    public String getColumnFamily() {
+        return columnFamily;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        TableInfo that = (TableInfo) o;
+        boolean sameTable = tableName == null ? that.getTableName() == null
+                                              : tableName.equals(that.getTableName());
+        boolean sameFamily = columnFamily == null ? that.getColumnFamily() == null
+                                                  : columnFamily.equals(that.getColumnFamily());
+        return sameTable && sameFamily;
+    }
+
+    @Override
+    public int hashCode() {
+        // Same 31-based combination as the original so hash values are unchanged.
+        int h = tableName == null ? 0 : tableName.hashCode();
+        return 31 * h + (columnFamily == null ? 0 : columnFamily.hashCode());
+    }
+
+    @Override
+    public String toString() {
+        return "TableInfo{" +
+                "tableName='" + tableName + '\'' +
+                ", columnFamily='" + columnFamily + '\'' +
+                '}';
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiConnectionConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiConnectionConfig.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiConnectionConfig.java
new file mode 100644
index 0000000..678f98b
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiConnectionConfig.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+import com.google.common.base.Joiner;
+import org.apache.metron.dataloads.extractor.stix.types.ObjectTypeHandlers;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+/**
+ * Configuration for connecting to a TAXII service: endpoint, credentials,
+ * collection/subscription selection, the HBase destination (table + column family),
+ * and an optional whitelist of indicator types.
+ * <p>
+ * Instances can be built fluently via the {@code with*} methods or deserialized
+ * from JSON via the static {@code load} methods. The bean-style setters exist for
+ * Jackson's benefit.
+ */
+public class TaxiiConnectionConfig {
+  // Shared, reusable JSON mapper; callers go through the synchronized load() methods.
+  final static ObjectMapper _mapper = new ObjectMapper();
+  private URL endpoint;
+  private int port = 443;
+  private URL proxy;
+  private String username;
+  private String password;
+  private ConnectionType type;
+  private String collection = "default";
+  private String subscriptionId = null;
+  private Date beginTime;
+  private String table;
+  private String columnFamily;
+  private Set<String> allowedIndicatorTypes = new HashSet<String>();
+
+  /**
+   * Restricts ingestion to the given indicator types; an empty set means "allow all".
+   */
+  public TaxiiConnectionConfig withAllowedIndicatorTypes(List<String> indicatorTypes) {
+    // Use the typed diamond form; the original raw "new HashSet(...)" compiled with
+    // an unchecked warning and lost the element type.
+    allowedIndicatorTypes = new HashSet<>(indicatorTypes);
+    return this;
+  }
+
+  public TaxiiConnectionConfig withTable(String table) {
+    this.table = table;
+    return this;
+  }
+  public TaxiiConnectionConfig withColumnFamily(String cf) {
+    this.columnFamily = cf;
+    return this;
+  }
+  public TaxiiConnectionConfig withBeginTime(Date time) {
+    this.beginTime = time;
+    return this;
+  }
+  public TaxiiConnectionConfig withSubscriptionId(String subId) {
+    this.subscriptionId = subId;
+    return this;
+  }
+  public TaxiiConnectionConfig withCollection(String collection) {
+    this.collection = collection;
+    return this;
+  }
+
+  public TaxiiConnectionConfig withPort(int port) {
+    this.port = port;
+    return this;
+  }
+  public TaxiiConnectionConfig withEndpoint(URL endpoint) {
+    this.endpoint = endpoint;
+    return this;
+  }
+  public TaxiiConnectionConfig withProxy(URL proxy) {
+    this.proxy = proxy;
+    return this;
+  }
+  public TaxiiConnectionConfig withUsername(String username) {
+    this.username = username;
+    return this;
+  }
+  public TaxiiConnectionConfig withPassword(String password) {
+    this.password = password;
+    return this;
+  }
+  public TaxiiConnectionConfig withConnectionType(ConnectionType type) {
+    this.type = type;
+    return this;
+  }
+
+  /** @throws MalformedURLException if the string is not a valid URL */
+  public void setEndpoint(String endpoint) throws MalformedURLException {
+    this.endpoint = new URL(endpoint);
+  }
+
+  public void setPort(int port) {
+    this.port = port;
+  }
+
+  /** @throws MalformedURLException if the string is not a valid URL */
+  public void setProxy(String proxy) throws MalformedURLException {
+    this.proxy = new URL(proxy);
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  public void setPassword(String password) {
+    this.password = password;
+  }
+
+  public void setType(ConnectionType type) {
+    this.type = type;
+  }
+
+  public void setCollection(String collection) {
+    this.collection = collection;
+  }
+
+  public void setSubscriptionId(String subscriptionId) {
+    this.subscriptionId = subscriptionId;
+  }
+
+  /**
+   * Parses the begin time using the default-locale MEDIUM date format.
+   * NOTE(review): this is locale-dependent, so the accepted string format varies by
+   * JVM locale — confirm whether a fixed pattern was intended before relying on it.
+   *
+   * @throws ParseException if the string cannot be parsed in the current locale
+   */
+  public void setBeginTime(String beginTime) throws ParseException {
+    SimpleDateFormat sdf = (SimpleDateFormat)DateFormat.getDateInstance(DateFormat.MEDIUM);
+    this.beginTime = sdf.parse(beginTime);
+  }
+
+  public String getTable() {
+    return table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+
+  public String getColumnFamily() {
+    return columnFamily;
+  }
+
+  public void setColumnFamily(String columnFamily) {
+    this.columnFamily = columnFamily;
+  }
+
+  public Date getBeginTime() {
+    return beginTime;
+  }
+  public int getPort() {
+    return port;
+  }
+  public URL getEndpoint() {
+    return endpoint;
+  }
+
+  public URL getProxy() {
+    return proxy;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+
+  public ConnectionType getType() {
+    return type;
+  }
+
+  public String getCollection() {
+    return collection;
+  }
+  public String getSubscriptionId() {
+    return subscriptionId;
+  }
+
+  // Bean-style alias so Jackson can populate the whitelist from JSON.
+  public void setAllowedIndicatorTypes(List<String> allowedIndicatorTypes) {
+    withAllowedIndicatorTypes(allowedIndicatorTypes);
+  }
+
+  public Set<String> getAllowedIndicatorTypes() {
+    return allowedIndicatorTypes;
+  }
+
+  /**
+   * Deserializes a config from a JSON stream. Synchronized because the shared
+   * ObjectMapper is used across callers.
+   */
+  public static synchronized TaxiiConnectionConfig load(InputStream is) throws IOException {
+    TaxiiConnectionConfig ret = _mapper.readValue(is, TaxiiConnectionConfig.class);
+    return ret;
+  }
+  /** Deserializes a config from a JSON string using the given charset. */
+  public static synchronized TaxiiConnectionConfig load(String s, Charset c) throws IOException {
+    return load( new ByteArrayInputStream(s.getBytes(c)));
+  }
+  /** Deserializes a config from a JSON string using the platform default charset. */
+  public static synchronized TaxiiConnectionConfig load(String s) throws IOException {
+    return load( s, Charset.defaultCharset());
+  }
+
+  @Override
+  public String toString() {
+    // The password is masked so config dumps never leak credentials to logs.
+    return "TaxiiConnectionConfig{" +
+            "endpoint=" + endpoint +
+            ", port=" + port +
+            ", proxy=" + proxy +
+            ", username='" + username + '\'' +
+            ", password=" + (password == null?"null" : "'******'") +
+            ", type=" + type +
+            ", allowedIndicatorTypes=" + Joiner.on(',').join(allowedIndicatorTypes)+
+            ", collection='" + collection + '\'' +
+            ", subscriptionId='" + subscriptionId + '\'' +
+            ", beginTime=" + beginTime +
+            ", table=" + table + ":" + columnFamily+
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiHandler.java
new file mode 100644
index 0000000..be571e1
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiHandler.java
@@ -0,0 +1,406 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.AuthCache;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.socket.PlainConnectionSocketFactory;
+import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
+import org.apache.http.conn.ssl.SSLContextBuilder;
+import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.impl.client.BasicAuthCache;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
+import org.apache.log4j.Logger;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.taxii.client.HttpClient;
+import org.mitre.taxii.messages.xml11.*;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+import javax.xml.bind.JAXBException;
+import javax.xml.datatype.DatatypeConfigurationException;
+import javax.xml.datatype.DatatypeFactory;
+import javax.xml.datatype.XMLGregorianCalendar;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+import java.io.*;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+/**
+ * A {@link TimerTask} that polls a TAXII feed on a schedule, runs each returned
+ * STIX content block through the configured {@link Extractor}, and writes the
+ * resulting threat-intel indicators to HBase via an {@link EnrichmentConverter}.
+ * <p>
+ * The poll window advances each run: {@code beginTime} is moved to the start of
+ * the current run in the {@code finally} block of {@link #run()}.
+ */
+public class TaxiiHandler extends TimerTask {
+    private static final Logger LOG = Logger.getLogger(TaxiiHandler.class);
+
+    // The TAXII XML/message factories are kept thread-local because the underlying
+    // JAXB machinery is not documented as thread-safe.
+    private static ThreadLocal<TaxiiXmlFactory> xmlFactory = new ThreadLocal<TaxiiXmlFactory>() {
+        @Override
+        protected TaxiiXmlFactory initialValue() {
+            return new TaxiiXmlFactory();
+        }
+    };
+    private static ThreadLocal<ObjectFactory> messageFactory = new ThreadLocal<ObjectFactory>() {
+        @Override
+        protected ObjectFactory initialValue() {
+            return new ObjectFactory();
+        }
+    };
+
+    private HttpClient taxiiClient;
+    private URL endpoint;
+    private Extractor extractor;
+    private String hbaseTable;
+    private String columnFamily;
+    // Cache of HBase table handles keyed by table name; entries live for the
+    // lifetime of the handler (they are never explicitly closed here).
+    private Map<String, HTableInterface> connectionCache = new HashMap<>();
+    private HttpClientContext context;
+    private String collection;
+    private String subscriptionId;
+    private EnrichmentConverter converter = new EnrichmentConverter();
+    private Date beginTime;
+    private Configuration config;
+    // Guards against overlapping runs. NOTE(review): a plain boolean is only safe
+    // if all runs happen on the single java.util.Timer thread — confirm no other
+    // thread invokes run().
+    private boolean inProgress = false;
+    private Set<String> allowedIndicatorTypes;
+
+    /**
+     * @param connectionConfig where and how to poll, plus the HBase destination
+     * @param extractor        turns raw STIX XML into enrichment key/value pairs
+     * @param config           Hadoop configuration used to open HBase tables
+     * @throws Exception if the TAXII client cannot be initialized (including
+     *                   endpoint discovery when the connection type is DISCOVER)
+     */
+    public TaxiiHandler( TaxiiConnectionConfig connectionConfig
+                       , Extractor extractor
+                       , Configuration config
+                       ) throws Exception
+    {
+        LOG.info("Loading configuration: " + connectionConfig);
+        this.allowedIndicatorTypes = connectionConfig.getAllowedIndicatorTypes();
+        this.extractor = extractor;
+        this.collection = connectionConfig.getCollection();
+        this.subscriptionId = connectionConfig.getSubscriptionId();
+        hbaseTable = connectionConfig.getTable();
+        columnFamily = connectionConfig.getColumnFamily();
+        this.beginTime = connectionConfig.getBeginTime();
+        this.config = config;
+        initializeClient(connectionConfig);
+        LOG.info("Configured, starting polling " + endpoint + " for " + collection);
+    }
+
+    /** Returns a cached table handle for the given table, creating one on first use. */
+    protected synchronized HTableInterface getTable(String table) throws IOException {
+        HTableInterface ret = connectionCache.get(table);
+        if(ret == null) {
+            ret = createHTable(table);
+            connectionCache.put(table, ret);
+        }
+        return ret;
+    }
+
+    /** Opens a new HBase table handle; overridable for testing. */
+    protected synchronized HTableInterface createHTable(String tableInfo) throws IOException {
+        return new HTable(config, tableInfo);
+    }
+
+    /**
+     * The action to be performed by this timer task: poll the feed once, extract
+     * and persist indicators, then advance the poll window to this run's start time.
+     * NOTE(review): a failed poll rethrows as RuntimeException, which cancels the
+     * owning java.util.Timer; beginTime still advances in the finally block, so
+     * content in the failed window would be skipped on a restart — confirm intended.
+     */
+    @Override
+    public void run() {
+        if(inProgress) {
+            return;
+        }
+        Date ts = new Date();
+        LOG.info("Polling..." + new SimpleDateFormat().format(ts));
+        try {
+            inProgress = true;
+            // Prepare the message to send.
+            String sessionID = MessageHelper.generateMessageId();
+            PollRequest request = messageFactory.get().createPollRequest()
+                    .withMessageId(sessionID)
+                    .withCollectionName(collection);
+            if (subscriptionId != null) {
+                request = request.withSubscriptionID(subscriptionId);
+            } else {
+                request = request.withPollParameters(messageFactory.get().createPollParametersType());
+            }
+            if (beginTime != null) {
+                Calendar gc = GregorianCalendar.getInstance();
+                gc.setTime(beginTime);
+                XMLGregorianCalendar gTime = null;
+                try {
+                    gTime = DatatypeFactory.newInstance().newXMLGregorianCalendar((GregorianCalendar) gc).normalize();
+                } catch (DatatypeConfigurationException e) {
+                    // Previously this only logged and fell through to an NPE on the
+                    // next line; fail loudly with the real cause instead.
+                    LOG.error("Unable to set the begin time", e);
+                    throw new RuntimeException("Unable to set the begin time", e);
+                }
+                gTime.setFractionalSecond(null);
+                LOG.info("Begin Time: " + gTime);
+                request.setExclusiveBeginTimestamp(gTime);
+            }
+
+            try {
+                PollResponse response = call(request, PollResponse.class);
+                LOG.info("Got Poll Response with " + response.getContentBlocks().size() + " blocks");
+                int numProcessed = 0;
+                long avgTimeMS = 0;
+                long timeStartedBlock = System.currentTimeMillis();
+                for (ContentBlock block : response.getContentBlocks()) {
+                    AnyMixedContentType content = block.getContent();
+                    for (Object o : content.getContent()) {
+                        numProcessed++;
+                        long timeS = System.currentTimeMillis();
+                        String xml = null;
+                        if (o instanceof Element) {
+                            Element element = (Element) o;
+                            xml = getStringFromDocument(element.getOwnerDocument());
+                            // Sample ~1% of documents into the debug log.
+                            if(LOG.isDebugEnabled() && Math.random() < 0.01) {
+                                LOG.debug("Random Stix doc: " + xml);
+                            }
+                            for (LookupKV<EnrichmentKey, EnrichmentValue> kv : extractor.extract(xml)) {
+                                // An empty whitelist means "accept every indicator type".
+                                if(allowedIndicatorTypes.isEmpty()
+                                || allowedIndicatorTypes.contains(kv.getKey().type)
+                                  )
+                                {
+                                    kv.getValue().getMetadata().put("source_type", "taxii");
+                                    kv.getValue().getMetadata().put("taxii_url", endpoint.toString());
+                                    kv.getValue().getMetadata().put("taxii_collection", collection);
+                                    Put p = converter.toPut(columnFamily, kv.getKey(), kv.getValue());
+                                    HTableInterface table = getTable(hbaseTable);
+                                    table.put(p);
+                                    LOG.info("Found Threat Intel: " + kv.getKey() + " => " + kv.getValue());
+                                }
+                            }
+                        }
+                        avgTimeMS += System.currentTimeMillis() - timeS;
+                    }
+                    // Periodic progress log; the divisor is the current block's size,
+                    // so the reported average is approximate across blocks.
+                    if( (numProcessed + 1) % 100 == 0) {
+                        LOG.info("Processed " + numProcessed + " in " + (System.currentTimeMillis() - timeStartedBlock) + " ms, avg time: " + avgTimeMS / content.getContent().size());
+                        timeStartedBlock = System.currentTimeMillis();
+                        avgTimeMS = 0;
+                        numProcessed = 0;
+                    }
+                }
+            } catch (Exception e) {
+                LOG.error(e.getMessage(), e);
+                throw new RuntimeException("Unable to make request", e);
+            }
+        }
+        finally {
+            inProgress = false;
+            // Advance the poll window to this run's start time (even on failure).
+            beginTime = ts;
+        }
+    }
+
+    /**
+     * Serializes a DOM document to an XML string, or returns null (after logging)
+     * if the transform fails.
+     */
+    public String getStringFromDocument(Document doc)
+    {
+        try
+        {
+            DOMSource domSource = new DOMSource(doc);
+            StringWriter writer = new StringWriter();
+            StreamResult result = new StreamResult(writer);
+            TransformerFactory tf = TransformerFactory.newInstance();
+            Transformer transformer = tf.newTransformer();
+            transformer.transform(domSource, result);
+            return writer.toString();
+        }
+        catch(TransformerException ex)
+        {
+            // Route through the class logger instead of printStackTrace so the
+            // failure shows up in the configured log output.
+            LOG.error("Unable to serialize STIX document to string", ex);
+            return null;
+        }
+    }
+
+    /** Convenience wrapper around the static call() using this handler's client/endpoint/context. */
+    private <RESPONSE_T> RESPONSE_T call( Object request, Class<RESPONSE_T> responseClazz) throws URISyntaxException, JAXBException, IOException {
+        return call(taxiiClient, endpoint.toURI(), request, context, responseClazz);
+    }
+
+    /**
+     * Builds the HTTP context and TAXII client; when the connection type is
+     * DISCOVER, resolves the poll endpoint via the TAXII discovery service first.
+     */
+    private void initializeClient(TaxiiConnectionConfig config) throws Exception {
+        LOG.info("Initializing client..");
+        if(context == null) {
+            context = createContext(config.getEndpoint(), config.getUsername(), config.getPassword(), config.getPort());
+        }
+        URL endpoint = config.getEndpoint();
+        if(config.getType() == ConnectionType.DISCOVER) {
+            LOG.info("Discovering endpoint");
+            endpoint = discoverPollingClient(config.getProxy(), endpoint, config.getUsername(), config.getPassword(), context, collection).pollEndpoint;
+            this.endpoint = endpoint;
+            LOG.info("Discovered endpoint as " + endpoint);
+        }
+        taxiiClient = buildClient(config.getProxy(), config.getUsername(), config.getPassword());
+    }
+
+    /** Holder for the endpoints and collection names found via TAXII discovery. */
+    private static class DiscoveryResults {
+        URL pollEndpoint;
+        URL collectionManagementEndpoint;
+        List<String> collections = new ArrayList<>();
+    }
+
+    /**
+     * Queries the TAXII discovery service for an available poll endpoint.
+     * If no default collection was configured, lists the available collections and
+     * exits. NOTE(review): the System.exit(0) here terminates the whole JVM from a
+     * helper method — consider throwing instead if this is ever used as a library.
+     */
+    private static DiscoveryResults discoverPollingClient(URL proxy, URL endpoint, String username, String password, HttpClientContext context, String defaultCollection) throws Exception {
+
+        DiscoveryResults results = new DiscoveryResults();
+        {
+            HttpClient discoverClient = buildClient(proxy, username, password);
+            String sessionID = MessageHelper.generateMessageId();
+            // Prepare the message to send.
+            DiscoveryRequest request = messageFactory.get().createDiscoveryRequest()
+                    .withMessageId(sessionID);
+            DiscoveryResponse response = call(discoverClient, endpoint.toURI(), request, context, DiscoveryResponse.class);
+            for (ServiceInstanceType serviceInstance : response.getServiceInstances()) {
+                if (serviceInstance.isAvailable() && serviceInstance.getServiceType() == ServiceTypeEnum.POLL) {
+                    results.pollEndpoint = new URL(serviceInstance.getAddress());
+                }
+                else if(serviceInstance.isAvailable() && serviceInstance.getServiceType() == ServiceTypeEnum.COLLECTION_MANAGEMENT) {
+                    results.collectionManagementEndpoint= new URL(serviceInstance.getAddress());
+                }
+            }
+            if (results.pollEndpoint == null) {
+                throw new RuntimeException("Unable to discover a poll TAXII feed");
+            }
+        }
+        if(defaultCollection == null)
+        //get collections
+        {
+            HttpClient discoverClient = buildClient(proxy, username, password);
+            String sessionID = MessageHelper.generateMessageId();
+            CollectionInformationRequest request = messageFactory.get().createCollectionInformationRequest()
+                                                                 .withMessageId(sessionID);
+            CollectionInformationResponse response = call(discoverClient, results.collectionManagementEndpoint.toURI(), request, context, CollectionInformationResponse.class);
+            LOG.info("Unable to find the default collection; available collections are:");
+            for(CollectionRecordType c : response.getCollections()) {
+                LOG.info(c.getCollectionName());
+                results.collections.add(c.getCollectionName());
+            }
+            System.exit(0);
+        }
+        return results;
+    }
+
+    /**
+     * Builds an HttpClientContext with preemptive basic auth for the endpoint host,
+     * or null when no credentials were supplied.
+     */
+    private static HttpClientContext createContext(URL endpoint, String username, String password, int port) {
+        HttpClientContext context = null;
+        HttpHost target = new HttpHost(endpoint.getHost(), port, endpoint.getProtocol());
+        if (username != null && password != null) {
+
+            CredentialsProvider credsProvider = new BasicCredentialsProvider();
+            credsProvider.setCredentials(
+                    new AuthScope(target.getHostName(), target.getPort()),
+                    new UsernamePasswordCredentials(username, password));
+
+            // http://hc.apache.org/httpcomponents-client-ga/tutorial/html/authentication.html
+            AuthCache authCache = new BasicAuthCache();
+            authCache.put(target, new BasicScheme());
+
+            // Add AuthCache to the execution context
+            context = HttpClientContext.create();
+            context.setCredentialsProvider(credsProvider);
+            context.setAuthCache(authCache);
+        } else {
+            context = null;
+        }
+        return context;
+    }
+
+    /**
+     * Calls the TAXII service and casts the response to the expected type; a
+     * mismatched response is marshalled into the error message for diagnosis.
+     */
+    public static <RESPONSE_T, REQUEST_T> RESPONSE_T call( HttpClient taxiiClient
+            , URI endpoint
+            , REQUEST_T request
+            , HttpClientContext context
+            , Class<RESPONSE_T> responseClazz
+    ) throws JAXBException, IOException {
+        // Call the service
+        Object responseObj =  taxiiClient.callTaxiiService(endpoint, request, context);
+        LOG.info("Request made : " + request.getClass().getCanonicalName() + " => " + responseObj.getClass().getCanonicalName() + " (expected " + responseClazz.getCanonicalName() + ")");
+        try {
+            return responseClazz.cast(responseObj);
+        }
+        catch(ClassCastException cce) {
+            TaxiiXml taxiiXml = xmlFactory.get().createTaxiiXml();
+            String resp = taxiiXml.marshalToString(responseObj, true);
+            String msg = "Didn't return the response we expected: " + responseObj.getClass() + " \n" + resp;
+            LOG.error(msg, cce);
+            throw new RuntimeException(msg, cce);
+        }
+    }
+
+    /**
+     * Builds a TAXII HttpClient with optional proxy support and a trust-self-signed
+     * SSL policy (certificate validation is intentionally relaxed for feeds with
+     * self-signed certs).
+     *
+     * @throws Exception if only one of username/password is supplied, or if the
+     *                   SSL context cannot be built
+     */
+    private static HttpClient buildClient(URL proxy, String username, String password) throws Exception
+    {
+        HttpClient client = new HttpClient(); // Start with a default TAXII HTTP client.
+
+        // Create an Apache HttpClientBuilder to be customized by the command line arguments.
+        HttpClientBuilder builder = HttpClientBuilder.create().useSystemProperties();
+
+        // Proxy
+        if (proxy != null) {
+            HttpHost proxyHost = new HttpHost(proxy.getHost(), proxy.getPort(), proxy.getProtocol());
+            builder.setProxy(proxyHost);
+        }
+
+        // Basic authentication. User & Password must be supplied together (XOR check).
+        if (username != null ^ password != null) {
+            throw new Exception("'username' and 'password' arguments are required to appear together.");
+        }
+
+
+        // from:  http://stackoverflow.com/questions/19517538/ignoring-ssl-certificate-in-apache-httpclient-4-3
+        SSLContextBuilder ssbldr = new SSLContextBuilder();
+        ssbldr.loadTrustMaterial(null, new TrustSelfSignedStrategy());
+        SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(ssbldr.build(),SSLConnectionSocketFactory.BROWSER_COMPATIBLE_HOSTNAME_VERIFIER);
+
+
+        Registry<ConnectionSocketFactory> registry = RegistryBuilder.<ConnectionSocketFactory>create()
+                .register("http", new PlainConnectionSocketFactory())
+                .register("https", sslsf)
+                .build();
+
+
+        PoolingHttpClientConnectionManager cm = new PoolingHttpClientConnectionManager(registry);
+        cm.setMaxTotal(20);//max connection
+
+        // Disable SNI to interoperate with servers that mishandle the extension.
+        System.setProperty("jsse.enableSNIExtension", "false");
+        CloseableHttpClient httpClient = builder
+                .setSSLSocketFactory(sslsf)
+                .setConnectionManager(cm)
+                .build();
+
+        client.setHttpclient(httpClient);
+        return client;
+    }
+
+    /** Ad-hoc smoke-test entry point against a public TAXII server; not used in production. */
+    public static void main(String... argv) throws Exception {
+        URL endpoint = new URL("http://hailataxii.com/taxii-discovery-service");
+        String username = "guest";
+        String password = "guest";
+        TaxiiConnectionConfig config = new TaxiiConnectionConfig();
+        config = config.withConnectionType(ConnectionType.DISCOVER)
+                       .withEndpoint(endpoint)
+                       .withUsername(username)
+                       .withCollection("guest.Abuse_ch")
+                       .withPassword(password);
+        //TaxiiHandler handler = new TaxiiHandler(config, null);
+        //handler.run();
+        //discoverPollingClient(null, endpoint, username, password, context);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiLoader.java
new file mode 100644
index 0000000..b5385a9
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiLoader.java
@@ -0,0 +1,205 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import org.apache.commons.cli.*;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.log4j.PropertyConfigurator;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+import org.apache.metron.common.configuration.EnrichmentConfig;
+import org.apache.metron.common.utils.JSONUtils;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.text.*;
+import java.util.Date;
+import java.util.Timer;
+
+/**
+ * Command-line tool that polls a TAXII server on a fixed schedule: it loads an
+ * extractor config and a connection config, then schedules a {@link TaxiiHandler}
+ * on a {@link Timer} to pull STIX documents through a {@link StixExtractor}.
+ */
+public class TaxiiLoader {
+  // Adapter so each enum constant below can build its own commons-cli Option
+  // from its short switch code.
+  private static abstract class OptionHandler implements Function<String, Option> {}
+  // One constant per supported command-line switch; each constant builds its own
+  // Option and knows how to query/extract itself from a parsed CommandLine.
+  private enum TaxiiOptions {
+    HELP("h", new OptionHandler() {
+
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        return new Option(s, "help", false, "Generate Help screen");
+      }
+    })
+    ,EXTRACTOR_CONFIG("e", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
+        o.setArgName("JSON_FILE");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,CONNECTION_CONFIG("c", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "taxii_connection_config", true, "The JSON config file to configure the connection");
+        o.setArgName("config_file");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,TIME_BETWEEN_POLLS("p", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "time_between_polls", true, "The time between polls (in ms)");
+        o.setArgName("MS");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,BEGIN_TIME("b", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "begin_time", true, "Start time to poll the Taxii server (all data from that point will be gathered in the first pull).");
+        o.setArgName(DATE_FORMAT.toPattern());
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,LOG4J_PROPERTIES("l", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "log4j", true, "The log4j properties file to load");
+        o.setArgName("FILE");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,ENRICHMENT_CONFIG("n", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "enrichment_config", true
+                , "JSON Document describing the enrichment configuration details." +
+                "  This is used to associate an enrichment type with a field type in zookeeper."
+        );
+        o.setArgName("JSON_FILE");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ;
+    Option option;
+    String shortCode;
+    TaxiiOptions(String shortCode, OptionHandler optionHandler) {
+      this.shortCode = shortCode;
+      this.option = optionHandler.apply(shortCode);
+    }
+
+    // True if this switch was supplied on the command line.
+    public boolean has(CommandLine cli) {
+      return cli.hasOption(shortCode);
+    }
+
+    // Value supplied for this switch (null if absent).
+    public String get(CommandLine cli) {
+      return cli.getOptionValue(shortCode);
+    }
+
+    // Parses args; prints help and exits(0) on -h, or prints help and exits(-1)
+    // on a parse failure. Never returns null to a live caller.
+    public static CommandLine parse(CommandLineParser parser, String[] args) {
+      try {
+        CommandLine cli = parser.parse(getOptions(), args);
+        if(TaxiiOptions.HELP.has(cli)) {
+          printHelp();
+          System.exit(0);
+        }
+        return cli;
+      } catch (ParseException e) {
+        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+        e.printStackTrace(System.err);
+        printHelp();
+        System.exit(-1);
+        return null;
+      }
+    }
+
+    public static void printHelp() {
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp( "TaxiiLoader", getOptions());
+    }
+
+    // Aggregates the Options of every enum constant for parsing/help output.
+    public static Options getOptions() {
+      Options ret = new Options();
+      for(TaxiiOptions o : TaxiiOptions.values()) {
+        ret.addOption(o.option);
+      }
+      return ret;
+    }
+  }
+  // Pattern for the -b/--begin_time argument.
+  // NOTE(review): SimpleDateFormat is not thread-safe; safe here only because it
+  // is used solely from main() on a single thread.
+  public static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+  public static final long ONE_HR_IN_MS = 60*60*1000;
+  // Default polling interval when -p/--time_between_polls is not supplied.
+  public static final long DEFAULT_TIME_BETWEEN_POLLS = ONE_HR_IN_MS;
+
+
+  /**
+   * Entry point: parses CLI args, optionally configures log4j and pushes the
+   * enrichment config, then schedules a TaxiiHandler poll at a fixed rate.
+   * Requires the configured extractor to be a StixExtractor.
+   */
+  public static void main(String... argv) throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    // NOTE(review): zkQuorum is read but never used below — confirm whether it
+    // was meant to be passed along (e.g. for the enrichment config update).
+    String zkQuorum = conf.get(HConstants.ZOOKEEPER_QUORUM);
+    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+
+    CommandLine cli = TaxiiOptions.parse(new PosixParser(), otherArgs);
+    if(TaxiiOptions.LOG4J_PROPERTIES.has(cli)) {
+      PropertyConfigurator.configure(TaxiiOptions.LOG4J_PROPERTIES.get(cli));
+    }
+    ExtractorHandler handler = ExtractorHandler.load(FileUtils.readFileToString(new File(TaxiiOptions.EXTRACTOR_CONFIG.get(cli))));
+    Extractor e = handler.getExtractor();
+    EnrichmentConfig enrichmentConfig = null;
+    if(TaxiiOptions.ENRICHMENT_CONFIG.has(cli)) {
+      enrichmentConfig = JSONUtils.INSTANCE.load( new File(TaxiiOptions.ENRICHMENT_CONFIG.get(cli))
+              , EnrichmentConfig.class
+      );
+      // Pushes the enrichment type/field associations (see option help text).
+      enrichmentConfig.updateSensorConfigs();
+    }
+
+    Timer timer = new Timer();
+    if(e instanceof StixExtractor) {
+      StixExtractor extractor = (StixExtractor)e;
+      TaxiiConnectionConfig connectionConfig = TaxiiConnectionConfig.load(FileUtils.readFileToString(new File(TaxiiOptions.CONNECTION_CONFIG.get(cli))));
+      if(TaxiiOptions.BEGIN_TIME.has(cli)) {
+        Date d = DATE_FORMAT.parse(TaxiiOptions.BEGIN_TIME.get(cli));
+        connectionConfig.withBeginTime(d);
+      }
+      long timeBetween = DEFAULT_TIME_BETWEEN_POLLS;
+      if(TaxiiOptions.TIME_BETWEEN_POLLS.has(cli)) {
+        timeBetween = Long.parseLong(TaxiiOptions.TIME_BETWEEN_POLLS.get(cli));
+      }
+      // First poll fires immediately (delay 0), then repeats every timeBetween ms.
+      timer.scheduleAtFixedRate(new TaxiiHandler(connectionConfig, extractor, conf), 0, timeBetween);
+    }
+    else {
+      throw new IllegalStateException("Extractor must be a STIX Extractor");
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerIntegrationTest.java
new file mode 100644
index 0000000..08f95b9
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerIntegrationTest.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.configuration.Configuration;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.indices.IndexMissingException;
+import org.elasticsearch.test.ElasticsearchIntegrationTest;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.TimeZone;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Integration tests for ElasticsearchDataPruner against an in-process,
+ * single-data-node Elasticsearch cluster whose data/work/log paths are rooted
+ * under ./target/elasticsearch-test.
+ */
+@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.SUITE, numDataNodes = 1, numClientNodes = 0)
+public class ElasticsearchDataPrunerIntegrationTest extends ElasticsearchIntegrationTest {
+
+    // Scratch directory for the embedded cluster; wiped before and after the suite.
+    private static File dataPath = new File("./target/elasticsearch-test");
+    private Date testingDate;
+    private Date yesterday = new Date();
+    private DateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd.HH");
+    private Configuration configuration;
+
+    @BeforeClass
+    public static void setupClass() throws Exception {
+
+        if (dataPath.isDirectory()) {
+            FileUtils.deleteDirectory(dataPath);
+        }
+
+        if (!dataPath.mkdirs()) {
+            throw new RuntimeException("Couldn't create dataPath at: " + dataPath.getAbsolutePath());
+        }
+
+    }
+
+    @AfterClass
+    public static void teardownClass() throws Exception {
+
+        if (dataPath.isDirectory()) {
+            FileUtils.deleteDirectory(dataPath);
+        }
+
+    }
+
+    // Waits for a green cluster, then fixes testingDate at today's midnight UTC
+    // and yesterday at exactly one day earlier; loads the sample configuration.
+    @Before
+    public void setUp() throws Exception {
+
+        super.setUp();
+        ensureGreen();
+
+        TimeZone timeZone = TimeZone.getTimeZone("UTC");
+        Calendar calendar = Calendar.getInstance(timeZone);
+        calendar.set(Calendar.HOUR_OF_DAY,0);
+        calendar.set(Calendar.MINUTE,0);
+        calendar.set(Calendar.SECOND,0);
+        testingDate = calendar.getTime();
+        yesterday.setTime(testingDate.getTime() - TimeUnit.DAYS.toMillis(1));
+        dateFormat.setTimeZone(timeZone);
+
+        File resourceFile = new File(TestConstants.SAMPLE_CONFIG_PATH);
+        Path resourcePath = Paths.get(resourceFile.getCanonicalPath());
+
+        configuration = new Configuration(resourcePath);
+    }
+
+    // Deleting a concrete (non-wildcard) index that does not exist must throw.
+    @Test(expected = IndexMissingException.class)
+    public void testWillThrowOnMissingIndex() throws Exception {
+
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(yesterday, 30, configuration,client(), "*");
+        pruner.deleteIndex(admin(), "baz");
+
+    }
+
+    // A wildcard delete request should be acknowledged by the cluster.
+    @Test
+    public void testDeletesCorrectIndexes() throws Exception {
+
+        Integer numDays = 5;
+
+        Date createStartDate = new Date();
+
+        createStartDate.setTime(yesterday.getTime() - TimeUnit.DAYS.toMillis(numDays - 1));
+
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(yesterday, 30, configuration,client(), "*");
+        String indexesToDelete = "sensor_index_" + new SimpleDateFormat("yyyy.MM.dd").format(createStartDate) + ".*";
+        Boolean deleted = pruner.deleteIndex(admin(), indexesToDelete);
+
+        assertTrue("Index deletion should be acknowledged", deleted);
+
+    }
+
+    // With no matching indices present, prune() must report zero deletions.
+    @Test
+    public void testHandlesNoIndicesToDelete() throws Exception {
+
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(yesterday, 1, configuration, client(), "sensor_index_");
+        Long deleteCount = pruner.prune();
+        assertEquals("Should have pruned 0 indices", 0L, deleteCount.longValue());
+
+
+    }
+
+    // Per-node settings: local data node rooted under the scratch directory.
+    @Override
+    protected Settings nodeSettings(int nodeOrdinal) {
+
+        return ImmutableSettings.settingsBuilder()
+                .put("node.data", true)
+                .put("gateway.type", "none")
+                .put("path.data", dataPath.getPath() + "/data")
+                .put("path.work", dataPath.getPath() + "/work")
+                .put("path.logs", dataPath.getPath() + "/logs")
+                .put("cluster.routing.schedule", "50ms")
+                .put("node.local", true).build();
+
+    }
+
+    // In-memory, single-shard, no-replica index settings.
+    // NOTE(review): missing @Override — confirm this is meant to override
+    // ElasticsearchIntegrationTest#indexSettings(), otherwise it is never called.
+    public Settings indexSettings() {
+
+        return ImmutableSettings.settingsBuilder()
+                .put("index.store.type", "memory")
+                .put("index.store.fs.memory.enabled", "true")
+                .put("index.number_of_shards", 1)
+                .put("index.number_of_replicas", 0).build();
+
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
new file mode 100644
index 0000000..5f32bee
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerRunnerTest.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+
+/**
+ * Unit tests for the CLI option validation performed by
+ * ElasticsearchDataPrunerRunner.checkOptions(...).
+ */
+public class ElasticsearchDataPrunerRunnerTest {
+
+    private Options options;
+    private Options help;
+
+    private ByteArrayOutputStream outContent;
+    private ByteArrayOutputStream errContent;
+
+    // Builds the runner's Options plus a standalone help Option, and redirects
+    // stdout/stderr so usage output from checkOptions does not pollute the log.
+    // NOTE(review): System.out/err are never restored afterwards (the unused
+    // FileDescriptor/FileOutputStream imports suggest a restore was intended) —
+    // consider resetting them in an @After method.
+    @Before
+    public void setUp(){
+
+        options = ElasticsearchDataPrunerRunner.buildOptions();
+        help = new Options();
+
+        Option o = new Option("h", "help", false, "This screen");
+        o.setRequired(false);
+        help.addOption(o);
+
+        outContent = new ByteArrayOutputStream();
+        errContent = new ByteArrayOutputStream();
+
+        System.setOut(new PrintStream(outContent));
+        System.setErr(new PrintStream(errContent));
+
+    }
+
+    // Neither a zookeeper quorum nor a config location supplied -> must throw.
+    @Test(expected = RuntimeException.class)
+    public void testThrowsWithoutZookeeperOrConfigLocation() throws Exception {
+
+        String[] args = new String[]{"-n","30","-p","sensor_index","-s","03/30/2016"};
+        ElasticsearchDataPrunerRunner.checkOptions(help,options,args);
+
+    }
+
+    // NOTE(review): these args are identical to the test above and contain
+    // neither a zookeeper nor a configuration switch, yet the method name says
+    // "WithZookeeperAndConfiguration" — looks copy-pasted; confirm the intended
+    // argument set for the mutually-exclusive-options case.
+    @Test(expected = RuntimeException.class)
+    public void testThrowsWithZookeeperAndConfiguration() throws Exception {
+
+        String[] args = new String[]{"-n","30","-p","sensor_index","-s","03/30/2016"};
+        ElasticsearchDataPrunerRunner.checkOptions(help,options,args);
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerTest.java
new file mode 100644
index 0000000..0cc5d28
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/ElasticsearchDataPrunerTest.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.apache.commons.collections.IteratorUtils;
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.configuration.Configuration;
+import org.easymock.EasyMock;
+import org.elasticsearch.action.ActionFuture;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
+import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.client.*;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.hppc.ObjectObjectOpenHashMap;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.indices.IndexMissingException;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Matchers;
+import org.powermock.api.easymock.PowerMock;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.PrintStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.powermock.api.easymock.PowerMock.replayAll;
+import static org.powermock.api.easymock.PowerMock.verifyAll;
+
+/**
+ * Unit tests for ElasticsearchDataPruner using Mockito mocks for the
+ * Elasticsearch client call chain and PowerMock/EasyMock for the final
+ * DeleteIndexResponse class (hence the PowerMockRunner).
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(DeleteIndexResponse.class)
+public class ElasticsearchDataPrunerTest {
+
+    private Date testDate;
+    private DateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd.HH");
+    private Configuration configuration;
+
+    // Mocked chain: indexClient.admin().indices().prepareDelete(...)/delete(...)
+    // ultimately yielding the PowerMock-created deleteIndexResponse.
+    private Client indexClient = mock(Client.class);
+    private AdminClient adminClient = mock(AdminClient.class);
+    private IndicesAdminClient indicesAdminClient = mock(FilterClient.IndicesAdmin.class);
+    private DeleteIndexRequestBuilder deleteIndexRequestBuilder = mock(DeleteIndexRequestBuilder.class);
+    private DeleteIndexRequest deleteIndexRequest = mock(DeleteIndexRequest.class);
+    private ActionFuture<DeleteIndexResponse> deleteIndexAction = mock(ActionFuture.class);
+    private DeleteIndexResponse deleteIndexResponse = PowerMock.createMock(DeleteIndexResponse.class);
+
+
+    private ByteArrayOutputStream outContent;
+    private ByteArrayOutputStream errContent;
+
+    // Fixes testDate at 2016-03-31 00:00:00.000 (default timezone), wires the
+    // mock client chain, loads the sample configuration, and captures
+    // stdout/stderr so pruner output does not pollute the test log.
+    @Before
+    public void setUp() throws Exception {
+
+        Calendar calendar = Calendar.getInstance();
+        calendar.set(Calendar.MONTH, Calendar.MARCH);
+        calendar.set(Calendar.YEAR, 2016);
+        calendar.set(Calendar.DATE, 31);
+        calendar.set(Calendar.HOUR_OF_DAY, 0);
+        calendar.set(Calendar.MINUTE, 0);
+        calendar.set(Calendar.SECOND, 0);
+        calendar.set(Calendar.MILLISECOND,0);
+        testDate = calendar.getTime();
+
+        when(indexClient.admin()).thenReturn(adminClient);
+        when(adminClient.indices()).thenReturn(indicesAdminClient);
+        when(indicesAdminClient.prepareDelete(Matchers.<String>anyVararg())).thenReturn(deleteIndexRequestBuilder);
+        when(indicesAdminClient.delete((DeleteIndexRequest) any())).thenReturn(deleteIndexAction);
+        when(deleteIndexRequestBuilder.request()).thenReturn(deleteIndexRequest);
+        when(deleteIndexAction.actionGet()).thenReturn(deleteIndexResponse);
+
+        File resourceFile = new File(TestConstants.SAMPLE_CONFIG_PATH);
+        Path resourcePath = Paths.get(resourceFile.getCanonicalPath());
+
+        configuration = new Configuration(resourcePath);
+
+        outContent = new ByteArrayOutputStream();
+        errContent = new ByteArrayOutputStream();
+
+        System.setOut(new PrintStream(outContent));
+        System.setErr(new PrintStream(errContent));
+
+    }
+
+    // The IndexMissingException raised by the client must propagate out of
+    // deleteIndex rather than being swallowed.
+    @Test(expected = IndexMissingException.class)
+    public void testWillThrowOnMissingIndex() throws Exception {
+
+        when(indicesAdminClient.delete((DeleteIndexRequest) any())).thenThrow(new IndexMissingException(new Index("Test Exception")));
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(testDate, 30, configuration, indexClient,"*");
+        pruner.deleteIndex(adminClient, "baz");
+
+    }
+
+    // Builds 5 days of hourly indices ending just before testDate; with a
+    // 1-day retention window the 24 indices dated 2016-03-30 must be pruned.
+    @Test
+    public void testDeletesCorrectIndexes() throws Exception {
+
+        //Mock Cluster Admin
+        ClusterAdminClient clusterAdminClient = mock(ClusterAdminClient.class);
+        ClusterStateRequestBuilder clusterStateRequestBuilder = mock(ClusterStateRequestBuilder.class);
+        ClusterStateResponse clusterStateResponse = mock(ClusterStateResponse.class);
+        ClusterState clusterState = mock(ClusterState.class);
+        ObjectObjectOpenHashMap<String, IndexMetaData> clusterIndexes = new ObjectObjectOpenHashMap();
+        MetaData clusterMetadata = mock(MetaData.class);
+        when(adminClient.cluster()).thenReturn(clusterAdminClient);
+        when(clusterAdminClient.prepareState()).thenReturn(clusterStateRequestBuilder);
+        when(clusterStateRequestBuilder.get()).thenReturn(clusterStateResponse);
+        when(clusterStateResponse.getState()).thenReturn(clusterState);
+        when(clusterState.getMetaData()).thenReturn(clusterMetadata);
+
+        int numDays = 5;
+
+        Date indexDate = new Date();
+
+        indexDate.setTime(testDate.getTime() - TimeUnit.DAYS.toMillis(numDays));
+
+        // One "sensor_index_yyyy.MM.dd.HH" entry per hour over the 5-day span.
+        for (int i = 0; i < numDays * 24; i++) {
+
+            String indexName = "sensor_index_" + dateFormat.format(indexDate);
+            clusterIndexes.put(indexName, null);
+            indexDate.setTime(indexDate.getTime() + TimeUnit.HOURS.toMillis(1));
+
+        }
+
+        when(clusterMetadata.getIndices()).thenReturn(ImmutableOpenMap.copyOf(clusterIndexes));
+
+
+        EasyMock.expect(deleteIndexResponse.isAcknowledged()).andReturn(true);
+
+        replayAll();
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(testDate, 1, configuration, indexClient, "sensor_index_");
+        // NOTE(review): redundant — the constructor already received indexClient.
+        pruner.indexClient = indexClient;
+        Long deleteCount = pruner.prune();
+        assertEquals("Should have pruned 24 indices", 24L, deleteCount.longValue());
+        verifyAll();
+
+    }
+
+    // With a 1-day window ending at testDate (2016-03-31 00:00), the filter
+    // must select exactly the 24 hourly indices dated 2016-03-30.
+    @Test
+    public void testFilter() throws Exception {
+
+        ObjectObjectOpenHashMap<String, IndexMetaData> indexNames = new ObjectObjectOpenHashMap();
+        SimpleDateFormat dateChecker = new SimpleDateFormat("yyyyMMdd");
+        int numDays = 5;
+        String[] expectedIndices = new String[24];
+        Date indexDate = new Date();
+
+        indexDate.setTime(testDate.getTime() - TimeUnit.DAYS.toMillis(numDays));
+
+        for (int i = 0, j=0; i < numDays * 24; i++) {
+
+            String indexName = "sensor_index_" + dateFormat.format(indexDate);
+            //Delete 20160330
+            if( dateChecker.format(indexDate).equals("20160330") ){
+                expectedIndices[j++] = indexName;
+            }
+
+            indexNames.put(indexName, null);
+            indexDate.setTime(indexDate.getTime() + TimeUnit.HOURS.toMillis(1));
+
+        }
+
+        ImmutableOpenMap<String, IndexMetaData> testIndices = ImmutableOpenMap.copyOf(indexNames);
+
+        ElasticsearchDataPruner pruner = new ElasticsearchDataPruner(testDate, 1, configuration,  indexClient, "sensor_index_");
+        // NOTE(review): redundant — the constructor already received indexClient.
+        pruner.indexClient = indexClient;
+
+        Iterable<String> filteredIndices = pruner.getFilteredIndices(testIndices);
+
+        // Order-insensitive comparison: sort both before asserting equality.
+        Object[] indexArray = IteratorUtils.toArray(filteredIndices.iterator());
+        Arrays.sort(indexArray);
+        Arrays.sort(expectedIndices);
+
+        assertArrayEquals(expectedIndices,indexArray);
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/HDFSDataPrunerTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/HDFSDataPrunerTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/HDFSDataPrunerTest.java
new file mode 100644
index 0000000..9bc695c
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/bulk/HDFSDataPrunerTest.java
@@ -0,0 +1,178 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.concurrent.TimeUnit;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.*;
+
+
+/**
+ * Unit tests for HDFSDataPruner using the local filesystem ("file:///") and a
+ * scratch directory of timestamped files created by createTestFiles().
+ */
+public class HDFSDataPrunerTest {
+
+
+    // Scratch directory for generated test files; cleaned up via deleteOnExit.
+    private static File dataPath = new File("src/test/resources/HDFSDataPrunerTest");
+
+    private Date todaysDate;
+    private Date yesterday = new Date();
+
+
+    // NOTE(review): File.delete() only removes EMPTY directories, so a non-empty
+    // leftover dataPath survives the delete and the subsequent mkdirs() returns
+    // false, aborting the suite — consider FileUtils.deleteDirectory instead.
+    @BeforeClass
+    public static void beforeClass() throws Exception {
+
+        if (dataPath.isDirectory()) {
+            dataPath.delete();
+        }
+
+        if (!dataPath.mkdirs()) {
+            throw new RuntimeException("Couldn't create dataPath at: " + dataPath.getAbsolutePath());
+        }
+
+        dataPath.deleteOnExit();
+
+    }
+
+
+    // NOTE(review): clear(Calendar.HOUR) clears only the 12-hour HOUR field, not
+    // HOUR_OF_DAY or MILLISECOND, so todaysDate is not necessarily midnight —
+    // confirm whether exact midnight matters to these tests.
+    @Before
+    public void setUp() throws Exception {
+
+        Calendar today = Calendar.getInstance();
+        today.clear(Calendar.HOUR);
+        today.clear(Calendar.MINUTE);
+        today.clear(Calendar.SECOND);
+        todaysDate = today.getTime();
+        yesterday.setTime(todaysDate.getTime() - TimeUnit.DAYS.toMillis(1));
+
+    }
+
+    // Constructing a pruner whose start date is today must be rejected.
+    @Test(expected = StartDateException.class)
+    public void testFailsOnTodaysDate() throws Exception {
+
+        HDFSDataPruner pruner = new HDFSDataPruner(todaysDate, 30, "file:///", dataPath.getAbsolutePath() + "/file-*");
+
+    }
+
+    // Of the 50 files created, only file-00..file-04 (modified "now") fall
+    // outside the prune window; the other 45 must be removed.
+    @Test
+    public void testDeletesCorrectFiles() throws Exception {
+
+        createTestFiles();
+
+        HDFSDataPruner pruner = new HDFSDataPruner(yesterday, 30, "file:///", dataPath.getAbsolutePath() + "/file-*");
+
+        Long prunedCount = pruner.prune();
+        assertTrue("Should have pruned 45 files- pruned: " + prunedCount, 45 == prunedCount);
+
+        //Verify first five files remain
+        // NOTE(review): filesList is sized by filesLeft.length but only its first
+        // five slots are filled, and assertArrayEquals(message, expecteds, actuals)
+        // is given filesLeft as the "expected" array — arguments appear swapped
+        // and the comparison only works when exactly five files remain; also the
+        // message says "four" but five files are checked. Confirm intent.
+        File[] filesLeft = dataPath.listFiles();
+        File[] filesList = new File[filesLeft.length];
+        for (int i = 0; i < 5; i++) {
+            filesList[i] = new File(dataPath.getPath() + "//file-" + String.format("%02d", i));
+        }
+        assertArrayEquals("First four files should have been left behind", filesLeft, filesList);
+
+
+    }
+
+    // An IOException from FileSystem.isDirectory must surface as a RuntimeException.
+    @Test(expected = RuntimeException.class)
+    public void testThrowsIsDirectory() throws Exception {
+
+        FileSystem testFS = mock(FileSystem.class);
+        when(testFS.isDirectory((Path) any())).thenThrow(new IOException("Test Exception"));
+
+        HDFSDataPruner pruner = new HDFSDataPruner(yesterday, 30, "file:///", dataPath.getAbsolutePath() + "/file-*");
+        pruner.fileSystem = testFS;
+        HDFSDataPruner.DateFileFilter filter = pruner.new DateFileFilter(pruner, true);
+
+        filter.accept(new Path("foo"));
+
+    }
+
+    // Directories must never be accepted by the date filter.
+    @Test
+    public void testIgnoresDirectoies() throws Exception {
+
+        FileSystem testFS = mock(FileSystem.class);
+        when(testFS.isDirectory((Path) any())).thenReturn(true);
+
+        HDFSDataPruner pruner = new HDFSDataPruner(yesterday, 30, "file:///", dataPath.getAbsolutePath() + "/file-*");
+        pruner.fileSystem = testFS;
+        HDFSDataPruner.DateFileFilter filter = pruner.new DateFileFilter(pruner, false);
+        assertFalse("Should ignore directories",filter.accept(new Path("/tmp")));
+
+    }
+
+    // An IOException from FileSystem.getFileStatus must surface as a RuntimeException.
+    @Test(expected = RuntimeException.class)
+    public void testThrowBadFile() throws Exception {
+
+        FileSystem testFS = mock(FileSystem.class);
+        when(testFS.isDirectory((Path) any())).thenReturn(false);
+        when(testFS.getFileStatus((Path) any())).thenThrow(new IOException("Test Exception"));
+
+        HDFSDataPruner pruner = new HDFSDataPruner(yesterday, 30, "file:///", dataPath.getAbsolutePath() + "/file-*");
+
+        pruner.fileSystem = testFS;
+        HDFSDataPruner.DateFileFilter filter = pruner.new DateFileFilter(pruner, true);
+
+        filter.accept(new Path("foo"));
+
+    }
+
+    // Creates 50 files: 00-04 modified "now", 05-24 one day old, 25-39 ten days
+    // old, 40-49 twenty days old (relative to todaysDate).
+    private void createTestFiles() throws IOException {
+
+        //create files
+        for (int i = 0; i < 50; i++) {
+            File file = new File(dataPath.getAbsolutePath() + "//file-" + String.format("%02d", i));
+            file.createNewFile();
+            file.deleteOnExit();
+        }
+
+        //Set modification date today - 1 day
+        for (int i = 5; i < 25; i++) {
+            File file = new File(dataPath.getAbsolutePath() + "//file-" + String.format("%02d", i));
+            file.setLastModified(todaysDate.getTime() - TimeUnit.DAYS.toMillis(1));
+            file.deleteOnExit();
+        }
+
+        //Set modification date today - 10 days
+        for (int i = 25; i < 40; i++) {
+            File file = new File(dataPath.getAbsolutePath() + "//file-" + String.format("%02d", i));
+            file.setLastModified(todaysDate.getTime() - TimeUnit.DAYS.toMillis(10));
+            file.deleteOnExit();
+        }
+
+        //Set modification date today - 20 days
+        for (int i = 40; i < 50; i++) {
+            File file = new File(dataPath.getAbsolutePath() + "//file-" + String.format("%02d", i));
+            file.setLastModified(todaysDate.getTime() - TimeUnit.DAYS.toMillis(20));
+            file.deleteOnExit();
+        }
+
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/ExtractorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/ExtractorTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/ExtractorTest.java
new file mode 100644
index 0000000..0179193
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/ExtractorTest.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor;
+
+import com.google.common.collect.Iterables;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
public class ExtractorTest {
    /**
     * Extractor stub used by the tests below: ignores the input line and
     * always emits a single LookupKV with indicator "dummy" and type "type".
     */
    public static class DummyExtractor implements Extractor
    {

        @Override
        public Iterable<LookupKV> extract(String line) throws IOException {
            EnrichmentKey key = new EnrichmentKey();
            key.indicator = "dummy";
            key.type = "type";
            Map<String, String> value = new HashMap<>();
            value.put("indicator", "dummy");
            return Arrays.asList(new LookupKV(key, new EnrichmentValue(value)));
        }

        @Override
        public void initialize(Map<String, Object> config) {
            // the stub needs no configuration
        }
    }
    // Verifies that Extractors.create instantiates an extractor from a fully
    // qualified class name and that the emitted key/value survive the round trip.
    @Test
    public void testDummyExtractor() throws IllegalAccessException, InstantiationException, ClassNotFoundException, IOException {
        Extractor extractor = Extractors.create(DummyExtractor.class.getName());
        LookupKV results = Iterables.getFirst(extractor.extract(null), null);
        EnrichmentKey key = (EnrichmentKey) results.getKey();
        EnrichmentValue value = (EnrichmentValue) results.getValue();
        Assert.assertEquals("dummy", key.indicator);
        Assert.assertEquals("type", key.type);
        Assert.assertEquals("dummy", value.getMetadata().get("indicator"));
    }

    // Verifies that ExtractorHandler.load parses a JSON config string and
    // wires up the extractor class named in its "extractor" field.
    @Test
    public void testExtractionLoading() throws Exception {
        /**
         config:
         {
            "config" : {}
            ,"extractor" : "org.apache.metron.dataloads.extractor.ExtractorTest$DummyExtractor"
         }
         */
        String config = "{\n" +
                "            \"config\" : {}\n" +
                "            ,\"extractor\" : \"org.apache.metron.dataloads.extractor.ExtractorTest$DummyExtractor\"\n" +
                "         }";
        ExtractorHandler handler = ExtractorHandler.load(config);
        LookupKV results = Iterables.getFirst(handler.getExtractor().extract(null), null);
        EnrichmentKey key = (EnrichmentKey) results.getKey();
        EnrichmentValue value = (EnrichmentValue) results.getValue();
        Assert.assertEquals("dummy", key.indicator);
        Assert.assertEquals("type", key.type);
        Assert.assertEquals("dummy", value.getMetadata().get("indicator"));
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/csv/CSVExtractorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/csv/CSVExtractorTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/csv/CSVExtractorTest.java
new file mode 100644
index 0000000..6cd82c7
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/csv/CSVExtractorTest.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.csv;
+
+import com.google.common.collect.Iterables;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+
public class CSVExtractorTest {

  // NOTE: the Javadoc block below is the test fixture itself — @Multiline
  // materializes its body into the field, so its text must not be edited.
  /**
   {
     "config" : {
        "columns" : {
            "host" : 0
           ,"meta" : 2
                    }
       ,"indicator_column" : "host"
       ,"type" : "threat"
       ,"separator" : ","
               }
     ,"extractor" : "CSV"
   }
   */
  @Multiline
  static String testCSVConfig;

  // Loads the CSV extractor from the config above and checks its extraction behavior.
  @Test
  public void testCSVExtractor() throws Exception {

    ExtractorHandler handler = ExtractorHandler.load(testCSVConfig);
    validate(handler);
  }

  // Asserts that a data line maps the configured host/meta columns into the
  // enrichment value, and that comment lines (leading '#') yield no results.
  public void validate(ExtractorHandler handler) throws IOException {
    {
      LookupKV results = Iterables.getFirst(handler.getExtractor().extract("google.com,1.0,foo"), null);
      EnrichmentKey key = (EnrichmentKey) results.getKey();
      EnrichmentValue value = (EnrichmentValue) results.getValue();
      Assert.assertEquals("google.com", key.indicator);
      Assert.assertEquals("threat", key.type);
      Assert.assertEquals("google.com", value.getMetadata().get("host"));
      Assert.assertEquals("foo", value.getMetadata().get("meta"));
      Assert.assertEquals(2, value.getMetadata().size());
    }
    {
      Iterable<LookupKV> results = handler.getExtractor().extract("#google.com,1.0,foo");
      Assert.assertEquals(0, Iterables.size(results));
    }
  }
}


[26/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/PersistentAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/PersistentAccessTracker.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/PersistentAccessTracker.java
new file mode 100644
index 0000000..13ecfd5
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/PersistentAccessTracker.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.accesstracker;
+
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.log4j.Logger;
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+
+public class PersistentAccessTracker implements AccessTracker {
+    private static final Logger LOG = Logger.getLogger(PersistentAccessTracker.class);
+    private static final long serialVersionUID = 1L;
+
+    public static class AccessTrackerKey {
+        String name;
+        String containerName;
+        long timestamp;
+        public AccessTrackerKey(String name, String containerName, long timestamp) {
+            this.name = name;
+            this.containerName = containerName;
+            this.timestamp = timestamp;
+        }
+
+        public byte[] toRowKey() {
+            ByteArrayOutputStream os = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(os);
+            try {
+                dos.writeUTF(name);
+                dos.writeLong(timestamp);
+                dos.writeUTF(containerName);
+                dos.flush();
+            } catch (IOException e) {
+                throw new RuntimeException("Unable to write rowkey: " + this, e);
+            }
+
+            return os.toByteArray();
+        }
+
+        public static byte[] getTimestampScanKey(String name, long timestamp) {
+            ByteArrayOutputStream os = new ByteArrayOutputStream();
+            DataOutputStream dos = new DataOutputStream(os);
+            try {
+                dos.writeUTF(name);
+                dos.writeLong(timestamp);
+            } catch (IOException e) {
+                throw new RuntimeException("Unable to create scan key " , e);
+            }
+
+            return os.toByteArray();
+        }
+
+        public static AccessTrackerKey fromRowKey(byte[] rowKey) {
+            ByteArrayInputStream is = new ByteArrayInputStream(rowKey);
+            DataInputStream dis = new DataInputStream(is);
+            try {
+                String name = dis.readUTF();
+                long timestamp = dis.readLong();
+                String containerName = dis.readUTF();
+                return new AccessTrackerKey(name, containerName, timestamp);
+            } catch (IOException e) {
+                throw new RuntimeException("Unable to read rowkey: ", e);
+            }
+        }
+    }
+
+    private static class Persister extends TimerTask {
+        PersistentAccessTracker tracker;
+        public Persister(PersistentAccessTracker tracker) {
+            this.tracker = tracker;
+        }
+        /**
+         * The action to be performed by this timer task.
+         */
+        @Override
+        public void run() {
+            tracker.persist(false);
+        }
+    }
+
+    Object sync = new Object();
+    HTableInterface accessTrackerTable;
+    String accessTrackerColumnFamily;
+    AccessTracker underlyingTracker;
+    long timestamp = System.currentTimeMillis();
+    String name;
+    String containerName;
+    private Timer timer;
+    long maxMillisecondsBetweenPersists;
+
+    public PersistentAccessTracker( String name
+                                  , String containerName
+                                  , HTableInterface accessTrackerTable
+                                  , String columnFamily
+                                  , AccessTracker underlyingTracker
+                                  , long maxMillisecondsBetweenPersists
+                                  )
+    {
+        this.containerName = containerName;
+        this.accessTrackerTable = accessTrackerTable;
+        this.name = name;
+        this.accessTrackerColumnFamily = columnFamily;
+        this.underlyingTracker = underlyingTracker;
+        this.maxMillisecondsBetweenPersists = maxMillisecondsBetweenPersists;
+        timer = new Timer();
+        if(maxMillisecondsBetweenPersists > 0) {
+            timer.scheduleAtFixedRate(new Persister(this), maxMillisecondsBetweenPersists, maxMillisecondsBetweenPersists);
+        }
+    }
+
+    public void persist(boolean force) {
+        synchronized(sync) {
+            if(force || (System.currentTimeMillis() - timestamp) >= maxMillisecondsBetweenPersists) {
+                //persist
+                try {
+                    AccessTrackerUtil.INSTANCE.persistTracker(accessTrackerTable, accessTrackerColumnFamily, new AccessTrackerKey(name, containerName, timestamp), underlyingTracker);
+                    timestamp = System.currentTimeMillis();
+                    reset();
+                } catch (IOException e) {
+                    LOG.error("Unable to persist access tracker.", e);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void logAccess(LookupKey key) {
+        synchronized (sync) {
+            underlyingTracker.logAccess(key);
+            if (isFull()) {
+                persist(true);
+            }
+        }
+    }
+
+    @Override
+    public void configure(Map<String, Object> config) {
+        underlyingTracker.configure(config);
+    }
+
+    @Override
+    public boolean hasSeen(LookupKey key) {
+        synchronized(sync) {
+            return underlyingTracker.hasSeen(key);
+        }
+    }
+
+    @Override
+    public String getName() {
+        return underlyingTracker.getName();
+    }
+
+    @Override
+    public AccessTracker union(AccessTracker tracker) {
+        PersistentAccessTracker t1 = (PersistentAccessTracker)tracker;
+        underlyingTracker = underlyingTracker.union(t1.underlyingTracker);
+        return this;
+    }
+
+    @Override
+    public void reset() {
+        synchronized(sync) {
+            underlyingTracker.reset();
+        }
+    }
+
+    @Override
+    public boolean isFull() {
+        synchronized (sync) {
+            return underlyingTracker.isFull();
+        }
+    }
+
+    @Override
+    public void cleanup() throws IOException {
+        synchronized(sync) {
+            try {
+                persist(true);
+            }
+            catch(Throwable t) {
+                LOG.error("Unable to persist underlying tracker", t);
+            }
+            underlyingTracker.cleanup();
+            accessTrackerTable.close();
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/handler/Handler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/handler/Handler.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/handler/Handler.java
new file mode 100644
index 0000000..4ba4de4
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/handler/Handler.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.handler;
+
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.IOException;
+
/**
 * Lookup strategy against a backing store.
 *
 * @param <CONTEXT_T> connection/context object passed through to the store
 * @param <KEY_T>     lookup key type
 * @param <RESULT_T>  value type returned by get()
 */
public interface Handler<CONTEXT_T, KEY_T extends LookupKey, RESULT_T> extends AutoCloseable{
  /** Returns whether the key exists; logAccess presumably records the probe with an access tracker — confirm with implementations. */
  boolean exists(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException;
  /** Retrieves the value for the key; behavior for a missing key is implementation-defined. */
  RESULT_T get(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException;
  /** Batch form of exists(); one result per input key — result ordering to be confirmed with implementations. */
  Iterable<Boolean> exists(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException;
  /** Batch form of get(). */
  Iterable<RESULT_T> get(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException;
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractor.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractor.java
new file mode 100644
index 0000000..016870f
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractor.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.tldextractor;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
 * Extracts public suffixes ("TLDs") and second-level domains from host names
 * using a regex compiled from a public-suffix list loaded off the classpath.
 */
public class BasicTldExtractor implements Serializable {
    private static final long serialVersionUID = -7440226111118873815L;

    /**
     * Accumulates the alternation of escaped suffix patterns until compile()
     * runs; compile() sets it to null, so getSb() returns null afterwards.
     */
    private StringBuilder sb = new StringBuilder();

    /** Compiled matcher of the form "[^.]+?(suffix1|suffix2|...)$". */
    private Pattern pattern;

    /** Classpath resource holding the suffix list, one suffix per line. */
    private String inputFile = "effective_tld_names.dat";

    /**
     * Builds an extractor from an alternate classpath resource.
     *
     * @param filePath classpath location of the suffix list
     */
    public BasicTldExtractor(String filePath) {
        this.inputFile = filePath;
        this.init();
    }

    /** Builds an extractor from the bundled effective_tld_names.dat. */
    public BasicTldExtractor() {
        this.init();
    }

    /**
     * Reads the suffix list, sorts suffixes longest-first so the regex
     * alternation prefers the most specific match, and compiles the pattern.
     *
     * @throws IllegalStateException if the resource cannot be read
     */
    private void init() {
        // FIX: try-with-resources — the original only closed the reader on the
        // fully successful path, leaking it whenever readLine() threw.
        // NOTE(review): getResourceAsStream returns null for a missing
        // resource, which would surface here as an NPE — confirm the resource
        // is always bundled.
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                getClass().getClassLoader().getResourceAsStream(inputFile)))) {
            ArrayList<String> terms = new ArrayList<String>();
            String s = null;
            while ((s = br.readLine()) != null) {
                s = s.trim();
                // skip blanks, comments ("//...") and exception rules ("!...")
                if (s.length() == 0 || s.startsWith("//") || s.startsWith("!"))
                    continue;
                terms.add(s);
            }
            Collections.sort(terms, new StringLengthComparator());
            for (String t : terms)
                add(t);
            compile();
        } catch (IOException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Appends one suffix to the pending alternation, escaping dots and turning
     * a leading wildcard label ("*.foo") into ".+\.foo".
     */
    protected void add(String s) {
        s = s.replace(".", "\\.");
        // FIX: test for the wildcard BEFORE prefixing "\\." — the original
        // tested afterwards, making the branch unreachable, so "*.foo" rules
        // compiled to "\.*\.foo" (zero-or-more literal dots) instead of the
        // intended "\..+\.foo".
        if (s.startsWith("*")) {
            s = s.replace("*", ".+");
        }
        s = "\\." + s;
        sb.append(s).append("|");
    }

    /**
     * Freezes the accumulated suffixes into the matching pattern.  Call once,
     * after the last add(); releases sb (getSb() returns null afterwards).
     */
    public void compile() {
        if (sb.length() > 0)
            sb.deleteCharAt(sb.length() - 1); // drop the trailing "|"
        sb.insert(0, "[^.]+?(");
        sb.append(")$");
        pattern = Pattern.compile(sb.toString());
        sb = null;
    }

    /**
     * Returns the second-level domain plus suffix (e.g. "google.com" for
     * "www.google.com"), or null when no known suffix matches.
     */
    public String extract2LD(String host) {
        Matcher m = pattern.matcher(host);
        if (m.find()) {
            return m.group(0);
        }
        return null;
    }

    /**
     * Returns just the matched suffix with its leading dot (e.g. ".com"), or
     * null when no known suffix matches.
     */
    public String extractTLD(String host) {
        Matcher m = pattern.matcher(host);
        if (m.find()) {
            return m.group(1);
        }
        return null;
    }

    /** Orders strings longest-first; equal lengths compare as equal. */
    public static class StringLengthComparator implements Comparator<String> {
        public int compare(String s1, String s2) {
            // operands flipped: longer strings sort before shorter ones
            return Integer.compare(s2.length(), s1.length());
        }
    }

    /**
     * Returns the sb (null once compile() has run).
     * @return the sb.
     */
    public StringBuilder getSb() {
        return sb;
    }

    /**
     * Sets the sb.
     * @param sb the sb.
     */
    public void setSb(StringBuilder sb) {
        this.sb = sb;
    }

    /**
     * Returns the inputFile.
     * @return the inputFile.
     */
    public String getInputFile() {
        return inputFile;
    }

    /**
     * Sets the inputFile (takes effect only if set before init() runs, i.e.
     * has no effect on an already-constructed instance's pattern).
     * @param inputFile the inputFile.
     */
    public void setInputFile(String inputFile) {
        this.inputFile = inputFile;
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/EnrichmentUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/EnrichmentUtils.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/EnrichmentUtils.java
new file mode 100644
index 0000000..655188c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/EnrichmentUtils.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.utils;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+
+import javax.annotation.Nullable;
+import java.lang.reflect.InvocationTargetException;
+
+public class EnrichmentUtils {
+
+  public static final String KEY_PREFIX = "enrichments";
+
+  public static String getEnrichmentKey(String enrichmentName, String field) {
+    return Joiner.on(".").join(new String[]{KEY_PREFIX, enrichmentName, field});
+  }
+
+  public static class TypeToKey implements Function<String, EnrichmentKey> {
+    private final String indicator;
+
+    public TypeToKey(String indicator) {
+      this.indicator = indicator;
+
+    }
+    @Nullable
+    @Override
+    public EnrichmentKey apply(@Nullable String enrichmentType) {
+      return new EnrichmentKey(enrichmentType, indicator);
+    }
+  }
+  public static String toTopLevelField(String field) {
+    if(field == null) {
+      return null;
+    }
+    return Iterables.getLast(Splitter.on('.').split(field));
+  }
+
+  public static TableProvider getTableProvider(String connectorImpl, TableProvider defaultImpl) {
+    if(connectorImpl == null || connectorImpl.length() == 0 || connectorImpl.charAt(0) == '$') {
+      return defaultImpl;
+    }
+    else {
+      try {
+        Class<? extends TableProvider> clazz = (Class<? extends TableProvider>) Class.forName(connectorImpl);
+        return clazz.getConstructor().newInstance();
+      } catch (InstantiationException e) {
+        throw new IllegalStateException("Unable to instantiate connector.", e);
+      } catch (IllegalAccessException e) {
+        throw new IllegalStateException("Unable to instantiate connector: illegal access", e);
+      } catch (InvocationTargetException e) {
+        throw new IllegalStateException("Unable to instantiate connector", e);
+      } catch (NoSuchMethodException e) {
+        throw new IllegalStateException("Unable to instantiate connector: no such method", e);
+      } catch (ClassNotFoundException e) {
+        throw new IllegalStateException("Unable to instantiate connector: class not found", e);
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/ThreatIntelUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/ThreatIntelUtils.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/ThreatIntelUtils.java
new file mode 100644
index 0000000..7898ccd
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/utils/ThreatIntelUtils.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.utils;
+
+import com.google.common.base.Joiner;
+
public class ThreatIntelUtils {

  /** Prefix shared by every threat-intel key. */
  public static final String KEY_PREFIX = "threatintels";

  /**
   * Builds the fully qualified threat intel key:
   * "threatintels.&lt;threatIntelName&gt;.&lt;field&gt;".
   *
   * @param threatIntelName name of the threat intel feed
   * @param field           name of the enriched field
   * @return the '.'-joined key
   */
  public static String getThreatIntelKey(String threatIntelName, String field) {
    // java.lang.String.join replaces the former Guava Joiner for this single
    // join; note String.join renders a null argument as "null" where Joiner
    // threw NullPointerException — TODO confirm no caller relies on the NPE.
    return String.join(".", KEY_PREFIX, threatIntelName, field);
  }

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/HdfsWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/HdfsWriter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/HdfsWriter.java
new file mode 100644
index 0000000..a364419
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/HdfsWriter.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.writer.hdfs;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.interfaces.BulkMessageWriter;
+import org.apache.storm.hdfs.bolt.format.FileNameFormat;
+import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.NoRotationPolicy;
+import org.apache.storm.hdfs.bolt.sync.CountSyncPolicy;
+import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
+import org.apache.storm.hdfs.common.rotation.RotationAction;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
/**
 * BulkMessageWriter that lands JSON messages in HDFS, maintaining one
 * SourceHandler (and therefore one output file lineage) per source type.
 * Defaults: no file rotation, sync after every written batch.
 */
public class HdfsWriter implements BulkMessageWriter<JSONObject>, Serializable {
  List<RotationAction> rotationActions = new ArrayList<>();
  FileRotationPolicy rotationPolicy = new NoRotationPolicy();
  SyncPolicy syncPolicy = new CountSyncPolicy(1); // sync after every batch
  FileNameFormat fileNameFormat;
  // one handler per source type, created lazily in getSourceHandler
  Map<String, SourceHandler> sourceHandlerMap = new HashMap<>();
  // Storm topology config captured in init(); transient since handlers are
  // created on the worker after deserialization
  transient Map stormConfig;
  /** Fluent setter for the file name format (consulted when handlers are built). */
  public HdfsWriter withFileNameFormat(FileNameFormat fileNameFormat){
    this.fileNameFormat = fileNameFormat;
    return this;
  }

  /** Fluent setter overriding the default sync-every-batch policy. */
  public HdfsWriter withSyncPolicy(SyncPolicy syncPolicy){
    this.syncPolicy = syncPolicy;
    return this;
  }
  /** Fluent setter overriding the default no-rotation policy. */
  public HdfsWriter withRotationPolicy(FileRotationPolicy rotationPolicy){
    this.rotationPolicy = rotationPolicy;
    return this;
  }

  /** Registers an action (e.g. a move) to run when a file rotates. */
  public HdfsWriter addRotationAction(RotationAction action){
    this.rotationActions.add(action);
    return this;
  }

  @Override
  public void init(Map stormConfig, Configurations configurations) {
    this.stormConfig = stormConfig;
  }

  // Writes the batch of messages for the given source type; the tuples are
  // accepted for interface compatibility but not consulted here.
  @Override
  public void write( String sourceType
                   , Configurations configurations
                   , List<Tuple> tuples
                   , List<JSONObject> messages
                   ) throws Exception
  {
    SourceHandler handler = getSourceHandler(sourceType);
    handler.handle(messages);
  }

  // Closes every per-source handler.  NOTE(review): the map is not cleared,
  // so a write() after close() would reuse/recreate handlers — confirm intended.
  @Override
  public void close() {
    for(SourceHandler handler : sourceHandlerMap.values()) {
      handler.close();
    }
  }
  // Lazily creates the handler for a source type; synchronized so concurrent
  // writers do not race on the map.
  private synchronized SourceHandler getSourceHandler(String sourceType) throws IOException {
    SourceHandler ret = sourceHandlerMap.get(sourceType);
    if(ret == null) {
      ret = new SourceHandler(rotationActions, rotationPolicy, syncPolicy, new SourceFileNameFormat(sourceType, fileNameFormat), stormConfig);
      sourceHandlerMap.put(sourceType, ret);
    }
    return ret;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceAwareMoveAction.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceAwareMoveAction.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceAwareMoveAction.java
new file mode 100644
index 0000000..1c345b4
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceAwareMoveAction.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.writer.hdfs;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.log4j.Logger;
+import org.apache.storm.hdfs.common.rotation.RotationAction;
+
+import java.io.IOException;
+
+public class SourceAwareMoveAction implements RotationAction{
+  private static final Logger LOG = Logger.getLogger(SourceHandler.class);
+  private String destination;
+
+  public SourceAwareMoveAction toDestination(String destDir){
+    destination = destDir;
+    return this;
+  }
+
+  private static String getSource(Path filePath) {
+    return filePath.getParent().getName();
+  }
+
+  @Override
+  public void execute(FileSystem fileSystem, Path filePath) throws IOException {
+    Path destPath = new Path(new Path(destination, getSource(filePath)), filePath.getName());
+    LOG.info("Moving file " + filePath + " to " + destPath);
+    boolean success = fileSystem.rename(filePath, destPath);
+    return;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceFileNameFormat.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceFileNameFormat.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceFileNameFormat.java
new file mode 100644
index 0000000..ae0242d
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceFileNameFormat.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.writer.hdfs;
+
+import backtype.storm.task.TopologyContext;
+import org.apache.storm.hdfs.bolt.format.FileNameFormat;
+
+import java.util.Map;
+
+public class SourceFileNameFormat implements FileNameFormat {
+  FileNameFormat delegate;
+  String sourceType;
+  public SourceFileNameFormat(String sourceType, FileNameFormat delegate) {
+    this.delegate = delegate;
+    this.sourceType = sourceType;
+  }
+
+  @Override
+  public void prepare(Map map, TopologyContext topologyContext) {
+    this.delegate.prepare(map, topologyContext);
+  }
+
+  @Override
+  public String getName(long l, long l1) {
+    return delegate.getName(l, l1);
+  }
+
+  @Override
+  public String getPath() {
+    return delegate.getPath() + "/" + sourceType;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceHandler.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceHandler.java
new file mode 100644
index 0000000..0225137
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/writer/hdfs/SourceHandler.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.writer.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
+import org.apache.hadoop.hdfs.util.MD5FileUtils;
+import org.apache.hadoop.io.MD5Hash;
+import org.apache.log4j.Logger;
+import org.apache.storm.hdfs.bolt.format.FileNameFormat;
+import org.apache.storm.hdfs.bolt.rotation.FileRotationPolicy;
+import org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy;
+import org.apache.storm.hdfs.bolt.sync.SyncPolicy;
+import org.apache.storm.hdfs.common.rotation.RotationAction;
+import org.apache.storm.hdfs.common.security.HdfsSecurityUtil;
+import org.json.simple.JSONObject;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.*;
+
+public class SourceHandler {
+  private static final Logger LOG = Logger.getLogger(SourceHandler.class);
+  List<RotationAction> rotationActions = new ArrayList<>();
+  FileRotationPolicy rotationPolicy;
+  SyncPolicy syncPolicy;
+  FileNameFormat fileNameFormat;
+  private long offset = 0;
+  private int rotation = 0;
+  private transient FSDataOutputStream out;
+  private transient Object writeLock;
+  protected transient Timer rotationTimer; // only used for TimedRotationPolicy
+  protected transient FileSystem fs;
+  protected transient Path currentFile;
+  public SourceHandler(List<RotationAction> rotationActions
+                      , FileRotationPolicy rotationPolicy
+                      , SyncPolicy syncPolicy
+                      , FileNameFormat fileNameFormat
+                      , Map config
+                      ) throws IOException {
+    this.rotationActions = rotationActions;
+    this.rotationPolicy = rotationPolicy;
+    this.syncPolicy = syncPolicy;
+    this.fileNameFormat = fileNameFormat;
+    initialize(config);
+  }
+
+  public void handle(List<JSONObject> messages) throws Exception{
+
+    for(JSONObject message : messages) {
+      byte[] bytes = (message.toJSONString() + "\n").getBytes();
+      synchronized (this.writeLock) {
+        out.write(bytes);
+        this.offset += bytes.length;
+
+        if (this.syncPolicy.mark(null, this.offset)) {
+          if (this.out instanceof HdfsDataOutputStream) {
+            ((HdfsDataOutputStream) this.out).hsync(EnumSet.of(HdfsDataOutputStream.SyncFlag.UPDATE_LENGTH));
+          } else {
+            this.out.hsync();
+          }
+          this.syncPolicy.reset();
+        }
+      }
+
+      if (this.rotationPolicy.mark(null, this.offset)) {
+        rotateOutputFile(); // synchronized
+        this.offset = 0;
+        this.rotationPolicy.reset();
+      }
+    }
+  }
+
+  private void initialize(Map config) throws IOException {
+    this.writeLock = new Object();
+    Configuration hdfsConfig = new Configuration();
+    this.fs = FileSystem.get(new Configuration());
+    HdfsSecurityUtil.login(config, hdfsConfig);
+    this.currentFile = createOutputFile();
+    if(this.rotationPolicy instanceof TimedRotationPolicy){
+      long interval = ((TimedRotationPolicy)this.rotationPolicy).getInterval();
+      this.rotationTimer = new Timer(true);
+      TimerTask task = new TimerTask() {
+        @Override
+        public void run() {
+          try {
+            rotateOutputFile();
+          } catch(IOException e){
+            LOG.warn("IOException during scheduled file rotation.", e);
+          }
+        }
+      };
+      this.rotationTimer.scheduleAtFixedRate(task, interval, interval);
+    }
+  }
+
+  protected void rotateOutputFile() throws IOException {
+    LOG.info("Rotating output file...");
+    long start = System.currentTimeMillis();
+    synchronized (this.writeLock) {
+      closeOutputFile();
+      this.rotation++;
+
+      Path newFile = createOutputFile();
+      LOG.info("Performing " +  this.rotationActions.size() + " file rotation actions." );
+      for (RotationAction action : this.rotationActions) {
+        action.execute(this.fs, this.currentFile);
+      }
+      this.currentFile = newFile;
+    }
+    long time = System.currentTimeMillis() - start;
+    LOG.info("File rotation took " + time + " ms.");
+  }
+
+  private Path createOutputFile() throws IOException {
+    Path path = new Path(this.fileNameFormat.getPath(), this.fileNameFormat.getName(this.rotation, System.currentTimeMillis()));
+    if(fs.getScheme().equals("file")) {
+      //in the situation where we're running this in a local filesystem, flushing doesn't work.
+      fs.mkdirs(path.getParent());
+      this.out = new FSDataOutputStream(new FileOutputStream(path.toString()), null);
+    }
+    else {
+      this.out = this.fs.create(path);
+    }
+    return path;
+  }
+
+  private void closeOutputFile() throws IOException {
+    this.out.close();
+  }
+
+
+  public void close() {
+    try {
+      closeOutputFile();
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to close output file.", e);
+    }
+  }
+}


[39/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/ConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/ConfigurationUtil.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/ConfigurationUtil.java
new file mode 100644
index 0000000..e5c464f
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/ConfigurationUtil.java
@@ -0,0 +1,286 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.springframework.util.Assert;
+
+import org.apache.metron.api.ConfigurationManager;
+
+
+
+/**
+ * utility class for this module which loads commons configuration to fetch
+ * properties from underlying resources to communicate with hbase.
+ * 
+ * @author Sayi
+ */
+public class ConfigurationUtil {
+
+	/** Configuration definition file name for fetching pcaps from hbase */
+	private static final String configDefFileName = "config-definition-hbase.xml";
+	
+	/** property configuration. */
+	private static Configuration propConfiguration = null;
+
+
+	/**
+	 * The Enum SizeUnit.
+	 */
+	public enum SizeUnit {
+
+		/** The kb. */
+		KB,
+		/** The mb. */
+		MB
+	};
+
+	/** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+	private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+	/**
+	 * Loads configuration resources 
+	 * @return Configuration
+	 */
+	public static Configuration getConfiguration() {
+		if(propConfiguration == null){
+			propConfiguration =  ConfigurationManager.getConfiguration(configDefFileName);
+		}
+		return propConfiguration;
+	}
+
+	/**
+	 * Returns the configured default result size in bytes, if the user input is
+	 * null; otherwise, returns the user input after validating with the
+	 * configured max value. Throws IllegalArgumentException if : 1. input is
+	 * less than or equals to 0 OR 2. input is greater than configured
+	 * {hbase.scan.max.result.size} value
+	 * 
+	 * @param input
+	 *            the input
+	 * @return long
+	 */
+	public static long validateMaxResultSize(String input) {
+		if (input == null) {
+			return getDefaultResultSize();
+		}
+		// validate the user input
+		long value = convertToBytes(Long.parseLong(input), getResultSizeUnit());
+		Assert.isTrue(
+				isAllowableResultSize(value),
+				"'maxResponseSize' param value must be positive and less than {hbase.scan.max.result.size} value");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Checks if is allowable result size.
+	 * 
+	 * @param input
+	 *            the input
+	 * @return true, if is allowable result size
+	 */
+	public static boolean isAllowableResultSize(long input) {
+		if (input <= 0 || input > getMaxResultSize()) {
+			return false;
+		}
+		return true;
+	}
+
+	/**
+	 * Returns the configured default result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getDefaultResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.default.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max result size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxResultSize() {
+		float value = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.scan.max.result.size");
+		return convertToBytes(value, getResultSizeUnit());
+	}
+
+	/**
+	 * Returns the configured max row size in bytes.
+	 * 
+	 * @return long
+	 */
+	public static long getMaxRowSize() {
+		float maxRowSize = ConfigurationUtil.getConfiguration().getFloat(
+				"hbase.table.max.row.size");
+		return convertToBytes(maxRowSize, getRowSizeUnit());
+	}
+
+	/**
+	 * Gets the result size unit.
+	 * 
+	 * @return the result size unit
+	 */
+	public static SizeUnit getResultSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.scan.result.size.unit"));
+	}
+
+	/**
+	 * Gets the row size unit.
+	 * 
+	 * @return the row size unit
+	 */
+	public static SizeUnit getRowSizeUnit() {
+		return SizeUnit.valueOf(ConfigurationUtil.getConfiguration()
+				.getString("hbase.table.row.size.unit"));
+	}
+
+	/**
+	 * Gets the connection retry limit.
+	 * 
+	 * @return the connection retry limit
+	 */
+	public static int getConnectionRetryLimit() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.hconnection.retries.number",
+				DEFAULT_HCONNECTION_RETRY_LIMIT);
+	}
+
+	/**
+	 * Checks if is default include reverse traffic.
+	 * 
+	 * @return true, if is default include reverse traffic
+	 */
+	public static boolean isDefaultIncludeReverseTraffic() {
+		return ConfigurationUtil.getConfiguration().getBoolean(
+				"pcaps.include.reverse.traffic");
+	}
+
+	/**
+	 * Gets the table name.
+	 * 
+	 * @return the table name
+	 */
+	public static byte[] getTableName() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.name"));
+	}
+
+	/**
+	 * Gets the column family.
+	 * 
+	 * @return the column family
+	 */
+	public static byte[] getColumnFamily() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.family"));
+	}
+
+	/**
+	 * Gets the column qualifier.
+	 * 
+	 * @return the column qualifier
+	 */
+	public static byte[] getColumnQualifier() {
+		return Bytes.toBytes(ConfigurationUtil.getConfiguration().getString(
+				"hbase.table.column.qualifier"));
+	}
+
+	/**
+	 * Gets the max versions.
+	 * 
+	 * @return the max versions
+	 */
+	public static int getMaxVersions() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.column.maxVersions");
+	}
+
+	/**
+	 * Gets the configured tokens in rowkey.
+	 * 
+	 * @return the configured tokens in rowkey
+	 */
+	public static int getConfiguredTokensInRowkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.tokens");
+	}
+
+	/**
+	 * Gets the minimum tokens in inputkey.
+	 * 
+	 * @return the minimum tokens in inputkey
+	 */
+	public static int getMinimumTokensInInputkey() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"rest.api.input.key.min.tokens");
+	}
+
+	/**
+	 * Gets the appending token digits.
+	 * 
+	 * @return the appending token digits
+	 */
+	public static int getAppendingTokenDigits() {
+		return ConfigurationUtil.getConfiguration().getInt(
+				"hbase.table.row.key.token.appending.digits");
+	}
+
+	/**
+	 * Convert to bytes.
+	 * 
+	 * @param value
+	 *            the value
+	 * @param unit
+	 *            the unit
+	 * @return the long
+	 */
+	public static long convertToBytes(float value, SizeUnit unit) {
+		if (SizeUnit.KB == unit) {
+			return (long) (value * 1024);
+		}
+		if (SizeUnit.MB == unit) {
+			return (long) (value * 1024 * 1024);
+		}
+		return (long) value;
+	}
+
+	/**
+	 * The main method.
+	 * 
+	 * @param args
+	 *            the arguments
+	 */
+	public static void main(String[] args) {
+		long r1 = getMaxRowSize();
+		System.out.println("getMaxRowSizeInBytes = " + r1);
+		long r2 = getMaxResultSize();
+		System.out.println("getMaxAllowableResultSizeInBytes = " + r2);
+
+		SizeUnit u1 = getRowSizeUnit();
+		System.out.println("getMaxRowSizeUnit = " + u1.toString());
+		SizeUnit u2 = getResultSizeUnit();
+		System.out.println("getMaxAllowableResultsSizeUnit = " + u2.toString());
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigConstants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigConstants.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigConstants.java
new file mode 100644
index 0000000..ffd81ff
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigConstants.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+/**
+ * HBase configuration properties.
+ * 
+ * @author Sayi
+ */
/**
 * HBase configuration property names and row-key delimiter constants used by
 * the pcap service.
 * 
 * @author Sayi
 */
public class HBaseConfigConstants {

  /** Property name for the ZooKeeper quorum hosts. */
  public static final String HBASE_ZOOKEEPER_QUORUM = "hbase.zookeeper.quorum";

  /** Property name for the ZooKeeper client port. */
  public static final String HBASE_ZOOKEEPER_CLIENT_PORT = "hbase.zookeeper.clientPort";

  /** Property name for the ZooKeeper session timeout. */
  public static final String HBASE_ZOOKEEPER_SESSION_TIMEOUT = "zookeeper.session.timeout";

  /** Property name for the ZooKeeper recovery retry count. */
  public static final String HBASE_ZOOKEEPER_RECOVERY_RETRY = "zookeeper.recovery.retry";

  /** Property name for the HBase client retry count. */
  public static final String HBASE_CLIENT_RETRIES_NUMBER = "hbase.client.retries.number";

  // NOTE(review): instance field (sic "delimeter") duplicates
  // PCAP_KEY_DELIMETER below and is never referenced in this class —
  // candidate for removal if no package-local caller uses it.
  String delimeter = "-";

  // NOTE(review): escaped-delimiter regex instance field; also apparently
  // unused here — verify before removing.
  String regex = "\\-";

  /** Delimiter between tokens in a pcap row key. */
  public static final String PCAP_KEY_DELIMETER = "-";

  /** Map key / parameter name for a scan's start key. */
  public static final String START_KEY = "startKey";

  /** Map key / parameter name for a scan's end key. */
  public static final String END_KEY = "endKey";

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigurationUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigurationUtil.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigurationUtil.java
new file mode 100644
index 0000000..75932ab
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/HBaseConfigurationUtil.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.log4j.Logger;
+import org.mortbay.log.Log;
+
+/**
+ * Utility class which creates HConnection instance when the first request is
+ * received and registers a shut down hook which closes the connection when the
+ * JVM exits. Creates new connection to the cluster only if the existing
+ * connection is closed for unknown reasons. Also creates Configuration with
+ * HBase resources using configuration properties.
+ * 
+ * @author Sayi
+ * 
+ */
+public class HBaseConfigurationUtil {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(HBaseConfigurationUtil.class);
+
+  /** Configuration which holds all HBase properties. */
+  private static Configuration config;
+
+  /**
+   * A cluster connection which knows how to find master node and locate regions
+   * on the cluster.
+   */
+  private static HConnection clusterConnection = null;
+
+  /**
+   * Creates HConnection instance when the first request is received and returns
+   * the same instance for all subsequent requests if the connection is still
+   * open.
+   * 
+   * @return HConnection instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static HConnection getConnection() throws IOException {
+    if (!connectionAvailable()) {
+      synchronized (HBaseConfigurationUtil.class) {
+        createClusterConncetion();
+      }
+    }
+    return clusterConnection;
+  }
+
+  /**
+   * Creates the cluster conncetion.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private static void createClusterConncetion() throws IOException {
+    try {
+      if (connectionAvailable()) {
+        return;
+      }
+      clusterConnection = HConnectionManager.createConnection(HBaseConfiguration.create());
+      addShutdownHook();
+      System.out.println("Created HConnection and added shutDownHook");
+    } catch (IOException e) {
+      LOGGER
+          .error(
+              "Exception occurred while creating HConnection using HConnectionManager",
+              e);
+      throw e;
+    }
+  }
+
+  /**
+   * Connection available.
+   * 
+   * @return true, if successful
+   */
+  private static boolean connectionAvailable() {
+    if (clusterConnection == null) {
+      System.out.println("clusterConnection=" + clusterConnection);
+      return false;
+    }
+    System.out.println("clusterConnection.isClosed()="
+        + clusterConnection.isClosed());
+    return clusterConnection != null && !clusterConnection.isClosed();
+  }
+
+  /**
+   * Adds the shutdown hook.
+   */
+  private static void addShutdownHook() {
+    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+      public void run() {
+        System.out
+            .println("Executing ShutdownHook HBaseConfigurationUtil : Closing HConnection");
+        try {
+          clusterConnection.close();
+        } catch (IOException e) {
+          Log.debug("Caught ignorable exception ", e);
+        }
+      }
+    }, "HBaseConfigurationUtilShutDown"));
+  }
+
+  /**
+   * Closes the underlying connection to cluster; ignores if any exception is
+   * thrown.
+   */
+  public static void closeConnection() {
+    if (clusterConnection != null) {
+      try {
+        clusterConnection.close();
+      } catch (IOException e) {
+        Log.debug("Caught ignorable exception ", e);
+      }
+    }
+  }
+
+  /**
+   * This method creates Configuration with HBase resources using configuration
+   * properties. The same Configuration object will be used to communicate with
+   * all HBase tables;
+   * 
+   * @return Configuration object
+   */
+  public static Configuration read() {
+    if (config == null) {
+      synchronized (HBaseConfigurationUtil.class) {
+        if (config == null) {
+          config = HBaseConfiguration.create();
+
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_QUORUM,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.zookeeper.quorum"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_CLIENT_PORT,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.zookeeper.clientPort"));
+          config.set(
+              HBaseConfigConstants.HBASE_CLIENT_RETRIES_NUMBER,
+              ConfigurationUtil.getConfiguration().getString(
+                  "hbase.client.retries.number"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_SESSION_TIMEOUT,
+              ConfigurationUtil.getConfiguration().getString(
+                  "zookeeper.session.timeout"));
+          config.set(
+              HBaseConfigConstants.HBASE_ZOOKEEPER_RECOVERY_RETRY,
+              ConfigurationUtil.getConfiguration().getString(
+                  "zookeeper.recovery.retry"));
+        }
+      }
+    }
+    return config;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapGetter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapGetter.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapGetter.java
new file mode 100644
index 0000000..6176707
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapGetter.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * interface to all 'keys' based pcaps fetching methods.
+ * 
+ * @author Sayi
+ */
+public interface IPcapGetter {
+
+  /**
+   * Gets the pcaps for the input list of keys and lastRowKey.
+   * 
+   * @param keys
+   *          the list of keys for which pcaps are to be retrieved
+   * @param lastRowKey
+   *          last row key from the previous partial response
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends
+   *          negative value. 'endTime' must be greater than the 'startTime'.
+   * @param includeReverseTraffic
+   *          indicates whether or not to include pcaps from the reverse traffic
+   * @param includeDuplicateLastRow
+   *          indicates whether or not to include the last row from the previous
+   *          partial response
+   * @param maxResultSize
+   *          the max result size
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
+      long startTime, long endTime, boolean includeReverseTraffic,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException;
+
+  /**
+   * Gets the pcaps for the input key.
+   * 
+   * @param key
+   *          the key for which pcaps is to be retrieved.
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps.The value is set to Long.MAX_VALUE if the caller sends
+   *          negative value. 'endTime' must be greater than the 'startTime'.
+   * @param includeReverseTraffic
+   *          indicates whether or not to include pcaps from the reverse traffic
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(String key, long startTime, long endTime,
+      boolean includeReverseTraffic) throws IOException;
+
+  /**
+   * Gets the pcaps for the input list of keys.
+   * 
+   * @param keys
+   *          the list of keys for which pcaps are to be retrieved.
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(List<String> keys) throws IOException;
+
+  /**
+   * Gets the pcaps for the input key.
+   * 
+   * @param key
+   *          the key for which pcaps is to be retrieved.
+   * @return PcapsResponse with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapsResponse getPcaps(String key) throws IOException;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapScanner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapScanner.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapScanner.java
new file mode 100644
index 0000000..4101328
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/IPcapScanner.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+
+/**
+ * The Interface for all pcaps fetching methods based on key range.
+ */
+public interface IPcapScanner {
+
+  /**
+   * Gets the pcaps between startKey (inclusive) and endKey (exclusive).
+   * 
+   * @param startKey
+   *          the start key of a key range for which pcaps is to be retrieved.
+   * @param endKey
+   *          the end key of a key range for which pcaps is to be retrieved.
+   * @param maxResponseSize
+   *          indicates the maximum response size in MegaBytes(MB). User needs
+   *          to pass positive value and must be less than 60 (MB)
+   * @param startTime
+   *          the start time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to '0' if the caller sends negative value
+   * @param endTime
+   *          the end time in system milliseconds to be used to filter the
+   *          pcaps. The value is set to Long.MAX_VALUE if the caller sends
+   *          negative value
+   * @return byte array with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps(String startKey, String endKey, long maxResponseSize,
+      long startTime, long endTime) throws IOException;
+
+  /**
+   * Gets the pcaps between startKey (inclusive) and endKey (exclusive).
+   * 
+   * @param startKey
+   *          the start key (inclusive) of a key range for which pcaps is to be
+   *          retrieved.
+   * @param endKey
+   *          the end key (exclusive) of a key range for which pcaps is to be
+   *          retrieved.
+   * @return byte array with all matching pcaps merged together
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps(String startKey, String endKey) throws IOException;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapGetterHBaseImpl.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapGetterHBaseImpl.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapGetterHBaseImpl.java
new file mode 100644
index 0000000..58fecb9
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapGetterHBaseImpl.java
@@ -0,0 +1,826 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.annotation.Resource;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Singleton class which integrates with HBase table and returns pcaps sorted by
+ * timestamp (desc) for the given list of keys. Creates HConnection if it is not
+ * already created and the same connection instance is being used for all
+ * requests
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+
+@Path("/")
+public class PcapGetterHBaseImpl implements IPcapGetter {
+
+  /** The pcap getter h base. */
+  private static IPcapGetter pcapGetterHBase = null;
+
+  /** The Constant LOG. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapGetterHBaseImpl.class);
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List,
+   * java.lang.String, long, long, boolean, boolean, long)
+   */
+ 
+  
+	@GET
+	@Path("pcap/test")
+	@Produces("text/html")
+	public Response  index() throws URISyntaxException { 
+		return Response.ok("ALL GOOD").build();   
+	}
+	
+	
+  public PcapsResponse getPcaps(List<String> keys, String lastRowKey,
+      long startTime, long endTime, boolean includeReverseTraffic,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    Assert
+        .isTrue(
+            checkIfValidInput(keys, lastRowKey),
+            "No valid input. One of the value must be present from {keys, lastRowKey}");
+    LOGGER.info(" keys=" + keys.toString() + ";  lastRowKey="
+        + lastRowKey);
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+    // 1. Process partial response key
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      pcapsResponse = processKey(pcapsResponse, lastRowKey, startTime,
+          endTime, true, includeDuplicateLastRow, maxResultSize);
+      // LOGGER.debug("after scanning lastRowKey=" +
+      // pcapsResponse.toString()+"*********************************************************************");
+      if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+        return pcapsResponse;
+      }
+    }
+    // 2. Process input keys
+    List<String> sortedKeys = sortKeysByAscOrder(keys, includeReverseTraffic);
+    List<String> unprocessedKeys = new ArrayList<String>();
+    unprocessedKeys.addAll(sortedKeys);
+    if (StringUtils.isNotEmpty(lastRowKey)) {
+      unprocessedKeys.clear();
+      unprocessedKeys = getUnprocessedSublistOfKeys(sortedKeys,
+          lastRowKey);
+    }
+    LOGGER.info("unprocessedKeys in getPcaps" + unprocessedKeys.toString());
+    if (!CollectionUtils.isEmpty(unprocessedKeys)) {
+      for (int i = 0; i < unprocessedKeys.size(); i++) {
+        pcapsResponse = processKey(pcapsResponse, unprocessedKeys.get(i),
+            startTime, endTime, false, includeDuplicateLastRow, maxResultSize);
+        // LOGGER.debug("after scanning input unprocessedKeys.get(" + i + ") ="
+        // +
+        // pcapsResponse.toString()+"*********************************************************************");
+        if (pcapsResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+          return pcapsResponse;
+        }
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String, long,
+   * long, boolean)
+   */
+ 
+  public PcapsResponse getPcaps(String key, long startTime, long endTime,
+      boolean includeReverseTraffic) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    return getPcaps(Arrays.asList(key), null, startTime, endTime,
+        includeReverseTraffic, false, ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.util.List)
+   */
+ 
+  public PcapsResponse getPcaps(List<String> keys) throws IOException {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    return getPcaps(keys, null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapGetter#getPcaps(java.lang.String)
+   */
+ 
+  public PcapsResponse getPcaps(String key) throws IOException {
+    Assert.hasText(key, "key must not be null or empty");
+    return getPcaps(Arrays.asList(key), null, -1, -1,
+        ConfigurationUtil.isDefaultIncludeReverseTraffic(), false,
+        ConfigurationUtil.getDefaultResultSize());
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapGetter singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapGetter getInstance() throws IOException {
+    if (pcapGetterHBase == null) {
+      synchronized (PcapGetterHBaseImpl.class) {
+        if (pcapGetterHBase == null) {
+          pcapGetterHBase = new PcapGetterHBaseImpl();
+        }
+      }
+    }
+    return pcapGetterHBase;
+  }
+
+  /**
+   * Instantiates a new pcap getter h base impl.
+   */
+  private PcapGetterHBaseImpl() {
+  }
+
+  /**
+   * Adds reverse keys to the list if the flag 'includeReverseTraffic' is set to
+   * true; removes duplicates and sorts the list by ascending order.
+   * 
+   * @param keys
+   *          input keys
+   * @param includeReverseTraffic
+   *          flag whether or not to include reverse traffic
+   * @return List<String>
+   */
+  @VisibleForTesting
+  List<String> sortKeysByAscOrder(List<String> keys,
+      boolean includeReverseTraffic) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    if (includeReverseTraffic) {
+      keys.addAll(PcapHelper.reverseKey(keys));
+    }
+    List<String> deDupKeys = removeDuplicateKeys(keys);
+    Collections.sort(deDupKeys);
+    return deDupKeys;
+  }
+
+  /**
+   * Removes the duplicate keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the list
+   */
+  @VisibleForTesting
+public
+  List<String> removeDuplicateKeys(List<String> keys) {
+    Set<String> set = new HashSet<String>(keys);
+    return new ArrayList<String>(set);
+  }
+
+  /**
+   * <p>
+   * Returns the sublist starting from the element after the lastRowKey
+   * to the last element in the list; if the 'lastRowKey' is not matched
+   * the complete list will be returned.
+   * </p>
+   * 
+   * <pre>
+   * Eg :
+   *  keys = [18800006-1800000b-06-0019-caac, 18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   *  lastRowKey = "18800006-1800000b-06-0019-caac-65140-40815"
+   *  and the response from this method [18800006-1800000b-06-0050-5af6, 18800006-1800000b-11-0035-3810]
+   * </pre>
+   * 
+   * @param keys
+   *          keys
+   * @param lastRowKey
+   *          last row key of the previous partial response
+   * @return List<String>
+   */
+  @VisibleForTesting
+  List<String> getUnprocessedSublistOfKeys(List<String> keys,
+      String lastRowKey) {
+    Assert.notEmpty(keys, "'keys' must not be null");
+    Assert.hasText(lastRowKey, "'lastRowKey' must not be null");
+    String partialKey = getTokens(lastRowKey, 5);
+    int startIndex = 0;
+    for (int i = 0; i < keys.size(); i++) {
+      if (partialKey.equals(keys.get(i))) {
+        startIndex = i + 1;
+        break;
+      }
+    }
+    List<String> unprocessedKeys = keys.subList(startIndex, keys.size());
+    return unprocessedKeys;
+  }
+
+  /**
+   * Returns the first 'noOfTokens' tokens from the given key; token delimiter
+   * "-";.
+   * 
+   * @param key
+   *          given key
+   * @param noOfTokens
+   *          number of tokens to retrieve
+   * @return the tokens
+   */
+  @VisibleForTesting
+  String getTokens(String key, int noOfTokens) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    String[] keyTokens = key.split(regex);
+    Assert.isTrue(noOfTokens < keyTokens.length,
+        "Invalid value for 'noOfTokens'");
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < noOfTokens; i++) {
+      sbf.append(keyTokens[i]);
+      if (i != (noOfTokens - 1)) {
+        sbf.append(HBaseConfigConstants.PCAP_KEY_DELIMETER);
+      }
+
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * Process key.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response
+   * @param key
+   *          the key
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @param isPartialResponse
+   *          the is partial response
+   * @param includeDuplicateLastRow
+   *          the include duplicate last row
+   * @param maxResultSize
+   *          the max result size
+   * @return the pcaps response
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  PcapsResponse processKey(PcapsResponse pcapsResponse, String key,
+      long startTime, long endTime, boolean isPartialResponse,
+      boolean includeDuplicateLastRow, long maxResultSize) throws IOException {
+    HTable table = null;
+    Scan scan = null;
+    List<Cell> scannedCells = null;
+    try {
+      // 1. Create start and stop row for the key;
+      Map<String, String> keysMap = createStartAndStopRowKeys(key,
+          isPartialResponse, includeDuplicateLastRow);
+
+      // 2. if the input key contains all fragments (7) and it is not part
+      // of previous partial response (isPartialResponse),
+      // 'keysMap' will be null; do a Get; currently not doing any
+      // response size related checks for Get;
+      // by default all cells from a specific row are sorted by timestamp
+      if (keysMap == null) {
+        Get get = createGetRequest(key, startTime, endTime);
+        List<Cell> cells = executeGetRequest(table, get);
+        for (Cell cell : cells) {
+          pcapsResponse.addPcaps(CellUtil.cloneValue(cell));
+        }
+        return pcapsResponse;
+      }
+      // 3. Create and execute Scan request
+      scan = createScanRequest(pcapsResponse, keysMap, startTime, endTime,
+          maxResultSize);
+      scannedCells = executeScanRequest(table, scan);
+      LOGGER.info("scannedCells size :" + scannedCells.size());
+      addToResponse(pcapsResponse, scannedCells, maxResultSize);
+
+    } catch (IOException e) {
+      LOGGER.error("Exception occurred while fetching Pcaps for the keys :"
+          + key, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = ConfigurationUtil.getConnectionRetryLimit();
+        System.out.println("maxRetryLimit =" + maxRetryLimit);
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          System.out.println("attempting  =" + attempt);
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the
+            // existing
+            // connection
+            // and retry,
+            // it will
+            // create a new
+            // HConnection
+            scannedCells = executeScanRequest(table, scan);
+            addToResponse(pcapsResponse, scannedCells, maxResultSize);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              LOGGER.error("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      }
+
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    return pcapsResponse;
+  }
+
+  /**
+   * Adds the to response.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response
+   * @param scannedCells
+   *          the scanned cells
+   * @param maxResultSize
+   *          the max result size
+   */
+  private void addToResponse(PcapsResponse pcapsResponse,
+      List<Cell> scannedCells, long maxResultSize) {
+    String lastKeyFromCurrentScan = null;
+    if (scannedCells != null && scannedCells.size() > 0) {
+      lastKeyFromCurrentScan = new String(CellUtil.cloneRow(scannedCells
+          .get(scannedCells.size() - 1)));
+    }
+    // 4. calculate the response size
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    for (Cell sortedCell : scannedCells) {
+      pcapsResponse.addPcaps(CellUtil.cloneValue(sortedCell));
+    }
+    if (!pcapsResponse.isResonseSizeWithinLimit(maxResultSize)) {
+      pcapsResponse.setStatus(PcapsResponse.Status.PARTIAL); // response size
+                                                             // reached
+      pcapsResponse.setLastRowKey(new String(lastKeyFromCurrentScan));
+    }
+  }
+
+  /**
+   * Builds start and stop row keys according to the following logic : 1.
+   * Creates tokens out of 'key' using pcap_id delimiter ('-') 2. if the input
+   * 'key' contains (assume : configuredTokensInRowKey=7 and
+   * minimumTokensIninputKey=5): a). 5 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-00000-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999" b). 6 tokens
+   * ("srcIp-dstIp-protocol-srcPort-dstPort-id1") startKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-00000" stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-id1-99999"
+   * 
+   * c). 7 tokens ("srcIp-dstIp-protocol-srcPort-dstPort-id1-id2") 1>. if the
+   * key is NOT part of the partial response from previous request, return
+   * 'null' 2>. if the key is part of partial response from previous request
+   * startKey = "srcIp-dstIp-protocol-srcPort-dstPort-id1-(id2+1)"; 1 is added
+   * to exclude this key as it was included in the previous request stopKey =
+   * "srcIp-dstIp-protocol-srcPort-dstPort-99999-99999"
+   * 
+   * @param key
+   *          the key
+   * @param isLastRowKey
+   *          if the key is part of partial response
+   * @param includeDuplicateLastRow
+   *          the include duplicate last row
+   * @return Map<String, String>
+   */
+  @VisibleForTesting
+  Map<String, String> createStartAndStopRowKeys(String key,
+      boolean isLastRowKey, boolean includeDuplicateLastRow) {
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    String[] keyTokens = key.split(regex);
+
+    String startKey = null;
+    String endKey = null;
+    Map<String, String> map = new HashMap<String, String>();
+
+    int configuredTokensInRowKey = ConfigurationUtil
+        .getConfiguredTokensInRowkey();
+    int minimumTokensIninputKey = ConfigurationUtil
+        .getMinimumTokensInInputkey();
+    Assert
+        .isTrue(
+            minimumTokensIninputKey <= configuredTokensInRowKey,
+            "tokens in the input key (separated by '-'), must be less than or equal to the tokens used in hbase table row key ");
+    // in case if the input key contains 'configuredTokensInRowKey' tokens and
+    // it is NOT a
+    // partial response key, do a Get instead of Scan
+    if (keyTokens.length == configuredTokensInRowKey) {
+      if (!isLastRowKey) {
+        return null;
+      }
+      // it is a partial response key; 'startKey' is same as input partial
+      // response key; 'endKey' can be built by replacing
+      // (configuredTokensInRowKey - minimumTokensIninputKey) tokens
+      // of input partial response key with '99999'
+      if (keyTokens.length == minimumTokensIninputKey) {
+        return null;
+      }
+      int appendingTokenSlots = configuredTokensInRowKey
+          - minimumTokensIninputKey;
+      if (appendingTokenSlots > 0) {
+        String partialKey = getTokens(key, minimumTokensIninputKey);
+        StringBuffer sbfStartNew = new StringBuffer(partialKey);
+        StringBuffer sbfEndNew = new StringBuffer(partialKey);
+        for (int i = 0; i < appendingTokenSlots; i++) {
+          if (i == (appendingTokenSlots - 1)) {
+            if (!includeDuplicateLastRow) {
+              sbfStartNew
+                  .append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(
+                      Integer.valueOf(keyTokens[minimumTokensIninputKey + i]) + 1);
+            } else {
+              sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER)
+                  .append(keyTokens[minimumTokensIninputKey + i]);
+            }
+          } else {
+            sbfStartNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+                keyTokens[minimumTokensIninputKey + i]);
+          }
+          sbfEndNew.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+              getMaxLimitForAppendingTokens());
+        }
+        startKey = sbfStartNew.toString();
+        endKey = sbfEndNew.toString();
+      }
+    } else {
+      StringBuffer sbfStart = new StringBuffer(key);
+      StringBuffer sbfEnd = new StringBuffer(key);
+      for (int i = keyTokens.length; i < configuredTokensInRowKey; i++) {
+        sbfStart.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMinLimitForAppendingTokens());
+        sbfEnd.append(HBaseConfigConstants.PCAP_KEY_DELIMETER).append(
+            getMaxLimitForAppendingTokens());
+      }
+      startKey = sbfStart.toString();
+      endKey = sbfEnd.toString();
+    }
+    map.put(HBaseConfigConstants.START_KEY, startKey);
+    map.put(HBaseConfigConstants.END_KEY, endKey);
+
+    return map;
+  }
+
+  /**
+   * Returns false if keys is empty or null AND lastRowKey is null or
+   * empty; otherwise returns true.
+   * 
+   * @param keys
+   *          input row keys
+   * @param lastRowKey
+   *          partial response key
+   * @return boolean
+   */
+  @VisibleForTesting
+  boolean checkIfValidInput(List<String> keys, String lastRowKey) {
+    if (CollectionUtils.isEmpty(keys)
+        && StringUtils.isEmpty(lastRowKey)) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * Executes the given Get request.
+   * 
+   * @param table
+   *          hbase table
+   * @param get
+   *          Get
+   * @return List<Cell>
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeGetRequest(HTable table, Get get)
+      throws IOException {
+    LOGGER.info("Get :" + get.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+        ConfigurationUtil.getTableName());
+    Result result = table.get(get);
+    List<Cell> cells = result.getColumnCells(
+        ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+    return cells;
+  }
+
+  /**
+   * Execute scan request.
+   * 
+   * @param table
+   *          hbase table
+   * @param scan
+   *          the scan
+   * @return the list
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private List<Cell> executeScanRequest(HTable table, Scan scan)
+      throws IOException {
+    LOGGER.info("Scan :" + scan.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(
+          ConfigurationUtil.getColumnFamily(),
+          ConfigurationUtil.getColumnQualifier());
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    return scannedCells;
+  }
+
+  /**
+   * Creates the get request.
+   * 
+   * @param key
+   *          the key
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @return the gets the
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Get createGetRequest(String key, long startTime, long endTime)
+      throws IOException {
+    Get get = new Get(Bytes.toBytes(key));
+    // set family name
+    get.addFamily(ConfigurationUtil.getColumnFamily());
+
+    // set column family, qualifier
+    get.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+
+    // set max versions
+    get.setMaxVersions(ConfigurationUtil.getMaxVersions());
+
+    // set time range
+    setTimeRangeOnGet(get, startTime, endTime);
+    return get;
+  }
+
+  /**
+   * Creates the scan request.
+   * 
+   * @param pcapsResponse
+   *          the pcaps response
+   * @param keysMap
+   *          the keys map
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @param maxResultSize
+   *          the max result size
+   * @return the scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(PcapsResponse pcapsResponse,
+      Map<String, String> keysMap, long startTime, long endTime,
+      long maxResultSize) throws IOException {
+    Scan scan = new Scan();
+    // set column family, qualifier
+    scan.addColumn(ConfigurationUtil.getColumnFamily(),
+        ConfigurationUtil.getColumnQualifier());
+
+    // set start and stop keys
+    scan.setStartRow(keysMap.get(HBaseConfigConstants.START_KEY).getBytes());
+    scan.setStopRow(keysMap.get(HBaseConfigConstants.END_KEY).getBytes());
+
+    // set max results size : remaining size = max results size - ( current
+    // pcaps response size + possible maximum row size)
+    long remainingSize = maxResultSize
+        - (pcapsResponse.getResponseSize() + ConfigurationUtil.getMaxRowSize());
+
+    if (remainingSize > 0) {
+      scan.setMaxResultSize(remainingSize);
+    }
+    // set max versions
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+
+    // set time range
+    setTimeRangeOnScan(scan, startTime, endTime);
+    return scan;
+  }
+
+  /**
+   * Sets the time range on scan.
+   * 
+   * @param scan
+   *          the scan
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnScan(Scan scan, long startTime, long endTime)
+      throws IOException {
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+      scan.setTimeRange(startTime, endTime);
+    }
+  }
+
+  /**
+   * Sets the time range on get.
+   * 
+   * @param get
+   *          the get
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void setTimeRangeOnGet(Get get, long startTime, long endTime)
+      throws IOException {
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+      get.setTimeRange(startTime, endTime);
+    }
+  }
+
+  /**
+   * Gets the min limit for appending tokens.
+   * 
+   * @return the min limit for appending tokens
+   */
+  private String getMinLimitForAppendingTokens() {
+    int digits = ConfigurationUtil.getAppendingTokenDigits();
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < digits; i++) {
+      sbf.append("0");
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * Gets the max limit for appending tokens.
+   * 
+   * @return the max limit for appending tokens
+   */
+  private String getMaxLimitForAppendingTokens() {
+    int digits = ConfigurationUtil.getAppendingTokenDigits();
+    StringBuffer sbf = new StringBuffer();
+    for (int i = 0; i < digits; i++) {
+      sbf.append("9");
+    }
+    return sbf.toString();
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static void main(String[] args) throws IOException {
+    if (args == null || args.length < 2) {
+      usage();
+      return;
+    }
+    String outputFileName = null;
+    outputFileName = args[1];
+    List<String> keys = Arrays.asList(StringUtils.split(args[2], ","));
+    System.out.println("Geting keys " + keys);
+    long startTime = 0;
+    long endTime = Long.MAX_VALUE;
+    if (args.length > 3) {
+      startTime = Long.valueOf(args[3]);
+    }
+    if (args.length > 4) {
+      endTime = Long.valueOf(args[4]);
+    }
+    System.out.println("With start time " + startTime + " and end time "
+        + endTime);
+    PcapGetterHBaseImpl downloader = new PcapGetterHBaseImpl();
+    PcapsResponse pcaps = downloader.getPcaps(keys, null, startTime, endTime,
+        false, false, 6);
+    File file = new File(outputFileName);
+    FileUtils.write(file, "", false);
+    FileUtils.writeByteArrayToFile(file, pcaps.getPcaps(), true);
+  }
+
+  /**
+   * Usage.
+   */
+  private static void usage() {
+    System.out.println("java " + PcapGetterHBaseImpl.class.getName() // $codepro.audit.disable
+        // debuggingCode
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapHelper.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapHelper.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapHelper.java
new file mode 100644
index 0000000..893d176
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapHelper.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+import org.mortbay.log.Log;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Utility class which holds methods related to time conversions (between the
+ * caller's time unit and the HBase table's data-creation time unit) and to
+ * building reverse row keys for reverse-traffic lookups.
+ */
+public class PcapHelper {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger.getLogger(PcapHelper.class);
+
+  /**
+   * Comparator used to sort pcap cells by timestamp; stateless, so a single
+   * shared immutable (final) instance is safe.
+   */
+  private static final CellTimestampComparator CELL_TIMESTAMP_COMPARATOR = new CellTimestampComparator();
+
+  /**
+   * The Enum TimeUnit.
+   */
+  public enum TimeUnit {
+
+    /** The seconds. */
+    SECONDS,
+    /** The millis. */
+    MILLIS,
+    /** The micros. */
+    MICROS,
+    /** The unknown. */
+    UNKNOWN
+  };
+
+  /**
+   * Converts the given time to the 'hbase' data creation time unit. The input
+   * unit is inferred from the magnitude of the value (10 digits or fewer =
+   * seconds, 13 or fewer = millis, 16 or fewer = micros).
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  public static long convertToDataCreationTimeUnit(long inputTime) {
+    if (inputTime <= 9999999999L) {
+      return convertSecondsToDataCreationTimeUnit(inputTime); // input time unit
+                                                              // is in seconds
+    } else if (inputTime <= 9999999999999L) {
+      return convertMillisToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // is in millis
+    } else if (inputTime <= 9999999999999999L) {
+      return convertMicrosToDataCreationTimeUnit(inputTime); // input time unit
+                                                             // it in micros
+    }
+    return inputTime; // input time unit is unknown
+  }
+
+  /**
+   * Returns the 'hbase' data creation time unit by reading
+   * 'hbase.table.data.time.unit' property in 'hbase-config' properties file; If
+   * none is mentioned in properties file, returns <code>TimeUnit.UNKNOWN</code>
+   * 
+   * @return TimeUnit
+   */
+  @VisibleForTesting
+  public static TimeUnit getDataCreationTimeUnit() {
+    String timeUnit = ConfigurationUtil.getConfiguration().getString(
+        "hbase.table.data.time.unit");
+    // String concatenation handles null safely; calling timeUnit.toString()
+    // here used to throw an NPE whenever the property was absent.
+    LOGGER.debug("hbase.table.data.time.unit=" + timeUnit);
+    if (StringUtils.isNotEmpty(timeUnit)) {
+      return TimeUnit.valueOf(timeUnit);
+    }
+    return TimeUnit.UNKNOWN;
+  }
+
+  /**
+   * Convert seconds to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  @VisibleForTesting
+  public static long convertSecondsToDataCreationTimeUnit(long inputTime) {
+    // diagnostic only; was a stray System.out.println
+    LOGGER.debug("convert Seconds To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000 * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Builds the reverseKey to fetch the pcaps in the reverse traffic
+   * (destination to source).
+   * 
+   * @param key
+   *          indicates hbase rowKey (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return String indicates the key in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"; empty when the input
+   *         key is not in the expected format
+   */
+  public static String reverseKey(String key) {
+    Assert.hasText(key, "key must not be null or empty");
+    String delimeter = HBaseConfigConstants.PCAP_KEY_DELIMETER;
+    String regex = "\\" + delimeter;
+    // local, single-threaded use: StringBuilder is the idiomatic choice over
+    // the synchronized StringBuffer
+    StringBuilder sb = new StringBuilder();
+    try {
+      String[] tokens = key.split(regex);
+      Assert
+          .isTrue(
+              (tokens.length == 5 || tokens.length == 6 || tokens.length == 7),
+              "key is not in the format : 'srcAddr-dstAddr-protocol-srcPort-dstPort-{ipId-fragment identifier}'");
+      sb.append(tokens[1]).append(delimeter).append(tokens[0])
+          .append(delimeter).append(tokens[2]).append(delimeter)
+          .append(tokens[4]).append(delimeter).append(tokens[3]);
+    } catch (Exception e) {
+      // best effort: a malformed key just disables the reverse scan
+      Log.warn("Failed to reverse the key. Reverse scan won't be performed.", e);
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Builds the reverseKeys to fetch the pcaps in the reverse traffic
+   * (destination to source). If all keys in the input are not in the expected
+   * format, it returns an empty list;
+   * 
+   * @param keys
+   *          indicates list of hbase rowKeys (partial or full) in the format
+   *          "srcAddr-dstAddr-protocol-srcPort-dstPort-fragment"
+   * @return List<String> indicates the list of keys in the format
+   *         "dstAddr-srcAddr-protocol-dstPort-srcPort"
+   */
+  public static List<String> reverseKey(List<String> keys) {
+    Assert.notEmpty(keys, "'keys' must not be null or empty");
+    List<String> reverseKeys = new ArrayList<String>();
+    for (String key : keys) {
+      if (key != null) {
+        String reverseKey = reverseKey(key);
+        if (StringUtils.isNotEmpty(reverseKey)) {
+          reverseKeys.add(reverseKey);
+        }
+      }
+    }
+    return reverseKeys;
+  }
+
+  /**
+   * Returns Comparator for sorting pcaps cells based on the timestamp (dsc).
+   * 
+   * @return CellTimestampComparator
+   */
+  public static CellTimestampComparator getCellTimestampComparator() {
+    return CELL_TIMESTAMP_COMPARATOR;
+  }
+
+  /**
+   * Convert millis to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  @VisibleForTesting
+  private static long convertMillisToDataCreationTimeUnit(long inputTime) {
+    // diagnostic only; was a stray System.out.println
+    LOGGER.debug("convert Millis To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return (inputTime / 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime * 1000;
+    }
+    return inputTime;
+  }
+
+  /**
+   * Convert micros to data creation time unit.
+   * 
+   * @param inputTime
+   *          the input time
+   * @return the long
+   */
+  @VisibleForTesting
+  private static long convertMicrosToDataCreationTimeUnit(long inputTime) {
+    // diagnostic only; was a stray System.out.println
+    LOGGER.debug("convert Micros To DataCreation TimeUnit");
+    TimeUnit dataCreationTimeUnit = getDataCreationTimeUnit();
+    if (TimeUnit.SECONDS == dataCreationTimeUnit) {
+      return inputTime / (1000 * 1000);
+    } else if (TimeUnit.MILLIS == dataCreationTimeUnit) {
+      return inputTime / 1000;
+    } else if (TimeUnit.MICROS == dataCreationTimeUnit) {
+      return inputTime;
+    }
+    return inputTime;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapReceiverImplRestEasy.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapReceiverImplRestEasy.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapReceiverImplRestEasy.java
new file mode 100644
index 0000000..ce3cec9
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapReceiverImplRestEasy.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.metron.pcap.utils.PcapUtils;
+
+/**
+ * RESTEasy resource exposing pcap retrieval endpoints backed by HBase:
+ * lookup by explicit keys, by a row-key range, and by connection identifiers
+ * (srcIp/dstIp/protocol/srcPort/dstPort).
+ */
+@Path("/")
+public class PcapReceiverImplRestEasy {
+
+	/** The Constant LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(PcapReceiverImplRestEasy.class);
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_NAME. */
+	private static final String HEADER_CONTENT_DISPOSITION_NAME = "Content-Disposition";
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_VALUE. */
+	private static final String HEADER_CONTENT_DISPOSITION_VALUE = "attachment; filename=\"managed-threat.pcap\"";
+
+	/** partial response key header name. */
+	private static final String HEADER_PARTIAL_RESPONE_KEY = "lastRowKey";
+
+	/**
+	 * Fetches pcaps for the given list of (possibly comma-separated) keys.
+	 * Returns 204 when nothing matched, 206 plus a 'lastRowKey' header for a
+	 * partial response, and 200 with the merged pcap bytes otherwise.
+	 */
+	@GET
+	@Path("pcapGetter/getPcapsByKeys")
+	public Response getPcapsByKeys(
+			@QueryParam("keys") List<String> keys,
+			@QueryParam("lastRowKey") String lastRowKey,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@QueryParam("includeDuplicateLastRow") boolean includeDuplicateLastRow,
+			@QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
+			@QueryParam("maxResponseSize") String maxResponseSize,
+			@Context HttpServletResponse response) throws IOException {
+		PcapsResponse pcapResponse = null;
+
+		if (keys == null || keys.size() == 0)
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'keys' must not be null or empty").build();
+
+		try {
+			IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+			pcapResponse = pcapGetter.getPcaps(parseKeys(keys), lastRowKey,
+					startTime, endTime, includeReverseTraffic,
+					includeDuplicateLastRow,
+					ConfigurationUtil.validateMaxResultSize(maxResponseSize));
+
+			// return http status '204 No Content' if the pcaps response size is
+			// 0; this null/empty check runs before the logging below so a null
+			// response can no longer NPE on toString()
+			if (pcapResponse == null || pcapResponse.getResponseSize() == 0) {
+
+				return Response.status(Response.Status.NO_CONTENT).build();
+			}
+
+			LOGGER.info("pcaps response in REST layer ="
+					+ pcapResponse.toString());
+
+			// return http status '206 Partial Content', the partial response
+			// file and
+			// 'lastRowKey' header , if the pcaps response status is 'PARTIAL'
+
+			response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+			if (pcapResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+
+				response.setHeader(HEADER_PARTIAL_RESPONE_KEY,
+						pcapResponse.getLastRowKey());
+
+				return Response
+						.ok(pcapResponse.getPcaps(),
+								MediaType.APPLICATION_OCTET_STREAM).status(206)
+						.build();
+
+			}
+
+		} catch (IOException e) {
+			LOGGER.error(
+					"Exception occurred while fetching Pcaps for the keys :"
+							+ keys.toString(), e);
+			throw e;
+		}
+
+		// return http status '200 OK' along with the complete pcaps response
+		// file, and headers
+		return Response
+				.ok(pcapResponse.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+
+	}
+
+	/**
+	 * Fetches pcaps for the given row-key range, optionally bounded by a
+	 * start/end time and a maximum response size.
+	 */
+	@GET
+	@Path("/pcapGetter/getPcapsByKeyRange")
+	public Response getPcapsByKeyRange(
+			@QueryParam("startKey") String startKey,
+			@QueryParam("endKey") String endKey,
+			@QueryParam("maxResponseSize") String maxResponseSize,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@Context HttpServletResponse servlet_response) throws IOException {
+
+		if (startKey == null || startKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'start key' must not be null or empty").build();
+
+		// this guard previously re-checked startKey (copy/paste), so endKey
+		// was never actually validated despite the error message
+		if (endKey == null || endKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'end key' must not be null or empty").build();
+
+		byte[] response = null;
+		try {
+			IPcapScanner pcapScanner = PcapScannerHBaseImpl.getInstance();
+			response = pcapScanner.getPcaps(startKey, endKey,
+					ConfigurationUtil.validateMaxResultSize(maxResponseSize),
+					startTime, endTime);
+			if (response == null || response.length == 0) {
+
+				return Response.status(Response.Status.NO_CONTENT).build();
+
+			}
+			servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+		} catch (IOException e) {
+			LOGGER.error(
+					"Exception occurred while fetching Pcaps for the key range : startKey="
+							+ startKey + ", endKey=" + endKey, e);
+			throw e;
+		}
+		// return http status '200 OK' along with the complete pcaps response
+		// file, and headers
+		return Response
+				.ok(response, MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	}
+
+	/**
+	 * Fetches pcaps matching the given connection identifiers. All five
+	 * identifier parameters are mandatory; the partial session key is built
+	 * from them and looked up like a regular key query.
+	 */
+	@GET
+	@Path("/pcapGetter/getPcapsByIdentifiers")
+	public Response getPcapsByIdentifiers(
+			@QueryParam("srcIp") String srcIp,
+			@QueryParam("dstIp") String dstIp,
+			@QueryParam("protocol") String protocol,
+			@QueryParam("srcPort") String srcPort,
+			@QueryParam("dstPort") String dstPort,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@DefaultValue("false") @QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
+			@Context HttpServletResponse servlet_response)
+
+			throws IOException {
+
+		if (srcIp == null || srcIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcIp' must not be null or empty").build();
+
+		if (dstIp == null || dstIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstIp' must not be null or empty").build();
+
+		if (protocol == null || protocol.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'protocol' must not be null or empty").build();
+
+		if (srcPort == null || srcPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcPort' must not be null or empty").build();
+
+		if (dstPort == null || dstPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstPort' must not be null or empty").build();
+
+		PcapsResponse response = null;
+		try {
+			String sessionKey = PcapUtils.getPartialSessionKey(srcIp, dstIp,
+					protocol, srcPort, dstPort);
+			LOGGER.info("sessionKey =" + sessionKey);
+			IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+			response = pcapGetter.getPcaps(Arrays.asList(sessionKey), null,
+					startTime, endTime, includeReverseTraffic, false,
+					ConfigurationUtil.getDefaultResultSize());
+			if (response == null || response.getResponseSize() == 0) {
+				return Response.status(Response.Status.NO_CONTENT).build();
+			}
+			servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+		} catch (IOException e) {
+			LOGGER.error("Exception occurred while fetching Pcaps by identifiers :",
+					e);
+			throw e;
+		}
+		// return http status '200 OK' along with the complete pcaps response
+		// file, and headers
+		return Response
+				.ok(response.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	}
+
+	/**
+	 * This method parses the each value in the List using delimiter ',' and
+	 * builds a new List;.
+	 * 
+	 * @param keys
+	 *            list of keys to be parsed
+	 * @return list of keys
+	 */
+	@VisibleForTesting
+	List<String> parseKeys(List<String> keys) {
+		List<String> parsedKeys = new ArrayList<String>();
+		for (String key : keys) {
+			parsedKeys.addAll(Arrays.asList(StringUtils.split(
+					StringUtils.trim(key), ",")));
+		}
+		return parsedKeys;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapScannerHBaseImpl.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapScannerHBaseImpl.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapScannerHBaseImpl.java
new file mode 100644
index 0000000..f163408
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapScannerHBaseImpl.java
@@ -0,0 +1,319 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.metron.pcap.PcapMerger;
+
+/**
+ * Singleton class which integrates with HBase table and returns sorted pcaps
+ * based on the timestamp for the given range of keys. Creates HConnection if it
+ * is not already created and the same connection instance is being used for all
+ * requests
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapScannerHBaseImpl implements IPcapScanner {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapScannerHBaseImpl.class);
+
+  /** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+  private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+  /** The pcap scanner h base. */
+  private static IPcapScanner pcapScannerHBase = null;
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String, long, long, long)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey, long maxResultSize,
+      long startTime, long endTime) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    byte[] cf = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.family"));
+    byte[] cq = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.qualifier"));
+    // create scan request
+    Scan scan = createScanRequest(cf, cq, startKey, endKey, maxResultSize,
+        startTime, endTime);
+    List<byte[]> pcaps = new ArrayList<byte[]>();
+    HTable table = null;
+    try {
+      pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+    } catch (IOException e) {
+      LOGGER.error(
+          "Exception occurred while fetching Pcaps for the key range : startKey="
+              + startKey + ", endKey=" + endKey, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = getConnectionRetryLimit();
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the existing
+                                                      // connection and retry,
+                                                      // it will create a new
+                                                      // HConnection
+            pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              System.out.println("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      } else {
+        throw e;
+      }
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    byte[] response = baos.toByteArray();
+    return response;
+  }
+
+  /**
+   * Creates the scan request.
+   * 
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @param maxResultSize
+   *          the max result size
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @return the scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(byte[] cf, byte[] cq, String startKey, String endKey,
+      long maxResultSize, long startTime, long endTime) throws IOException {
+    Scan scan = new Scan();
+    scan.addColumn(cf, cq);
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+    scan.setStartRow(startKey.getBytes());
+    if (endKey != null) {
+      scan.setStopRow(endKey.getBytes());
+    }
+    scan.setMaxResultSize(maxResultSize);
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+    }
+    // create Scan request;
+    if (setTimeRange) {
+      scan.setTimeRange(startTime, endTime);
+    }
+    return scan;
+  }
+
+  /**
+   * Scan pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   * @param table
+   *          the table
+   * @param scan
+   *          the scan
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @return the list
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  List<byte[]> scanPcaps(List<byte[]> pcaps, HTable table, Scan scan,
+      byte[] cf, byte[] cq) throws IOException {
+    LOGGER.info("Scan =" + scan.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(cf, cq);
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    LOGGER.info("sorted cells :" + scannedCells.toString());
+    for (Cell sortedCell : scannedCells) {
+      pcaps.add(CellUtil.cloneValue(sortedCell));
+    }
+    return pcaps;
+  }
+
+  /**
+   * Gets the connection retry limit.
+   * 
+   * @return the connection retry limit
+   */
+  private int getConnectionRetryLimit() {
+    return ConfigurationUtil.getConfiguration().getInt(
+        "hbase.hconnection.retries.number", DEFAULT_HCONNECTION_RETRY_LIMIT);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey) throws IOException {
+    Assert.hasText(startKey, "startKey must no be null or empty");
+    Assert.hasText(endKey, "endKey must no be null or empty");
+    return getPcaps(startKey, endKey, ConfigurationUtil.getDefaultResultSize(),
+        -1, -1);
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapScanner singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapScanner getInstance() throws IOException {
+    if (pcapScannerHBase == null) {
+      synchronized (PcapScannerHBaseImpl.class) {
+        if (pcapScannerHBase == null) {
+          pcapScannerHBase = new PcapScannerHBaseImpl();
+        }
+      }
+    }
+    return pcapScannerHBase;
+  }
+
+  /**
+   * Instantiates a new pcap scanner h base impl.
+   */
+  private PcapScannerHBaseImpl() {
+  }
+
+  /**
+   * The main method.
+   */
+  // public static void main(String[] args) throws IOException {
+  // if (args == null || args.length < 3) {
+  // usage();
+  // return;
+  // }
+  // String outputFileName = null;
+  // String startKey = null;
+  // String stopKey = null;
+  // outputFileName = args[0];
+  // startKey = args[1];
+  // if (args.length > 2) { // NOPMD by sheetal on 1/29/14 3:55 PM
+  // stopKey = args[2];
+  // }
+  // PcapScannerHBaseImpl downloader = new PcapScannerHBaseImpl();
+  // byte[] pcaps = downloader.getPcaps(startKey, stopKey, defaultResultSize, 0,
+  // Long.MAX_VALUE);
+  // File file = new File(outputFileName);
+  // FileUtils.write(file, "", false);
+  // ByteArrayOutputStream baos = new ByteArrayOutputStream(); //
+  // $codepro.audit.disable
+  // // closeWhereCreated
+  // PcapMerger.merge(baos, pcaps);
+  // FileUtils.writeByteArrayToFile(file, baos.toByteArray(), true);
+  // }
+
+  /**
+   * Usage.
+   */
+  @SuppressWarnings("unused")
+  private static void usage() {
+    System.out.println("java " + PcapScannerHBaseImpl.class.getName() // NOPMD
+                                                                      // by
+        // sheetal
+        // <!-- //
+        // $codepro.audit.disable
+        // debuggingCode
+        // -->
+        // on
+        // 1/29/14
+        // 3:55
+        // PM
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapsResponse.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapsResponse.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapsResponse.java
new file mode 100644
index 0000000..b2fada1
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/PcapsResponse.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.metron.pcap.PcapMerger;
+
+
+
+/**
+ * Holds pcaps data, status and the partial response key.
+ * 
+ * @author Sayi
+ */
+public class PcapsResponse {
+
+  /**
+   * The Enum Status.
+   */
+  public enum Status {
+    
+    /** The partial. */
+    PARTIAL, 
+ /** The complete. */
+ COMPLETE
+  };
+
+  /** response of the processed keys. */
+  private List<byte[]> pcaps = new ArrayList<byte[]>();;
+
+  /** partial response key. */
+  private String lastRowKey;
+
+  /** The status. */
+  private Status status = Status.COMPLETE;
+
+  /**
+   * Sets the pcaps.
+   * 
+   * @param pcaps
+   *          the new pcaps
+   */
+  public void setPcaps(List<byte[]> pcaps) {
+    this.pcaps = pcaps;
+  }
+
+  /**
+   * Adds the pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   */
+  public void addPcaps(byte[] pcaps) {
+    this.pcaps.add(pcaps);
+  }
+
+  /**
+   * Gets the partial response key.
+   * 
+   * @return the partial response key
+   */
+  public String getLastRowKey() {
+    return lastRowKey;
+  }
+
+  /**
+   * Sets the partial response key.
+   * 
+   * @param lastRowKey
+   *          the last row key
+   */
+  public void setLastRowKey(String lastRowKey) {
+    this.lastRowKey = lastRowKey;
+  }
+
+  /**
+   * Gets the status.
+   * 
+   * @return the status
+   */
+  public Status getStatus() {
+    return status;
+  }
+
+  /**
+   * Sets the status.
+   * 
+   * @param status
+   *          the new status
+   */
+  public void setStatus(Status status) {
+    this.status = status;
+  }
+
+  /**
+   * Checks if is resonse size within limit.
+   * 
+   * @param maxResultSize
+   *          the max result size
+   * @return true, if is resonse size within limit
+   */
+  public boolean isResonseSizeWithinLimit(long maxResultSize) {
+    // System.out.println("isResonseSizeWithinLimit() : getResponseSize() < (input|default result size - maximum packet size ) ="+
+    // getResponseSize()+ " < " + ( maxResultSize
+    // -ConfigurationUtil.getMaxRowSize()));
+    return getResponseSize() < (maxResultSize - ConfigurationUtil
+        .getMaxRowSize());
+  }
+
+  /**
+   * Gets the response size.
+   * 
+   * @return the response size
+   */
+  public long getResponseSize() {
+    long responseSize = 0;
+    for (byte[] pcap : this.pcaps) {
+      responseSize = responseSize + pcap.length;
+    }
+    return responseSize;
+  }
+
+  /**
+   * Gets the pcaps.
+   * 
+   * @return the pcaps
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps() throws IOException {
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    return baos.toByteArray();
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "PcapsResponse [lastRowKey=" + lastRowKey
+        + ", status=" + status + ", pcapsSize="
+        + String.valueOf(getResponseSize()) + "]";
+  }
+}



[22/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockGeoAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockGeoAdapter.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockGeoAdapter.java
new file mode 100644
index 0000000..70791fe
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockGeoAdapter.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.mock;
+
+import com.google.common.base.Joiner;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.json.simple.JSONObject;
+
+import java.io.Serializable;
+
+public class MockGeoAdapter implements EnrichmentAdapter<CacheKey>,
+        Serializable {
+
+  public static final String DEFAULT_LOC_ID = "1";
+  public static final String DEFAULT_COUNTRY = "test country";
+  public static final String DEFAULT_CITY = "test city";
+  public static final String DEFAULT_POSTAL_CODE = "test postalCode";
+  public static final String DEFAULT_LATITUDE = "test latitude";
+  public static final String DEFAULT_LONGITUDE = "test longitude";
+  public static final String DEFAULT_DMACODE= "test dmaCode";
+  public static final String DEFAULT_LOCATION_POINT= Joiner.on(',').join(DEFAULT_LONGITUDE, DEFAULT_LATITUDE);
+
+  @Override
+  public void logAccess(CacheKey value) {
+
+  }
+
+  public JSONObject enrich(CacheKey cache ) {
+    JSONObject enriched = new JSONObject();
+    enriched.put("locID", DEFAULT_LOC_ID);
+    enriched.put("country", DEFAULT_COUNTRY);
+    enriched.put("city", DEFAULT_CITY);
+    enriched.put("postalCode", DEFAULT_POSTAL_CODE);
+    enriched.put("latitude", DEFAULT_LATITUDE);
+    enriched.put("longitude", DEFAULT_LONGITUDE);
+    enriched.put("dmaCode", DEFAULT_DMACODE);
+    enriched.put("location_point", DEFAULT_LOCATION_POINT);
+    return enriched;
+  }
+
+  public boolean initializeAdapter() {
+    return true;
+  }
+
+  public void cleanup() {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockHBaseConnector.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockHBaseConnector.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockHBaseConnector.java
new file mode 100644
index 0000000..f40b366
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/mock/MockHBaseConnector.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.mock;
+
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
+import org.apache.metron.hbase.Connector;
+import org.apache.metron.hbase.TupleTableConfig;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class MockHBaseConnector extends Connector {
+    static List<Put> puts = Collections.synchronizedList(new ArrayList<Put>());
+    public MockHBaseConnector(TupleTableConfig conf, String _quorum, String _port) throws IOException {
+        super(conf, _quorum, _port);
+    }
+
+    @Override
+    public void put(Put put) throws InterruptedIOException, RetriesExhaustedWithDetailsException {
+        puts.add(put);
+    }
+
+    @Override
+    public void close() {
+
+    }
+    public static void clear() {
+        puts.clear();
+    }
+    public static List<Put> getPuts() {
+        return puts;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/KafkaUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/KafkaUtil.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/KafkaUtil.java
new file mode 100644
index 0000000..e3a3110
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/KafkaUtil.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.utils;
+
+
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.producer.Producer;
+import kafka.producer.KeyedMessage;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class KafkaUtil {
+  public static <K,V> void send(Producer<K,V> producer, K key, V value, String topic) {
+    producer.send(new KeyedMessage<>(topic, key,value));
+  }
+
+  public static <K,V> void send(Producer<K,V> producer, Iterable<Map.Entry<K,V>> messages, String topic) {
+    for(Map.Entry<K,V> kv : messages) {
+      send(producer, kv.getKey(), kv.getValue(), topic);
+    }
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/SampleUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/SampleUtil.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/SampleUtil.java
new file mode 100644
index 0000000..a172760
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/SampleUtil.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.utils;
+
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class SampleUtil {
+
+  public static
+  Configurations getSampleConfigs() throws IOException {
+    Configurations configurations = new Configurations();
+    configurations.updateGlobalConfig(ConfigurationsUtils.readGlobalConfigFromFile(TestConstants.SAMPLE_CONFIG_PATH));
+    Map<String, byte[]> sensorEnrichmentConfigs = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    for(String sensorType: sensorEnrichmentConfigs.keySet()) {
+      configurations.updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfigs.get(sensorType));
+    }
+    return configurations;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/TestUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/TestUtils.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/TestUtils.java
new file mode 100644
index 0000000..67acb33
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/utils/TestUtils.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.utils;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
public class TestUtils {

  /**
   * Reads a text file and returns each line encoded as bytes.
   *
   * <p>The reader is opened in a try-with-resources block so it is closed even
   * when reading fails partway through (the original leaked the reader on
   * exception).
   *
   * <p>NOTE(review): both FileReader and String#getBytes() use the platform
   * default charset here, matching the original behavior — confirm callers do
   * not depend on a specific encoding.
   *
   * @param samplePath path of the sample data file to read
   * @return one byte[] per line, in file order
   * @throws IOException if the file cannot be opened or read
   */
  public static List<byte[]> readSampleData(String samplePath) throws IOException {
    List<byte[]> ret = new ArrayList<>();
    try (BufferedReader br = new BufferedReader(new FileReader(samplePath))) {
      String line;
      while ((line = br.readLine()) != null) {
        ret.add(line.getBytes());
      }
    }
    return ret;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/config/global.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/config/global.json b/metron-platform/metron-integration-test/src/main/resources/sample/config/global.json
new file mode 100644
index 0000000..721f70f
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/config/global.json
@@ -0,0 +1,10 @@
+{
+  "es.clustername": "metron",
+  "es.ip": "localhost",
+  "es.port": 9300,
+  "es.date.format": "yyyy.MM.dd.HH",
+  "solr.zookeeper": "localhost:2181",
+  "solr.collection": "metron",
+  "solr.numShards": 1,
+  "solr.replicationFactor": 1
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/bro.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/bro.json b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/bro.json
new file mode 100644
index 0000000..8886495
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/bro.json
@@ -0,0 +1,19 @@
+{
+  "index": "bro",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_dst_addr" : [ "malicious_ip" ]
+    ,"ip_src_addr" : [ "malicious_ip" ]
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/pcap.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/pcap.json b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/pcap.json
new file mode 100644
index 0000000..82c7c5e
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/pcap.json
@@ -0,0 +1,13 @@
+{
+  "index": "pcap",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_src_addr", "ip_dst_addr"],
+    "host": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "threatIntelFieldMap":
+  {
+    "ip": ["ip_src_addr", "ip_dst_addr"]
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/snort.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/snort.json b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/snort.json
new file mode 100644
index 0000000..b7cc22b
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/snort.json
@@ -0,0 +1,19 @@
+{
+  "index": "snort",
+  "batchSize": 1,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_dst_addr" : [ "malicious_ip" ]
+    ,"ip_src_addr" : [ "malicious_ip" ]
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/yaf.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/yaf.json b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/yaf.json
new file mode 100644
index 0000000..98da265
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/config/sensors/yaf.json
@@ -0,0 +1,25 @@
+{
+  "index": "yaf",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_src_addr", "ip_dst_addr"],
+    "host": ["ip_src_addr", "ip_dst_addr"],
+    "hbaseEnrichment" : ["ip_src_addr", "ip_dst_addr"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_src_addr" : ["malicious_ip"],
+    "ip_dst_addr" : ["malicious_ip"]
+  },
+  "fieldToEnrichmentTypeMap":
+  {
+    "ip_src_addr" : ["playful_classification"],
+    "ip_dst_addr" : ["playful_classification"]
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleIndexed/YafIndexed
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleIndexed/YafIndexed b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleIndexed/YafIndexed
new file mode 100644
index 0000000..1c38406
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleIndexed/YafIndexed
@@ -0,0 +1,10 @@
+{"adapter.threatinteladapter.end.ts":"1457102731219","enrichments.geo.dip.location_point":"test longitude,test latitude","isn":"22efa001","index.elasticsearchwriter.ts":"1457102731220","dip":"10.0.2.15","dp":39468,"rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AS|       0|       0|       0|22efa001|00000000|000|000|       1|      44|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichmentjoinbolt.joiner.ts":"1457102731206","adapter.hostfromjsonlistadapter.begin.ts":"1457102731185","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":44,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731185","threatintelsplitterbolt.splitter.ts":"1457102731207","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988512,
 "adapter.threatinteladapter.begin.ts":"1457102731210","riflags":0,"proto":6,"enrichments.host.dip.known_info.local":"YES","enrichments.geo.dip.longitude":"test longitude","iflags":"AS","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731198","adapter.hostfromjsonlistadapter.end.ts":"1457102731197","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731220","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"216.21.170.221","rtag":0,"sp":80,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453994988512,"risn":0,"enrichments.host.dip.known_info.type":"printer","end_time":1453994988512,"enrichments.ho
 st.dip.known_info.asset_value":"important","source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731221","enrichments.geo.dip.location_point":"test longitude,test latitude","enrichments.host.sip.known_info.asset_value":"important","isn":10000000,"index.elasticsearchwriter.ts":"1457102731221","dip":"10.0.2.3","dp":53,"rpkt":0,"original_string":"2016-01-28 15:29:48.502|2016-01-28 15:29:48.502|   0.000|   0.000| 17|                               10.0.2.15|37299|                                10.0.2.3|   53|       A|       0|       0|       0|10000000|00000000|000|000|       1|      56|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichments.host.sip.known_info.type":"printer","enrichmentjoinbolt.joiner.ts":"1457102731208","adapter.hostfromjsonlistadapter.begin.ts":"1457102731197","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":56,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731198","threatintelsplitterbolt.splitt
 er.ts":"1457102731210","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988502,"adapter.threatinteladapter.begin.ts":"1457102731219","riflags":0,"proto":17,"enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731198","adapter.hostfromjsonlistadapter.end.ts":"1457102731197","enrichments.host.sip.known_info.local":"YES","threatintels.ip.dip.ip_threat_intel":"alert","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731221","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.15","rtag":0,"sp":37299,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latit
 ude":"test latitude","timestamp":1453994988502,"risn":0,"end_time":1453994988502,"is_alert":"true","source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731221","enrichments.geo.dip.location_point":"test longitude,test latitude","isn":0,"index.elasticsearchwriter.ts":"1457102731222","dip":"10.0.2.15","dp":37299,"rpkt":0,"original_string":"2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|37299|       A|       0|       0|       0|00000000|00000000|000|000|       1|     312|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichmentjoinbolt.joiner.ts":"1457102731209","adapter.hostfromjsonlistadapter.begin.ts":"1457102731197","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":312,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731198","threatintelsplitterbolt.splitter.ts":"1457102731210","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988504,"adapter
 .threatinteladapter.begin.ts":"1457102731221","riflags":0,"proto":17,"enrichments.host.dip.known_info.local":"YES","enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731222","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.3","rtag":0,"sp":53,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453994988504,"risn":0,"enrichments.host.dip.known_info.type":"printer","end_time":1453994988504,"enrichments.host.dip.known_i
 nfo.asset_value":"important","is_alert":"true","source.type":"yaf","threatintels.ip.sip.ip_threat_intel":"alert","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731222","enrichments.geo.dip.location_point":"test longitude,test latitude","enrichments.host.sip.known_info.asset_value":"important","isn":0,"index.elasticsearchwriter.ts":"1457102731222","dip":"10.0.2.3","dp":53,"rpkt":0,"original_string":"2016-01-28 15:29:48.504|2016-01-28 15:29:48.504|   0.000|   0.000| 17|                               10.0.2.15|56303|                                10.0.2.3|   53|       A|       0|       0|       0|00000000|00000000|000|000|       1|      56|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichments.host.sip.known_info.type":"printer","enrichmentjoinbolt.joiner.ts":"1457102731209","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":56,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterbolt.splitter.ts":
 "1457102731211","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988504,"adapter.threatinteladapter.begin.ts":"1457102731221","riflags":0,"proto":17,"enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.host.sip.known_info.local":"YES","threatintels.ip.dip.ip_threat_intel":"alert","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731222","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.15","rtag":0,"sp":56303,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"t
 est latitude","timestamp":1453994988504,"risn":0,"end_time":1453994988504,"is_alert":"true","source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731222","enrichments.geo.dip.location_point":"test longitude,test latitude","isn":0,"index.elasticsearchwriter.ts":"1457102731222","dip":"10.0.2.15","dp":56303,"rpkt":0,"original_string":"2016-01-28 15:29:48.506|2016-01-28 15:29:48.506|   0.000|   0.000| 17|                                10.0.2.3|   53|                               10.0.2.15|56303|       A|       0|       0|       0|00000000|00000000|000|000|       1|      84|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichmentjoinbolt.joiner.ts":"1457102731210","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":84,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterbolt.splitter.ts":"1457102731212","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988506,"adapter.
 threatinteladapter.begin.ts":"1457102731222","riflags":0,"proto":17,"enrichments.host.dip.known_info.local":"YES","enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731222","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.3","rtag":0,"sp":53,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453994988506,"risn":0,"enrichments.host.dip.known_info.type":"printer","end_time":1453994988506,"enrichments.host.dip.known_in
 fo.asset_value":"important","is_alert":"true","source.type":"yaf","threatintels.ip.sip.ip_threat_intel":"alert","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731222","enrichments.geo.dip.location_point":"test longitude,test latitude","enrichments.host.sip.known_info.asset_value":"important","isn":"58c52fca","index.elasticsearchwriter.ts":"1457102732038","dip":"216.21.170.221","dp":80,"rpkt":0,"original_string":"2016-01-28 15:29:48.508|2016-01-28 15:29:48.508|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       S|       0|       0|       0|58c52fca|00000000|000|000|       1|      60|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichments.host.sip.known_info.type":"printer","enrichmentjoinbolt.joiner.ts":"1457102731210","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":60,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterbol
 t.splitter.ts":"1457102731212","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988508,"adapter.threatinteladapter.begin.ts":"1457102731222","riflags":0,"proto":6,"enrichments.geo.dip.longitude":"test longitude","iflags":"S","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.host.sip.known_info.local":"YES","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731223","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.15","rtag":0,"sp":39468,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":145399
 4988508,"risn":0,"end_time":1453994988508,"source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731223","enrichments.geo.dip.location_point":"test longitude,test latitude","enrichments.host.sip.known_info.asset_value":"important","isn":"58c52fcb","index.elasticsearchwriter.ts":"1457102732038","dip":"216.21.170.221","dp":80,"rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|       A|       0|       0|       0|58c52fcb|00000000|000|000|       1|      40|       0|       0|    0|idle ","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichments.host.sip.known_info.type":"printer","enrichmentjoinbolt.joiner.ts":"1457102731210","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":40,"end_reason":"idle ","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterb
 olt.splitter.ts":"1457102731212","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988512,"adapter.threatinteladapter.begin.ts":"1457102731223","riflags":0,"proto":6,"enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.host.sip.known_info.local":"YES","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731223","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.15","rtag":0,"sp":39468,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453
 994988512,"risn":0,"end_time":1453994988512,"source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731223","enrichments.geo.dip.location_point":"test longitude,test latitude","enrichments.host.sip.known_info.asset_value":"important","isn":"58c52fcb","index.elasticsearchwriter.ts":"1457102732038","dip":"216.21.170.221","dp":80,"rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                               10.0.2.15|39468|                          216.21.170.221|   80|      AP|       0|       0|       0|58c52fcb|00000000|000|000|       1|     148|       0|       0|    0|idle ","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichments.host.sip.known_info.type":"printer","enrichmentjoinbolt.joiner.ts":"1457102731210","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":148,"end_reason":"idle ","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitter
 bolt.splitter.ts":"1457102731212","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988512,"adapter.threatinteladapter.begin.ts":"1457102731223","riflags":0,"proto":6,"enrichments.geo.dip.longitude":"test longitude","iflags":"AP","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.host.sip.known_info.local":"YES","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731225","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"10.0.2.15","rtag":0,"sp":39468,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":14
 53994988512,"risn":0,"end_time":1453994988512,"source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731225","enrichments.geo.dip.location_point":"test longitude,test latitude","isn":"22efa002","index.elasticsearchwriter.ts":"1457102732038","dip":"10.0.2.15","dp":39468,"rpkt":0,"original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|       A|       0|       0|       0|22efa002|00000000|000|000|       1|      40|       0|       0|    0|idle ","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichmentjoinbolt.joiner.ts":"1457102731211","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":40,"end_reason":"idle ","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterbolt.splitter.ts":"1457102731212","enrichments.geo.dip.postalCode":"test postalCode","start_time":145399498851
 2,"adapter.threatinteladapter.begin.ts":"1457102731223","riflags":0,"proto":6,"enrichments.host.dip.known_info.local":"YES","enrichments.geo.dip.longitude":"test longitude","iflags":"A","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731225","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"216.21.170.221","rtag":0,"sp":80,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453994988512,"risn":0,"enrichments.host.dip.known_info.type":"printer","end_time":1453994988512,"enrichments.h
 ost.dip.known_info.asset_value":"important","source.type":"yaf","rtt":"0.000"}
+{"adapter.threatinteladapter.end.ts":"1457102731226","enrichments.geo.dip.location_point":"test longitude,test latitude","isn":"22efa002","index.elasticsearchwriter.ts":"1457102732038","dip":"10.0.2.15","dp":39468,"rpkt":0,"original_string":"2016-01-28 15:29:48.562|2016-01-28 15:29:48.562|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AP|       0|       0|       0|22efa002|00000000|000|000|       1|     604|       0|       0|    0|idle","enrichments.geo.dip.locID":"1","enrichments.geo.sip.city":"test city","enrichmentjoinbolt.joiner.ts":"1457102731211","adapter.hostfromjsonlistadapter.begin.ts":"1457102731198","tag":0,"enrichments.geo.dip.dmaCode":"test dmaCode","app":0,"oct":604,"end_reason":"idle","enrichments.geo.sip.locID":"1","adapter.mockgeoadapter.begin.ts":"1457102731199","threatintelsplitterbolt.splitter.ts":"1457102731213","enrichments.geo.dip.postalCode":"test postalCode","start_time":1453994988562
 ,"adapter.threatinteladapter.begin.ts":"1457102731226","riflags":0,"proto":6,"enrichments.host.dip.known_info.local":"YES","enrichments.geo.dip.longitude":"test longitude","iflags":"AP","uflags":0,"adapter.mockgeoadapter.end.ts":"1457102731199","adapter.hostfromjsonlistadapter.end.ts":"1457102731198","enrichments.geo.sip.postalCode":"test postalCode","duration":"0.000","enrichments.geo.dip.country":"test country","threatinteljoinbolt.joiner.ts":"1457102731226","enrichments.geo.dip.latitude":"test latitude","enrichments.geo.sip.country":"test country","enrichments.geo.dip.city":"test city","enrichments.geo.sip.dmaCode":"test dmaCode","pkt":1,"enrichments.geo.sip.location_point":"test longitude,test latitude","ruflags":0,"roct":0,"sip":"216.21.170.221","rtag":0,"sp":80,"enrichments.geo.sip.longitude":"test longitude","enrichments.geo.sip.latitude":"test latitude","timestamp":1453994988562,"risn":0,"enrichments.host.dip.known_info.type":"printer","end_time":1453994988562,"enrichments.h
 ost.dip.known_info.asset_value":"important","source.type":"yaf","rtt":"0.000"}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/.PCAPExampleOutput.crc
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/.PCAPExampleOutput.crc b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/.PCAPExampleOutput.crc
new file mode 100644
index 0000000..6e53497
Binary files /dev/null and b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/.PCAPExampleOutput.crc differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/AsaOutput
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/AsaOutput b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/AsaOutput
new file mode 100644
index 0000000..6009d48
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/resources/sample/data/SampleInput/AsaOutput
@@ -0,0 +1,100 @@
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609001: Built local-host inside:10.22.8.205
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.74/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167725 for Outside_VPN:147.111.72.16/26436 to DMZ-Inside:10.22.8.53/443 duration 0:00:00 bytes 9687 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805593 for outside:10.22.8.223/59614(LOCAL\user.name) to inside:10.22.8.78/8102 duration 0:00:07 bytes 3433 TCP FINs (user.name)
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245503 for outside:10.22.8.233/54209 (10.22.8.233/54209) to inside:198.111.72.238/443 (198.111.72.238/443) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806031 for outside:10.22.8.17/58633 (10.22.8.17/58633)(LOCAL\user.name) to inside:10.22.8.12/389 (10.22.8.12/389) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168292 for DMZ-Inside:10.22.8.51/51231 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2103 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 186.111.72.11/80 to 204.111.72.226/45019 flags SYN ACK  on interface Outside_VPN
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604987 for outside:209.111.72.151/443 to inside:10.22.8.188/64306 duration 0:00:31 bytes 10128 TCP FINs
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302014: Teardown TCP connection 17604999 for outside:209.111.72.151/443 to inside:10.22.8.188/64307 duration 0:00:30 bytes 6370 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167347 for Outside_VPN:198.111.72.24/2134 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9785 TCP FINs
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245506 for outside:10.22.8.110/49886 (10.22.8.110/49886) to inside:192.111.72.8/8612 (192.111.72.8/8612) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805993 for outside:10.22.8.89/56917(LOCAL\user.name) to inside:216.111.72.126/443 duration 0:00:00 bytes 0 TCP FINs (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-710005: UDP request discarded from 10.22.8.223/49192 to outside:224.111.72.252/5355
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488166143 for Outside_VPN:198.111.72.64/80 to Inside-Trunk:10.22.8.39/54883 duration 0:00:04 bytes 1148 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.84/445 to 10.22.8.219/60726 flags ACK  on interface inside
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168344 for DMZ-Inside:10.22.8.53/61682 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 5648 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168345 for DMZ-Inside:10.22.8.16/31454 to Inside-Trunk:10.22.8.21/443 duration 0:00:00 bytes 756 TCP FINs
+<182>Jan  5 20:22:35 10.22.8.4 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.12/0 gaddr 10.22.8.45/1 laddr 10.22.8.45/1
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 50.111.72.230/80 to 204.111.72.254/53077 flags RST  on interface Outside_VPN
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603649 for outside:206.111.72.2/161 to inside:10.22.8.48/63297 duration 0:02:01 bytes 209
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603650 for outside:207.111.72.122/161 to inside:10.22.8.48/63298 duration 0:02:01 bytes 209
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603652 for outside:206.111.72.2/161 to inside:10.22.8.48/63300 duration 0:02:01 bytes 115
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-302016: Teardown UDP connection 17603657 for outside:206.111.72.2/161 to inside:10.22.8.48/63306 duration 0:02:01 bytes 115
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168436 for DMZ-Inside:10.22.8.51/51235 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2497 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167656 for Outside_VPN:69.111.72.70/21560 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 11410 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806050 for outside:10.22.8.62/53965 (10.22.8.62/53965)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806052 for outside:10.22.8.62/56500 (10.22.8.62/56500)(LOCAL\user.name) to inside:198.111.72.83/443 (198.111.72.83/443) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806054 for outside:10.22.8.62/56502 (10.22.8.62/56502)(LOCAL\user.name) to inside:50.111.72.252/443 (50.111.72.252/443) (user.name)
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-305011: Built dynamic TCP translation from inside:10.22.8.188/64340 to outside:206.111.72.41/2013
+<166>Jan  5 15:52:35 10.22.8.33 %ASA-6-305012: Teardown dynamic UDP translation from inside:192.111.72.2/62251 to outside:79.111.72.174/21311 duration 0:02:30
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806058 for outside:10.22.8.221/56631 (10.22.8.221/56631)(LOCAL\user.name) to inside:10.22.8.26/389 (10.22.8.26/389) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168189 for Outside_VPN:209.111.72.10/56619 to DMZ-Inside:10.22.8.53/443 duration 0:00:00 bytes 2477 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.112/52235 to 198.111.72.227/80 flags ACK  on interface Inside-Trunk
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167192 for Outside_VPN:115.111.72.7/49196 to DMZ-Inside:10.22.8.57/443 duration 0:00:02 bytes 20588 TCP Reset-O
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212806055 for outside:10.22.8.62/55383(LOCAL\user.name) to inside:10.22.8.85/53 duration 0:00:00 bytes 349 (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168380 for Outside_VPN:74.111.72.12/443 to Inside-Trunk:10.22.8.39/54894 duration 0:00:00 bytes 5701 TCP FINs
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245522 for outside:10.22.8.147/56343 (10.22.8.147/56343) to inside:209.111.72.151/443 (209.111.72.151/443) (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168443 for Outside_VPN:23.111.72.27/80 to Inside-Trunk:10.22.8.81/64713 duration 0:00:00 bytes 2426 TCP FINs
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488111566 for Outside_VPN:131.111.72.49/443 to Inside-Trunk:10.22.8.127/56558 duration 0:01:57 bytes 3614 TCP Reset-O
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806061 for outside:10.22.8.17/58635 (10.22.8.17/58635)(LOCAL\user.name) to inside:10.22.8.12/389 (10.22.8.12/389) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806010 for outside:10.22.8.33/60223(LOCAL\user.name) to inside:10.22.8.86/389 duration 0:00:00 bytes 416 TCP Reset-I (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806062 for outside:10.22.8.221/56632 (10.22.8.221/56632)(LOCAL\user.name) to inside:10.22.8.73/389 (10.22.8.73/389) (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168231 for Outside_VPN:204.111.72.243/3011 to Inside-Trunk:10.22.8.208/60037 duration 0:00:00 bytes 19415 TCP FINs
+<166>Jan  5 16:52:35 10.22.8.41 %ASA-6-302013: Built inbound TCP connection 45476108 for Outside:10.22.8.97/53484 (10.22.8.97/53484)(LOCAL\user.name) to Inside:141.111.72.70/7576 (141.111.72.70/7576) (user.name)
+<174>Jan  5 14:52:35 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245527 for outside:10.22.8.97/65195 (10.22.8.97/65195) to inside:17.111.72.212/5223 (17.111.72.212/5223) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806018 for outside:10.22.8.17/58632(LOCAL\user.name) to inside:10.22.8.12/389 duration 0:00:00 bytes 0 TCP FINs (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168562 for DMZ-Inside:10.22.8.51/51236 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 2273 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806065 for outside:10.22.8.62/59829 (10.22.8.62/59829)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212806067 for outside:10.22.8.143/62675 (10.22.8.143/62675)(LOCAL\user.name) to inside:141.111.72.12/389 (141.111.72.12/389) (user.name)
+<167>Jan  5 08:52:35 10.22.8.216 %ASA-7-710005: UDP request discarded from 10.22.8.223/61122 to outside:224.111.72.252/5355
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.143/0(LOCAL\user.name) gaddr 141.111.72.12/0 laddr 141.111.72.12/0 (user.name)
+<142>Jan  5 08:52:35 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168547 for Outside_VPN:107.111.72.102/80 to Inside-Trunk:10.22.8.54/61676 duration 0:00:00 bytes 1030 TCP FINs
+<166>Jan  5 08:52:35 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806078 for outside:10.22.8.221/56633 (10.22.8.221/56633)(LOCAL\user.name) to inside:10.22.8.20/389 (10.22.8.20/389) (user.name)
+<166>Jan  5 09:52:35 10.22.8.12 %ASA-6-305011: Built dynamic TCP translation from inside:10.22.8.83/59915 to outside:206.111.72.41/22776
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168044 for Outside_VPN:50.111.72.39/80 to Inside-Trunk:10.22.8.75/60877 duration 0:00:01 bytes 13304 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488118326 for Outside_VPN:23.111.72.27/80 to Inside-Trunk:10.22.8.229/57901 duration 0:01:45 bytes 1942 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488160565 for Outside_VPN:72.111.72.29/80 to Inside-Trunk:10.22.8.42/57520 duration 0:00:15 bytes 1025 TCP FINs
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488096423 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59096 duration 0:02:27 bytes 99347 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488095522 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59087 duration 0:02:29 bytes 154785 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488106557 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59134 duration 0:02:09 bytes 25319 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488096426 for Outside_VPN:72.111.72.43/80 to Inside-Trunk:10.22.8.127/59099 duration 0:02:27 bytes 26171 TCP Reset-O
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212806005 for outside:10.22.8.17/58630(LOCAL\user.name) to inside:10.22.8.12/389 duration 0:00:00 bytes 3942 TCP FINs (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302015: Built inbound UDP connection 212806085 for outside:10.22.8.143/54018 (10.22.8.143/54018)(LOCAL\user.name) to inside:10.22.8.85/53 (10.22.8.85/53) (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.96/2708 gaddr 10.22.8.30/0 laddr 10.22.8.30/0 (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245537 for outside:10.22.8.110/49886 (10.22.8.110/49886) to inside:192.111.72.11/8612 (192.111.72.11/8612) (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-106015: Deny TCP (no connection) from 10.22.8.85/58359 to 10.22.8.11/88 flags RST ACK  on interface Outside
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.82/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212799832 for outside:10.22.8.230/55549(LOCAL\user.name) to inside:10.22.8.11/389 duration 0:02:01 bytes 354 (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212799867 for outside:10.22.8.240/138(LOCAL\user.name) to inside:10.22.8.255/138 duration 0:02:01 bytes 214 (user.name)
+<167>Jan  5 08:52:36 10.22.8.216 %ASA-7-609001: Built local-host inside:67.111.72.204
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245544 for outside:10.22.8.227/54540 (10.22.8.227/54540) to inside:63.111.72.124/80 (63.111.72.124/80) (user.name)
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168135 for Outside_VPN:198.111.72.66/36797 to DMZ-Inside:10.22.8.53/80 duration 0:00:01 bytes 89039 TCP FINs
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302014: Teardown TCP connection 212805836 for outside:10.22.8.62/56471(LOCAL\user.name) to inside:208.111.72.1/443 duration 0:00:04 bytes 1700 TCP FINs (user.name)
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245546 for outside:10.22.8.227/54542 (10.22.8.227/54542) to inside:63.111.72.124/80 (63.111.72.124/80) (user.name)
+<166>Jan  5 08:52:36 10.22.8.216 %ASA-6-302021: Teardown ICMP connection for faddr 10.22.8.74/0(LOCAL\user.name) gaddr 10.22.8.205/0 laddr 10.22.8.205/0
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302020: Built outbound ICMP connection for faddr 10.22.8.96/2708 gaddr 10.22.8.30/0 laddr 10.22.8.30/0
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168388 for DMZ-Inside:10.22.8.10/49771 to Inside-Trunk:10.22.8.128/443 duration 0:00:00 bytes 19132 TCP Reset-O
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488168692 for DMZ-Inside:10.22.8.53/61694 to Inside-Trunk:10.22.8.174/40004 duration 0:00:00 bytes 5660 TCP FINs
+<174>Jan  5 14:52:36 10.22.8.212 %ASA-6-302013: Built inbound TCP connection 76245552 for outside:10.22.8.92/51042 (10.22.8.92/51042) to inside:10.22.8.193/9100 (10.22.8.193/9100) (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-302016: Teardown UDP connection 45474680 for Outside:10.22.8.49/137(LOCAL\user.name) to Inside:10.22.8.12/137 duration 0:02:03 bytes 486 (user.name)
+<166>Jan  5 16:52:36 10.22.8.41 %ASA-6-302016: Teardown UDP connection 45474694 for Outside:10.22.8.49/138(LOCAL\user.name) to Inside:10.22.8.12/138 duration 0:02:01 bytes 184 (user.name)
+<142>Jan  5 08:52:36 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488167720 for Outside_VPN:198.111.72.75/1033 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9634 TCP FINs
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488165627 for Outside_VPN:170.111.72.22/27463 to DMZ-Inside:10.22.8.53/443 duration 0:00:01 bytes 9756 TCP FINs
+<166>Jan  5 08:52:32 10.22.8.216 %ASA-6-302016: Teardown UDP connection 212805854 for outside:10.22.8.62/54704(LOCAL\user.name) to inside:10.22.8.85/53 duration 0:00:00 bytes 114 (user.name)
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-302020: Built inbound ICMP connection for faddr 207.111.72.122/0 gaddr 206.111.72.24/512 laddr 10.22.8.57/512
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-302013: Built outbound TCP connection 17605397 for outside:69.111.72.0/80 (69.111.72.0/80) to inside:10.22.8.102/55659 (206.111.72.41/40627)
+<174>Jan  5 14:52:32 10.22.8.212 %ASA-6-302015: Built inbound UDP connection 76245230 for outside:10.22.8.96/123 (10.22.8.96/123) to inside:10.22.8.12/123 (10.22.8.12/123) (user.name)
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488031413 for Outside_VPN:184.111.72.216/50341 to DMZ-Inside:10.22.8.57/443 duration 0:05:01 bytes 13543 TCP Reset-O
+<166>Jan  5 16:52:32 10.22.8.41 %ASA-6-302020: Built inbound ICMP connection for faddr 10.22.8.95/1(LOCAL\user.name) gaddr 10.22.8.12/0 laddr 10.22.8.12/0 (user.name)
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488030393 for DMZ-Inside:[10.22.8.10/57109 to Inside-Trunk:10.22.8.128/443 duration 0:05:04 bytes 13541 TCP Reset-O
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-305012: Teardown dynamic TCP translation from inside:10.22.8.149/62156 to outside:206.111.72.41/19576 duration 0:00:44
+<166>Jan  5 09:52:32 10.22.8.12 %ASA-6-305012: Teardown dynamic TCP translation from inside:10.22.8.149/62159 to outside:206.111.72.41/39634 duration 0:00:44
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488031793 for Outside_VPN:198.111.72.146/28026 to DMZ-Inside:10.22.8.53/443 duration 0:05:00 bytes 119 TCP FINs
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-302014: Teardown TCP connection 488030810 for DMZ-Inside:10.22.8.10/56930 to Inside-Trunk:10.22.8.128/443 duration 0:05:03 bytes 13543 TCP Reset-O
+<142>Jan  5 08:52:32 10.22.8.201 %ASA-6-106015: Deny TCP (no connection) from 186.111.72.11/80 to 204.111.72.199/61438 flags SYN ACK  on interface Outside_VPN
+<166>Jan  5 08:52:32 10.22.8.216 %ASA-6-302013: Built inbound TCP connection 212805863 for outside:10.22.8.144/61999 (10.22.8.144/61999)(LOCAL\user.name) to inside:10.22.8.163/80 (10.22.8.163/80) (user.name)
+<167>Jan  5 08:52:32 10.22.8.216 %ASA-7-609002: Teardown local-host inside:10.22.8.205 duration 0:00:00
\ No newline at end of file


[13/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/fireeye/BasicFireEyeParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/fireeye/BasicFireEyeParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/fireeye/BasicFireEyeParserTest.java
new file mode 100644
index 0000000..129619d
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/fireeye/BasicFireEyeParserTest.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.fireeye;
+
+
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractConfigTest;
+import org.junit.Assert;
+
+/**
+ * <ul>
+ * <li>Title: Test For SourceFireParser</li>
+ * <li>Description: </li>
+ * <li>Created: July 8, 2014</li>
+ * </ul>
+ * @version $Revision: 1.0 $
+ */
+public class BasicFireEyeParserTest extends AbstractConfigTest
+{
+   /**
+    * The inputStrings.
+    */
+    private static String[] inputStrings;
+ 
+   /**
+    * The parser.
+    */
+    private BasicFireEyeParser parser=null;
+
+	
+   /**
+    * Constructs a new <code>BasicFireEyeParserTest</code> instance.
+    * @throws Exception
+    */ 
+    public BasicFireEyeParserTest() throws Exception {
+        super();
+    }
+
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public void setUp() throws Exception {
+        super.setUp("org.apache.metron.parsers.fireeye.BasicFireEyeParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        parser = new BasicFireEyeParser();  
+	}
+
+	/**
+	 * 	
+	 * 	
+	 * @throws java.lang.Exception
+	 */
+	public void tearDown() throws Exception {
+		parser = null;
+        setInputStrings(null);		
+	}
+
+	/**
+	 * Test method for
+	 *
+	 *
+	 *
+	 *
+	 *
+	 * {@link BasicFireEyeParser#parse(byte[])}.
+	 */
+	@SuppressWarnings({ "rawtypes"})
+	public void testParse() {
+		for (String inputString : getInputStrings()) {
+			JSONObject parsed = parser.parse(inputString.getBytes()).get(0);
+			Assert.assertNotNull(parsed);
+		
+			JSONParser parser = new JSONParser();
+
+			Map json=null;
+			try {
+				json = (Map) parser.parse(parsed.toJSONString());
+			} catch (ParseException e) {
+				e.printStackTrace();
+			}
+			Iterator iter = json.entrySet().iterator();
+			
+			Assert.assertNotNull(json);
+			Assert.assertFalse(json.isEmpty());
+			
+
+			while (iter.hasNext()) {
+				Map.Entry entry = (Map.Entry) iter.next();
+				String key = (String) entry.getKey();
+				String value = (String) json.get(key).toString();
+				Assert.assertNotNull(value);
+			}
+		}
+	}
+
+	/**
+	 * Returns Input String
+	 */
+	public static String[] getInputStrings() {
+		return inputStrings;
+	}
+		
+	/**
+	 * Sets SourceFire Input String
+	 */	
+	public static void setInputStrings(String[] strings) {
+		BasicFireEyeParserTest.inputStrings = strings;
+	}
+	
+    /**
+     * Returns the parser.
+     * @return the parser.
+     */
+    public BasicFireEyeParser getParser() {
+        return parser;
+    }
+
+    /**
+     * Sets the parser.
+     * @param parser the parser.
+     */
+     public void setParser(BasicFireEyeParser parser) {
+    
+        this.parser = parser;
+     }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/ParserIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/ParserIntegrationTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/ParserIntegrationTest.java
new file mode 100644
index 0000000..a21ebf8
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/ParserIntegrationTest.java
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.integration;
+
+import org.apache.metron.common.Constants;
+import org.apache.metron.integration.BaseIntegrationTest;
+import org.apache.metron.integration.utils.TestUtils;
+import org.apache.metron.test.utils.UnitTestHelper;
+import org.apache.metron.integration.ComponentRunner;
+import org.apache.metron.integration.Processor;
+import org.apache.metron.integration.ReadinessState;
+import org.apache.metron.integration.components.FluxTopologyComponent;
+import org.apache.metron.integration.components.KafkaWithZKComponent;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+public abstract class ParserIntegrationTest extends BaseIntegrationTest {
+
+  public abstract String getFluxPath();
+  public abstract String getSampleInputPath();
+  public abstract String getSampleParsedPath();
+  public abstract String getSensorType();
+  public abstract String getFluxTopicProperty();
+
+  @Test
+  public void test() throws Exception {
+
+    final String kafkaTopic = getSensorType();
+
+    final List<byte[]> inputMessages = TestUtils.readSampleData(getSampleInputPath());
+
+    final Properties topologyProperties = new Properties() {{
+      setProperty(getFluxTopicProperty(), kafkaTopic);
+    }};
+    final KafkaWithZKComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaWithZKComponent.Topic>() {{
+      add(new KafkaWithZKComponent.Topic(kafkaTopic, 1));
+    }});
+
+    topologyProperties.setProperty("kafka.broker", kafkaComponent.getBrokerList());
+    FluxTopologyComponent fluxComponent = new FluxTopologyComponent.Builder()
+            .withTopologyLocation(new File(getFluxPath()))
+            .withTopologyName("test")
+            .withTopologyProperties(topologyProperties)
+            .build();
+
+    UnitTestHelper.verboseLogging();
+    ComponentRunner runner = new ComponentRunner.Builder()
+            .withComponent("kafka", kafkaComponent)
+            .withComponent("storm", fluxComponent)
+            .withMillisecondsBetweenAttempts(5000)
+            .withNumRetries(10)
+            .build();
+    runner.start();
+    fluxComponent.submitTopology();
+    kafkaComponent.writeMessages(kafkaTopic, inputMessages);
+    List<byte[]> outputMessages =
+            runner.process(new Processor<List<byte[]>>() {
+              List<byte[]> messages = null;
+
+              public ReadinessState process(ComponentRunner runner) {
+                KafkaWithZKComponent kafkaWithZKComponent = runner.getComponent("kafka", KafkaWithZKComponent.class);
+                List<byte[]> outputMessages = kafkaWithZKComponent.readMessages(Constants.ENRICHMENT_TOPIC);
+                if (outputMessages.size() == inputMessages.size()) {
+                  messages = outputMessages;
+                  return ReadinessState.READY;
+                } else {
+                  return ReadinessState.NOT_READY;
+                }
+              }
+
+              public List<byte[]> getResult() {
+                return messages;
+              }
+            });
+    List<byte[]> sampleParsedMessages = TestUtils.readSampleData(getSampleParsedPath());
+    Assert.assertEquals(sampleParsedMessages.size(), outputMessages.size());
+    for (int i = 0; i < outputMessages.size(); i++) {
+      String sampleParsedMessage = new String(sampleParsedMessages.get(i));
+      String outputMessage = new String(outputMessages.get(i));
+      assertJSONEqual(sampleParsedMessage, outputMessage);
+    }
+    runner.stop();
+
+  }
+
+  public static void assertJSONEqual(String doc1, String doc2) throws IOException {
+    ObjectMapper mapper = new ObjectMapper();
+    Map m1 = mapper.readValue(doc1, Map.class);
+    Map m2 = mapper.readValue(doc2, Map.class);
+    Assert.assertEquals(m1.size(), m2.size());
+    for(Object k : m1.keySet()) {
+      Object v1 = m1.get(k);
+      Object v2 = m2.get(k);
+
+      if(v2 == null) {
+        Assert.fail("Unable to find key: " + k + " in output");
+      }
+      if(k.equals("timestamp")) {
+        //TODO: Take the ?!?@ timestamps out of the reference file.
+        Assert.assertEquals(v1.toString().length(), v2.toString().length());
+      }
+      else if(!v2.equals(v1)) {
+        Assert.assertEquals("value mismatch for " + k ,v1, v2);
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/PcapParserIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/PcapParserIntegrationTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/PcapParserIntegrationTest.java
new file mode 100644
index 0000000..0dac76b
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/PcapParserIntegrationTest.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.integration;
+
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.SequenceFile.Reader;
+import org.apache.metron.TestConstants;
+import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.integration.BaseIntegrationTest;
+import org.apache.metron.test.utils.UnitTestHelper;
+import org.apache.metron.integration.ComponentRunner;
+import org.apache.metron.integration.Processor;
+import org.apache.metron.integration.ReadinessState;
+import org.apache.metron.integration.components.FluxTopologyComponent;
+import org.apache.metron.integration.components.KafkaWithZKComponent;
+import org.apache.metron.test.mock.MockHTable;
+import org.apache.metron.parsers.pcap.PcapParser;
+import org.apache.metron.pcap.utils.PcapUtils;
+import org.json.simple.JSONObject;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * End-to-end integration test for the pcap parser topology: raw pcaps are read
+ * from a sample SequenceFile, pushed onto kafka, run through the flux topology,
+ * and verified byte-for-byte against a mock HBase table.
+ */
+public class PcapParserIntegrationTest extends BaseIntegrationTest {
+
+  private static String BASE_DIR = "pcap";
+  private static String DATA_DIR = BASE_DIR + "/data_dir";
+  private static String QUERY_DIR = BASE_DIR + "/query";
+  private String topologiesDir = "./src/main/flux";
+  private String targetDir = "target";
+
+  /** Serializable TableProvider that hands out in-memory mock HBase tables. */
+  public static class Provider implements TableProvider, Serializable {
+    MockHTable.Provider provider = new MockHTable.Provider();
+
+    @Override
+    public HTableInterface getTable(Configuration config, String tableName) throws IOException {
+      return provider.getTable(config, tableName);
+    }
+  }
+
+  /** Returns (creating it if necessary) the pcap data directory under targetDir. */
+  private File getOutDir(String targetDir) {
+    File outDir = new File(new File(targetDir), DATA_DIR);
+    if (!outDir.exists()) {
+      outDir.mkdirs();
+    }
+    return outDir;
+  }
+
+  /** Returns (creating it if necessary) the pcap query directory under targetDir. */
+  private File getQueryDir(String targetDir) {
+    File outDir = new File(new File(targetDir), QUERY_DIR);
+    if (!outDir.exists()) {
+      outDir.mkdirs();
+    }
+    return outDir;
+  }
+
+  /** Deletes every file directly inside outDir (non-recursive). */
+  private static void clearOutDir(File outDir) {
+    File[] files = outDir.listFiles();
+    // listFiles() returns null if the directory vanished; guard against NPE.
+    if (files != null) {
+      for (File f : files) {
+        f.delete();
+      }
+    }
+  }
+
+  /**
+   * Reads raw pcaps out of a SequenceFile of (IntWritable, BytesWritable)
+   * entries and keys them by session key.
+   *
+   * @param pcapFile path to the sample SequenceFile
+   * @return session key -> raw pcap bytes for every entry that parses and validates
+   * @throws IOException if the SequenceFile cannot be read
+   */
+  private static Map<String, byte[]> readPcaps(Path pcapFile) throws IOException {
+    Map<String, byte[]> ret = new HashMap<>();
+    // try-with-resources: the original never closed the reader (resource leak).
+    try (SequenceFile.Reader reader = new SequenceFile.Reader(new Configuration(),
+            Reader.file(pcapFile)
+    )) {
+      IntWritable key = new IntWritable();
+      BytesWritable value = new BytesWritable();
+      PcapParser parser = new PcapParser();
+      parser.init();
+      while (reader.next(key, value)) {
+        byte[] valueBytes = value.copyBytes();
+        JSONObject message = parser.parse(valueBytes).get(0);
+        if (parser.validate(message)) {
+          ret.put(PcapUtils.getSessionKey(message), valueBytes);
+        }
+      }
+    }
+    return ret;
+  }
+
+  /**
+   * Submits the pcap topology, feeds it the sample pcaps via kafka, waits until
+   * the mock HBase table holds one row per pcap, then verifies the stored bytes
+   * match the originals exactly.
+   */
+  @Test
+  public void testTopology() throws Exception {
+    if (!new File(topologiesDir).exists()) {
+      topologiesDir = UnitTestHelper.findDir("topologies");
+    }
+    targetDir = UnitTestHelper.findDir("target");
+    final String kafkaTopic = "pcap";
+    final String tableName = "pcap";
+    final String columnFamily = "t";
+    final String columnIdentifier = "value";
+    final File outDir = getOutDir(targetDir);
+    final File queryDir = getQueryDir(targetDir);
+    clearOutDir(outDir);
+    clearOutDir(queryDir);
+
+    Assert.assertNotNull(topologiesDir);
+    Assert.assertNotNull(targetDir);
+    Path pcapFile = new Path(TestConstants.SAMPLE_DATA_INPUT_PATH + "PCAPExampleOutput");
+    final Map<String, byte[]> pcapEntries = readPcaps(pcapFile);
+    Assert.assertFalse("No pcaps read from " + pcapFile, pcapEntries.isEmpty());
+    final Properties topologyProperties = new Properties() {{
+      setProperty("hbase.provider.impl", "" + Provider.class.getName());
+      setProperty("spout.kafka.topic.pcap", kafkaTopic);
+      setProperty("bolt.hbase.table.name", tableName);
+      setProperty("bolt.hbase.table.fields", columnFamily + ":" + columnIdentifier);
+    }};
+    final KafkaWithZKComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaWithZKComponent.Topic>() {{
+      add(new KafkaWithZKComponent.Topic(kafkaTopic, 1));
+    }});
+
+    FluxTopologyComponent fluxComponent = new FluxTopologyComponent.Builder()
+            .withTopologyLocation(new File(topologiesDir + "/pcap/test.yaml"))
+            .withTopologyName("pcap")
+            .withTopologyProperties(topologyProperties)
+            .build();
+
+    final MockHTable pcapTable = (MockHTable) MockHTable.Provider.addToCache(tableName, columnFamily);
+
+    UnitTestHelper.verboseLogging();
+    ComponentRunner runner = new ComponentRunner.Builder()
+            .withComponent("kafka", kafkaComponent)
+            .withComponent("storm", fluxComponent)
+            .withMaxTimeMS(60000)
+            .withMillisecondsBetweenAttempts(6000)
+            .withNumRetries(10)
+            .build();
+    // NOTE: the original wrapped this in catch (Exception e) { e.printStackTrace(); },
+    // which let the test pass even when setup or kafka I/O blew up. Exceptions
+    // now propagate and fail the test; cleanup still runs in finally.
+    try {
+      runner.start();
+      fluxComponent.submitTopology();
+      kafkaComponent.writeMessages(kafkaTopic, pcapEntries.values());
+      System.out.println("Sent pcap data: " + pcapEntries.size());
+      List<byte[]> messages = kafkaComponent.readMessages(kafkaTopic);
+      Assert.assertEquals(pcapEntries.size(), messages.size());
+      System.out.println("Wrote " + pcapEntries.size() + " to kafka");
+      runner.process(new Processor<Void>() {
+        @Override
+        public ReadinessState process(ComponentRunner runner) {
+          int hbaseCount = 0;
+          try {
+            System.out.println("Waiting...");
+            ResultScanner resultScanner = pcapTable.getScanner(columnFamily.getBytes(), columnIdentifier.getBytes());
+            while (resultScanner.next() != null) hbaseCount++;
+          } catch (IOException e) {
+            // Best-effort poll: a transient scan failure just means "not ready yet".
+            e.printStackTrace();
+          }
+          return hbaseCount == pcapEntries.size() ? ReadinessState.READY : ReadinessState.NOT_READY;
+        }
+
+        @Override
+        public Void getResult() {
+          return null;
+        }
+      });
+      ResultScanner resultScanner = pcapTable.getScanner(columnFamily.getBytes(), columnIdentifier.getBytes());
+      Result result;
+      int rowCount = 0;
+      while ((result = resultScanner.next()) != null) {
+        String rowKey = new String(result.getRow());
+        byte[] hbaseValue = result.getValue(columnFamily.getBytes(), columnIdentifier.getBytes());
+        byte[] originalValue = pcapEntries.get(rowKey);
+        Assert.assertNotNull("Could not find pcap with key " + rowKey + " in sample data", originalValue);
+        Assert.assertArrayEquals("Raw values are different for key " + rowKey, originalValue, hbaseValue);
+        rowCount++;
+      }
+      Assert.assertEquals(pcapEntries.size(), rowCount);
+      System.out.println("Ended");
+    } finally {
+      runner.stop();
+      clearOutDir(outDir);
+      clearOutDir(queryDir);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/SnortIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/SnortIntegrationTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/SnortIntegrationTest.java
new file mode 100644
index 0000000..983f7e3
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/SnortIntegrationTest.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.integration;
+
+import org.apache.metron.TestConstants;
+
+/**
+ * Parser integration wiring for the Snort sensor: supplies the sensor type,
+ * flux topology location, kafka-topic property, and sample input/parsed data
+ * paths consumed by the shared {@code ParserIntegrationTest} harness.
+ */
+public class SnortIntegrationTest extends ParserIntegrationTest {
+
+  /** Sensor type identifier used by the harness. */
+  @Override
+  public String getSensorType() {
+    return "snort";
+  }
+
+  /** Location of the snort test flux topology definition. */
+  @Override
+  public String getFluxPath() {
+    return "./src/main/flux/snort/test.yaml";
+  }
+
+  /** Topology property naming the kafka topic the snort spout reads. */
+  @Override
+  public String getFluxTopicProperty() {
+    return "spout.kafka.topic.snort";
+  }
+
+  /** Raw snort sample data fed into the topology. */
+  @Override
+  public String getSampleInputPath() {
+    return TestConstants.SAMPLE_DATA_INPUT_PATH + "SnortOutput";
+  }
+
+  /** Expected parsed output to compare the topology's results against. */
+  @Override
+  public String getSampleParsedPath() {
+    return TestConstants.SAMPLE_DATA_PARSED_PATH + "SnortParsed";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/YafIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/YafIntegrationTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/YafIntegrationTest.java
new file mode 100644
index 0000000..67fe2d6
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/integration/YafIntegrationTest.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.integration;
+
+import org.apache.metron.TestConstants;
+
+/**
+ * Parser integration wiring for the YAF sensor: supplies the sensor type,
+ * flux topology location, kafka-topic property, and sample input/parsed data
+ * paths consumed by the shared {@code ParserIntegrationTest} harness.
+ */
+public class YafIntegrationTest extends ParserIntegrationTest {
+
+  /** Sensor type identifier used by the harness. */
+  @Override
+  public String getSensorType() {
+    return "yaf";
+  }
+
+  /** Location of the yaf test flux topology definition. */
+  @Override
+  public String getFluxPath() {
+    return "./src/main/flux/yaf/test.yaml";
+  }
+
+  /** Topology property naming the kafka topic the yaf spout reads. */
+  @Override
+  public String getFluxTopicProperty() {
+    return "spout.kafka.topic.yaf";
+  }
+
+  /** Raw yaf sample data fed into the topology. */
+  @Override
+  public String getSampleInputPath() {
+    return TestConstants.SAMPLE_DATA_INPUT_PATH + "YafExampleOutput";
+  }
+
+  /** Expected parsed output to compare the topology's results against. */
+  @Override
+  public String getSampleParsedPath() {
+    return TestConstants.SAMPLE_DATA_PARSED_PATH + "YafExampleParsed";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/ise/BasicIseParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/ise/BasicIseParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/ise/BasicIseParserTest.java
new file mode 100644
index 0000000..751e414
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/ise/BasicIseParserTest.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.ise;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractSchemaTest;
+import org.junit.Assert;
+
+
+/**
+ * <ul>
+ * <li>Title: Basic ISE Parser</li>
+ * <li>Description: Junit Test Case for BasicISE Parser</li>
+ * <li>Created: AUG 25, 2014</li>
+ * </ul>
+ *
+ * @version $Revision: 1.1 $
+ */
+public class BasicIseParserTest extends AbstractSchemaTest {
+
+  /** Raw test log lines read from the configured log file. */
+  private static String[] inputStrings;
+
+  /** The parser under test. */
+  private static BasicIseParser parser = null;
+
+  /**
+   * Constructs a new <code>BasicIseParserTest</code> instance.
+   *
+   * @param name the test name
+   */
+  public BasicIseParserTest(String name) {
+    super(name);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  protected static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  protected static void tearDownAfterClass() throws Exception {
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see junit.framework.TestCase#setUp()
+   */
+  protected void setUp() throws Exception {
+    // NOTE(review): this loads the *Lancope* test configuration -- it looks like
+    // a copy/paste from BasicLancopeParserTest. Confirm the intended config key
+    // before changing it; the ISE schema below is loaded independently.
+    super.setUp("org.apache.metron.parsers.lancope.BasicLancopeParserTest");
+    setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+    BasicIseParserTest.setIseParser(new BasicIseParser());
+
+    URL schemaUrl = getClass().getClassLoader().getResource(
+        "TestSchemas/IseSchema.json");
+    super.setSchemaJsonString(super.readSchemaFromFile(schemaUrl));
+  }
+
+  /*
+   * (non-Javadoc)
+   *
+   * @see junit.framework.TestCase#tearDown()
+   */
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  /**
+   * Test method for {@link BasicIseParser#parse(byte[])}: every sample line must
+   * parse, and the resulting JSON must validate against the ISE schema.
+   *
+   * @throws ParseException if a parsed message is not valid JSON (fails the test)
+   * @throws IOException on schema/data read failure
+   */
+  public void testParse() throws ParseException, IOException, Exception {
+    for (String inputString : getInputStrings()) {
+      JSONObject parsed = parser.parse(inputString.getBytes()).get(0);
+      assertNotNull(parsed);
+
+      System.out.println(parsed);
+      // Renamed from "parser" to stop shadowing the static parser under test.
+      JSONParser jsonParser = new JSONParser();
+      // The original caught ParseException and only printed it, silently
+      // skipping schema validation; a parse failure now fails the test.
+      Map<?, ?> json = (Map<?, ?>) jsonParser.parse(parsed.toJSONString());
+      Assert.assertEquals(true, validateJsonData(super.getSchemaJsonString(), json.toString()));
+    }
+  }
+
+  /**
+   * Returns the iseParser.
+   *
+   * @return the iseParser.
+   */
+  public BasicIseParser getIseParser() {
+    return parser;
+  }
+
+  /**
+   * Sets the iseParser.
+   *
+   * @param parser the parser under test
+   */
+  public static void setIseParser(BasicIseParser parser) {
+    BasicIseParserTest.parser = parser;
+  }
+
+  /**
+   * Returns the inputStrings.
+   *
+   * @return the inputStrings.
+   */
+  public static String[] getInputStrings() {
+    return inputStrings;
+  }
+
+  /**
+   * Sets the inputStrings.
+   *
+   * @param inputStrings the inputStrings.
+   */
+  public static void setInputStrings(String[] inputStrings) {
+    BasicIseParserTest.inputStrings = inputStrings;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/lancope/BasicLancopeParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/lancope/BasicLancopeParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/lancope/BasicLancopeParserTest.java
new file mode 100644
index 0000000..4d8a2d0
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/lancope/BasicLancopeParserTest.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.lancope;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractSchemaTest;
+import org.junit.Assert;
+
+/**
+ * <ul>
+ * <li>Title: Junit for LancopeParserTest</li>
+ * <li>Description: </li>
+ * <li>Created: Aug 25, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class BasicLancopeParserTest extends AbstractSchemaTest {
+
+  /** Raw test log lines read from the configured log file. */
+  private static String[] inputStrings;
+
+  /** The parser under test. */
+  private static BasicLancopeParser parser = null;
+
+  /**
+   * Constructs a new <code>BasicLancopeParserTest</code> instance.
+   *
+   * @param name the test name
+   */
+  public BasicLancopeParserTest(String name) {
+    super(name);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  protected static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  protected static void tearDownAfterClass() throws Exception {
+  }
+
+  /*
+   * (non-Javadoc)
+   * @see junit.framework.TestCase#setUp()
+   */
+  protected void setUp() throws Exception {
+    super.setUp("org.apache.metron.parsers.lancope.BasicLancopeParserTest");
+    setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+    BasicLancopeParserTest.setParser(new BasicLancopeParser());
+
+    URL schemaUrl = getClass().getClassLoader().getResource(
+        "TestSchemas/LancopeSchema.json");
+    super.setSchemaJsonString(super.readSchemaFromFile(schemaUrl));
+  }
+
+  /*
+   * (non-Javadoc)
+   * @see junit.framework.TestCase#tearDown()
+   */
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  /**
+   * Test method for {@link BasicLancopeParser#parse(byte[])}: every sample line
+   * must parse, and the resulting JSON must validate against the Lancope schema.
+   *
+   * @throws IOException on schema/data read failure
+   * @throws Exception if a parsed message is not valid JSON (fails the test)
+   */
+  public void testParse() throws IOException, Exception {
+    for (String inputString : getInputStrings()) {
+      JSONObject parsed = parser.parse(inputString.getBytes()).get(0);
+      assertNotNull(parsed);
+
+      System.out.println(parsed);
+      // Renamed from "parser" to stop shadowing the static parser under test.
+      JSONParser jsonParser = new JSONParser();
+      // The original caught ParseException and only printed it, silently
+      // skipping schema validation; a parse failure now fails the test.
+      Map<?, ?> json = (Map<?, ?>) jsonParser.parse(parsed.toJSONString());
+      Assert.assertEquals(true, validateJsonData(super.getSchemaJsonString(), json.toString()));
+    }
+  }
+
+  /**
+   * Returns the parser.
+   *
+   * @return the parser.
+   */
+  public static BasicLancopeParser getParser() {
+    return parser;
+  }
+
+  /**
+   * Sets the parser.
+   *
+   * @param parser the parser.
+   */
+  public static void setParser(BasicLancopeParser parser) {
+    BasicLancopeParserTest.parser = parser;
+  }
+
+  /**
+   * Returns the inputStrings.
+   *
+   * @return the inputStrings.
+   */
+  public static String[] getInputStrings() {
+    return inputStrings;
+  }
+
+  /**
+   * Sets the inputStrings.
+   *
+   * @param inputStrings the inputStrings.
+   */
+  public static void setInputStrings(String[] inputStrings) {
+    BasicLancopeParserTest.inputStrings = inputStrings;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParserTest.java
new file mode 100644
index 0000000..0c0947b
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParserTest.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.paloalto;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.metron.parsers.sourcefire.BasicSourcefireParser;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractConfigTest;
+import org.junit.Assert;
+
+/**
+ * Junit test case for {@link BasicPaloAltoFirewallParser}: parses each sample
+ * log line and checks that the resulting JSON has no null values.
+ */
+public class BasicPaloAltoFirewallParserTest extends AbstractConfigTest {
+
+  /** Raw test log lines read from the configured log file. */
+  private static String[] inputStrings;
+
+  /** The parser under test. */
+  private BasicPaloAltoFirewallParser paParser = null;
+
+  /**
+   * Constructs a new <code>BasicPaloAltoFirewallParserTest</code> instance.
+   *
+   * @throws Exception on configuration failure
+   */
+  public BasicPaloAltoFirewallParserTest() throws Exception {
+    super();
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public static void tearDownAfterClass() throws Exception {
+    setPAStrings(null);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public void setUp() throws Exception {
+    super.setUp("org.apache.metron.parsers.paloalto.BasicPaloAltoFirewallParserTest");
+    setPAStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+    paParser = new BasicPaloAltoFirewallParser();
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public void tearDown() throws Exception {
+    paParser = null;
+  }
+
+  /**
+   * Test method for {@link BasicPaloAltoFirewallParser#parse(byte[])}: every
+   * sample line must parse to JSON in which no key maps to a null value.
+   */
+  @SuppressWarnings({ "rawtypes" })
+  public void testParse() {
+    for (String inputString : getInputStrings()) {
+      JSONObject parsed = paParser.parse(inputString.getBytes()).get(0);
+      Assert.assertNotNull(parsed);
+
+      System.out.println(parsed);
+      JSONParser jsonParser = new JSONParser();
+      Map json;
+      try {
+        json = (Map) jsonParser.parse(parsed.toJSONString());
+      } catch (ParseException e) {
+        // The original printed the stack trace and fell through to a guaranteed
+        // NPE on json.entrySet(); fail fast with the real cause instead.
+        throw new AssertionError("Parser emitted invalid JSON for: " + inputString, e);
+      }
+      Iterator iter = json.entrySet().iterator();
+      while (iter.hasNext()) {
+        Map.Entry entry = (Map.Entry) iter.next();
+        String key = (String) entry.getKey();
+        // Assert the value directly; the original's json.get(key).toString()
+        // would have turned a null value into an NPE instead of a clear failure.
+        Assert.assertNotNull("null value for key " + key, entry.getValue());
+      }
+    }
+  }
+
+  /**
+   * Returns  Input String
+   */
+  public static String[] getInputStrings() {
+    return inputStrings;
+  }
+
+  /**
+   * Sets  Input String
+   */
+  public static void setPAStrings(String[] strings) {
+    BasicPaloAltoFirewallParserTest.inputStrings = strings;
+  }
+
+  /**
+   * Sets the inputStrings.
+   *
+   * @param inputStrings the inputStrings.
+   */
+  public static void setInputStrings(String[] inputStrings) {
+    BasicPaloAltoFirewallParserTest.inputStrings = inputStrings;
+  }
+
+  /**
+   * Returns the paParser.
+   *
+   * @return the paParser.
+   */
+  public BasicPaloAltoFirewallParser getPaParser() {
+    return paParser;
+  }
+
+  /**
+   * Sets the paParser.
+   *
+   * @param paParser the paParser.
+   */
+  public void setPaParser(BasicPaloAltoFirewallParser paParser) {
+    this.paParser = paParser;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParserTest.java
new file mode 100644
index 0000000..2ce035c
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParserTest.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.sourcefire;
+
+
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractConfigTest;
+import org.junit.Assert;
+
+/**
+ * <ul>
+ * <li>Title: Test For SourceFireParser</li>
+ * <li>Description: </li>
+ * <li>Created: July 8, 2014</li>
+ * </ul>
+ * @version $Revision: 1.0 $
+ */
+public class BasicSourcefireParserTest extends AbstractConfigTest
+{
+  /** Raw test log lines read from the configured log file. */
+  private static String[] sourceFireStrings;
+
+  /** The parser under test. */
+  private BasicSourcefireParser sourceFireParser = null;
+
+  /**
+   * Constructs a new <code>BasicSourcefireParserTest</code> instance.
+   *
+   * @throws Exception on configuration failure
+   */
+  public BasicSourcefireParserTest() throws Exception {
+    super();
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public static void setUpBeforeClass() throws Exception {
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public static void tearDownAfterClass() throws Exception {
+    setSourceFireStrings(null);
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public void setUp() throws Exception {
+    // NOTE(review): this config key still uses the pre-reorg package name
+    // ("org.apache.metron.parsing.test...") and misspells "Sourcefire" --
+    // confirm it matches the actual test configuration resource.
+    super.setUp("org.apache.metron.parsing.test.BasicSoureceFireParserTest");
+    setSourceFireStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+    sourceFireParser = new BasicSourcefireParser();
+  }
+
+  /**
+   * @throws java.lang.Exception
+   */
+  public void tearDown() throws Exception {
+    sourceFireParser = null;
+  }
+
+  /**
+   * Test method for {@link BasicSourcefireParser#parse(byte[])}: every sample
+   * line must parse to JSON carrying a non-null "original_string" field.
+   */
+  @SuppressWarnings({ "rawtypes" })
+  public void testParse() {
+    for (String sourceFireString : getSourceFireStrings()) {
+      JSONObject parsed = sourceFireParser.parse(sourceFireString.getBytes()).get(0);
+      Assert.assertNotNull(parsed);
+
+      System.out.println(parsed);
+      JSONParser jsonParser = new JSONParser();
+      Map json;
+      try {
+        json = (Map) jsonParser.parse(parsed.toJSONString());
+      } catch (ParseException e) {
+        // The original printed the stack trace and fell through to a guaranteed
+        // NPE on json.entrySet(); fail fast with the real cause instead.
+        throw new AssertionError("Parser emitted invalid JSON for: " + sourceFireString, e);
+      }
+      // NOTE(review): as in the original, this checks "original_string" once per
+      // entry rather than each entry's own value -- confirm that is intended.
+      Iterator iter = json.entrySet().iterator();
+      while (iter.hasNext()) {
+        iter.next();
+        String value = json.get("original_string").toString();
+        Assert.assertNotNull(value);
+      }
+    }
+  }
+
+  /**
+   * Returns SourceFire Input String
+   */
+  public static String[] getSourceFireStrings() {
+    return sourceFireStrings;
+  }
+
+  /**
+   * Sets SourceFire Input String
+   */
+  public static void setSourceFireStrings(String[] strings) {
+    BasicSourcefireParserTest.sourceFireStrings = strings;
+  }
+
+  /**
+   * Returns the sourceFireParser.
+   *
+   * @return the sourceFireParser.
+   */
+  public BasicSourcefireParser getSourceFireParser() {
+    return sourceFireParser;
+  }
+
+  /**
+   * Sets the sourceFireParser.
+   *
+   * @param sourceFireParser the sourceFireParser.
+   */
+  public void setSourceFireParser(BasicSourcefireParser sourceFireParser) {
+    this.sourceFireParser = sourceFireParser;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/TestSchemas/BroSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/TestSchemas/BroSchema.json b/metron-platform/metron-parsers/src/test/resources/TestSchemas/BroSchema.json
new file mode 100644
index 0000000..0105c19
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/TestSchemas/BroSchema.json
@@ -0,0 +1,28 @@
+{
+	"title": "Bro Schema",
+	"type": "object",
+	"properties": {
+		"status_code": {
+			"type": "integer"
+		},
+		"uid": {
+			"type": "string"
+		},
+		"protocol": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"host": {
+			"type": "string"
+		},		
+		"request_body_len": {
+			"type": "integer"
+		},
+		"response_body_len": {
+			"type": "integer"
+		}	
+	},
+	"required": ["status_code", "uid", "protocol","ip_dst_addr","host","request_body_len","response_body_len"]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/TestSchemas/IseSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/TestSchemas/IseSchema.json b/metron-platform/metron-parsers/src/test/resources/TestSchemas/IseSchema.json
new file mode 100644
index 0000000..1c3f670
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/TestSchemas/IseSchema.json
@@ -0,0 +1,21 @@
+{
+	"title": "Ise Schema",
+	"type": "object",
+	"properties": {
+		"ip_src_addr": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"ip_src_port": {
+			"type": "string"
+		},
+		"ip_dst_port": {
+			"type": "string"
+		},
+		"protocol": {
+			"type": "string"
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/TestSchemas/LancopeSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/TestSchemas/LancopeSchema.json b/metron-platform/metron-parsers/src/test/resources/TestSchemas/LancopeSchema.json
new file mode 100644
index 0000000..9118a93
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/TestSchemas/LancopeSchema.json
@@ -0,0 +1,28 @@
+{
+	"title": "Lancope Schema",
+	"type": "object",
+	"properties": {
+		"ip_src_addr": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"original_string": {
+			"type": "string"
+		},
+		"@version": {
+			"type": "string"
+		},
+		"timestamp": {
+			"type": "integer"
+		},
+		"type": {
+			"type": "string"
+		},
+		"host": {
+			"type": "string"
+		}
+	},
+	"required": ["ip_src_addr", "ip_dst_addr", "original_string","@version", "timestamp", "type","host"]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/TestSchemas/PcapSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/TestSchemas/PcapSchema.json b/metron-platform/metron-parsers/src/test/resources/TestSchemas/PcapSchema.json
new file mode 100644
index 0000000..761396e
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/TestSchemas/PcapSchema.json
@@ -0,0 +1,22 @@
+{
+	"title": "Pcap Schema",
+	"type": "object",
+	"properties": {
+		"ip_src_addr": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"ip_src_port": {
+			"type": "string"
+		},
+		"ip_dst_port": {
+			"type": "string"
+		},
+		"protocol": {
+			"type": "string"
+		}
+	},
+	"required": ["ip_src_addr", "ip_dst_addr", "ip_src_port", "ip_dst_port","protocol"]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/TestSchemas/SourcefireSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/TestSchemas/SourcefireSchema.json b/metron-platform/metron-parsers/src/test/resources/TestSchemas/SourcefireSchema.json
new file mode 100644
index 0000000..2711909
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/TestSchemas/SourcefireSchema.json
@@ -0,0 +1,34 @@
+{
+	"title": "Sourcefire Schema",
+	"type": "object",
+	"properties": {
+		"ip_src_addr": {
+			"type": "string"
+		},
+		"ip_dst_addr": {
+			"type": "string"
+		},
+		"timestamp": {
+			"type": "integer"
+		},
+		"protocol": {
+			"type": "string"
+		},
+		"original_string": {
+			"type": "string"
+		},
+		"ip_src_port": {
+			"type": "string"
+		},		
+		"ip_dst_port": {
+			"type": "string"
+		},
+		"key": {
+			"type": "string"
+		}	
+	},
+	"required": ["ip_src_addr", "ip_dst_addr", "ip_src_port", "ip_dst_port","protocol","original_string","key","timestamp"]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BasicFireEyeParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BasicFireEyeParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BasicFireEyeParserTest.config
new file mode 100644
index 0000000..4b4c648
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BasicFireEyeParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#BasicFireEyeParserTestConfig
+logFile=src/test/resources/FireEyeParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BasicIseParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BasicIseParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BasicIseParserTest.config
new file mode 100644
index 0000000..d57dace
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BasicIseParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#IseParserTestConfig
+logFile=src/test/resources/IseParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BasicLancopeParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BasicLancopeParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BasicLancopeParserTest.config
new file mode 100644
index 0000000..bd32770
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BasicLancopeParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#LancopeParserTestConfig
+logFile=src/test/resources/LancopeParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
new file mode 100644
index 0000000..61b649e
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BasicPaloAltoFirewallParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#BasicPaloAltoFirewallParserTestConfig
+logFile=src/test/resources/PaloAltoFirewallParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BasicSourcefireParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BasicSourcefireParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BasicSourcefireParserTest.config
new file mode 100644
index 0000000..685b0fd
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BasicSourcefireParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#BasicSourceFileParserTestConfig
+logFile=src/test/resources/SourceFireTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/BroParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/BroParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/BroParserTest.config
new file mode 100644
index 0000000..0dcfc71
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/BroParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#BroParserTestConfig
+logFile=src/test/resources/BroParserTest.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/config/GrokAsaParserTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/config/GrokAsaParserTest.config b/metron-platform/metron-parsers/src/test/resources/config/GrokAsaParserTest.config
new file mode 100644
index 0000000..9dbc3b6
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/config/GrokAsaParserTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#GrokParserTestConfig
+logFile=src/test/resources/GrokParserTest.log


[17/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/pom.xml b/metron-platform/metron-parsers/pom.xml
new file mode 100644
index 0000000..0462ba9
--- /dev/null
+++ b/metron-platform/metron-parsers/pom.xml
@@ -0,0 +1,227 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+	Foundation (ASF) under one or more contributor license agreements. See the 
+	NOTICE file distributed with this work for additional information regarding 
+	copyright ownership. The ASF licenses this file to You under the Apache License, 
+	Version 2.0 (the "License"); you may not use this file except in compliance 
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+	Unless required by applicable law or agreed to in writing, software distributed 
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-parsers</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-common</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-pcap</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${global_junit_version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_hbase_guava_version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>io.thekraken</groupId>
+            <artifactId>grok</artifactId>
+            <version>0.1.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.9.2</artifactId>
+            <version>${global_kafka_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-test-utilities</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-integration-test</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+    <reporting>
+        <plugins>
+            <!-- Normally, dependency report takes time, skip it -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-project-info-reports-plugin</artifactId>
+                <version>2.7</version>
+
+                <configuration>
+                    <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>emma-maven-plugin</artifactId>
+                <version>1.0-alpha-3</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-pmd-plugin</artifactId>
+                <configuration>
+                    <targetJdk>1.7</targetJdk>
+                </configuration>
+
+            </plugin>
+        </plugins>
+    </reporting>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <inherited>true</inherited>
+                <configuration>
+                    <source>1.7</source>
+                    <target>1.7</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.3</version>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <artifactSet>
+                                <excludes>
+                                    <exclude>storm:storm-core:*</exclude>
+                                    <exclude>storm:storm-lib:*</exclude>
+                                    <exclude>org.slf4j.impl*</exclude>
+                                    <exclude>org.slf4j:slf4j-log4j*</exclude>
+                                </excludes>
+                            </artifactSet>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                                    <resource>.yaml</resource>
+                                </transformer>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass></mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+            <resource>
+                <directory>src/main/patterns</directory>
+            </resource>
+            <resource>
+                <directory>src/test/resources</directory>
+            </resource>
+        </resources>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/assembly/assembly.xml b/metron-platform/metron-parsers/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..d6da96c
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/assembly/assembly.xml
@@ -0,0 +1,74 @@
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/config</directory>
+      <outputDirectory>/config</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/flux</directory>
+      <outputDirectory>/flux</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/resources/patterns</directory>
+      <outputDirectory>/patterns</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/scripts</directory>
+      <outputDirectory>/scripts</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/target</directory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+      <outputDirectory>/lib</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/config/parsers.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/config/parsers.properties b/metron-platform/metron-parsers/src/main/config/parsers.properties
new file mode 100644
index 0000000..7b906d2
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/config/parsers.properties
@@ -0,0 +1,21 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+##### Kafka #####
+
+kafka.zk=node1:2181
+kafka.broker=node1:6667

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/asa/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/asa/remote.yaml b/metron-platform/metron-parsers/src/main/flux/asa/remote.yaml
new file mode 100644
index 0000000..052728e
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/asa/remote.yaml
@@ -0,0 +1,82 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "asa"
+config:
+    topology.workers: 1
+
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.asa.GrokAsaParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "asa"
+            # zk root
+            - ""
+            # id
+            - "asa"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "asa"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/asa/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/asa/test.yaml b/metron-platform/metron-parsers/src/main/flux/asa/test.yaml
new file mode 100644
index 0000000..c816b45
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/asa/test.yaml
@@ -0,0 +1,82 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "asa-test"
+config:
+    topology.workers: 1
+
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.asa.GrokAsaParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "asa"
+            # zk root
+            - ""
+            # id
+            - "asa"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "asa"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/bro/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/bro/remote.yaml b/metron-platform/metron-parsers/src/main/flux/bro/remote.yaml
new file mode 100644
index 0000000..1852499
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/bro/remote.yaml
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "bro"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.bro.BasicBroParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "bro"
+            # zk root
+            - ""
+            # id
+            - "bro"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "bro"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/bro/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/bro/test.yaml b/metron-platform/metron-parsers/src/main/flux/bro/test.yaml
new file mode 100644
index 0000000..42c3261
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/bro/test.yaml
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "bro-test"
+config:
+    topology.workers: 1
+
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.bro.BasicBroParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "bro"
+            # zk root
+            - ""
+            # id
+            - "bro"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "bro"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/fireeye/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/fireeye/remote.yaml b/metron-platform/metron-parsers/src/main/flux/fireeye/remote.yaml
new file mode 100644
index 0000000..a745d38
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/fireeye/remote.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "fireeye"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.fireeye.BasicFireEyeParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "fireeye"
+            # zk root
+            - ""
+            # id
+            - "fireeye"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "fireeye"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/fireeye/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/fireeye/test.yaml b/metron-platform/metron-parsers/src/main/flux/fireeye/test.yaml
new file mode 100644
index 0000000..9f4c06f
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/fireeye/test.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "fireeye-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.fireeye.BasicFireEyeParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "fireeye"
+            # zk root
+            - ""
+            # id
+            - "fireeye"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "fireeye"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/ise/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/ise/remote.yaml b/metron-platform/metron-parsers/src/main/flux/ise/remote.yaml
new file mode 100644
index 0000000..53ed5fc
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/ise/remote.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "ise"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.ise.BasicIseParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "ise"
+            # zk root
+            - ""
+            # id
+            - "ise"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "ise"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/ise/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/ise/test.yaml b/metron-platform/metron-parsers/src/main/flux/ise/test.yaml
new file mode 100644
index 0000000..2e88594
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/ise/test.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "ise-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.ise.BasicIseParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "ise"
+            # zk root
+            - ""
+            # id
+            - "ise"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "ise"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/lancope/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/lancope/remote.yaml b/metron-platform/metron-parsers/src/main/flux/lancope/remote.yaml
new file mode 100644
index 0000000..4fe7c3b
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/lancope/remote.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "lancope"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.lancope.BasicLancopeParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "lancope"
+            # zk root
+            - ""
+            # id
+            - "lancope"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "lancope"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/lancope/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/lancope/test.yaml b/metron-platform/metron-parsers/src/main/flux/lancope/test.yaml
new file mode 100644
index 0000000..eb8a1ef
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/lancope/test.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "lancope-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.lancope.BasicLancopeParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "lancope"
+            # zk root
+            - ""
+            # id
+            - "lancope"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "lancope"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/paloalto/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/paloalto/remote.yaml b/metron-platform/metron-parsers/src/main/flux/paloalto/remote.yaml
new file mode 100644
index 0000000..4287fce
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/paloalto/remote.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "paloalto"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.paloalto.BasicPaloAltoFirewallParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "paloalto"
+            # zk root
+            - ""
+            # id
+            - "paloalto"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "paloalto"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/paloalto/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/paloalto/test.yaml b/metron-platform/metron-parsers/src/main/flux/paloalto/test.yaml
new file mode 100644
index 0000000..cef5dc5
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/paloalto/test.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "paloalto-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.paloalto.BasicPaloAltoFirewallParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "paloalto"
+            # zk root
+            - ""
+            # id
+            - "paloalto"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "paloalto"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/pcap/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/pcap/remote.yaml b/metron-platform/metron-parsers/src/main/flux/pcap/remote.yaml
new file mode 100644
index 0000000..418fac1
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/pcap/remote.yaml
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "pcap"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.pcap.PcapParser"
+    -   id: "writer"
+        className: "org.apache.metron.pcap.writer.PcapWriter"
+        constructorArgs:
+            - "${bolt.hbase.table.name}"
+            - "${bolt.hbase.table.fields}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "pcap"
+            # zk root
+            - ""
+            # id
+            - "pcap"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "pcap"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/pcap/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/pcap/test.yaml b/metron-platform/metron-parsers/src/main/flux/pcap/test.yaml
new file mode 100644
index 0000000..f5c5011
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/pcap/test.yaml
@@ -0,0 +1,74 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "pcap-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.pcap.PcapParser"
+    -   id: "writer"
+        className: "org.apache.metron.pcap.writer.PcapWriter"
+        constructorArgs:
+            - "${bolt.hbase.table.name}"
+            - "${bolt.hbase.table.fields}"
+        configMethods:
+            -   name: "withProviderImpl"
+                args:
+                    - "${hbase.provider.impl}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "pcap"
+            # zk root
+            - ""
+            # id
+            - "pcap"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: false
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "pcap"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/snort/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/snort/remote.yaml b/metron-platform/metron-parsers/src/main/flux/snort/remote.yaml
new file mode 100644
index 0000000..8317acf
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/snort/remote.yaml
@@ -0,0 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "snort"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.snort.BasicSnortParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "snort"
+            # zk root
+            - ""
+            # id
+            - "snort"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "snort"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/snort/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/snort/test.yaml b/metron-platform/metron-parsers/src/main/flux/snort/test.yaml
new file mode 100644
index 0000000..5b9a2df
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/snort/test.yaml
@@ -0,0 +1,69 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "snort-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.snort.BasicSnortParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "snort"
+            # zk root
+            - ""
+            # id
+            - "snort"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: false
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "snort"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/sourcefire/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/sourcefire/remote.yaml b/metron-platform/metron-parsers/src/main/flux/sourcefire/remote.yaml
new file mode 100644
index 0000000..312dce0
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/sourcefire/remote.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "sourcefire"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.sourcefire.BasicSourcefireParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "sourcefire"
+            # zk root
+            - ""
+            # id
+            - "sourcefire"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    # NOTE(review): this sourcefire topology reads YAF sample data —
+                    # confirm a Sourcefire sample file was not intended here
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "sourcefire"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/sourcefire/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/sourcefire/test.yaml b/metron-platform/metron-parsers/src/main/flux/sourcefire/test.yaml
new file mode 100644
index 0000000..a0a00d0
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/sourcefire/test.yaml
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "sourcefire-test"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.sourcefire.BasicSourcefireParser"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "sourcefire"
+            # zk root
+            - ""
+            # id
+            - "sourcefire"
+        properties:
+            -   name: "ignoreZkOffsets"
+                # false so the test topology resumes from offsets stored in ZK,
+                # consistent with the pcap, snort and yaf test.yaml configurations
+                value: false
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - false
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "sourcefire"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/yaf/remote.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/yaf/remote.yaml b/metron-platform/metron-parsers/src/main/flux/yaf/remote.yaml
new file mode 100644
index 0000000..f50b319
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/yaf/remote.yaml
@@ -0,0 +1,84 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "yaf"
+config:
+    topology.workers: 1
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.GrokParser"
+        constructorArgs:
+            - "/patterns/yaf"
+            - "YAF_DELIMITED"
+        configMethods:
+            -   name: "withTimestampField"
+                args:
+                    - "start_time"
+            -   name: "withTimeFields"
+                args:
+                    - ["start_time", "end_time"]
+            -   name: "withDateFormat"
+                args:
+                    - "yyyy-MM-dd HH:mm:ss.S"
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "yaf"
+            # zk root
+            - ""
+            # id
+            - "yaf"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -1
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "yaf"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/flux/yaf/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/flux/yaf/test.yaml b/metron-platform/metron-parsers/src/main/flux/yaf/test.yaml
new file mode 100644
index 0000000..0f6031c
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/flux/yaf/test.yaml
@@ -0,0 +1,88 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "yaf-test"
+config:
+    topology.workers: 1
+
+
+components:
+    -   id: "parser"
+        className: "org.apache.metron.parsers.GrokParser"
+        constructorArgs:
+            - "../metron-parsers/src/main/resources/patterns/yaf"
+            - "YAF_DELIMITED"
+        configMethods:
+            -   name: "withTimestampField"
+                args:
+                    - "start_time"
+            -   name: "withTimeFields"
+                args:
+                    - ["start_time", "end_time"]
+            -   name: "withDateFormat"
+                args:
+                    - "yyyy-MM-dd HH:mm:ss.S"
+            -   name: "withMetronHDFSHome"
+                args:
+                    - ""
+    -   id: "writer"
+        className: "org.apache.metron.parsers.writer.KafkaWriter"
+        constructorArgs:
+            - "${kafka.broker}"
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "yaf"
+            # zk root
+            - ""
+            # id
+            - "yaf"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: false
+            -   name: "startOffsetTime"
+                value: -2
+            -   name: "socketTimeoutMs"
+                value: 1000000
+
+spouts:
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+
+bolts:
+    -   id: "parserBolt"
+        className: "org.apache.metron.parsers.bolt.ParserBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+            - "yaf"
+            - ref: "parser"
+            - ref: "writer"
+
+streams:
+    -   name: "spout -> bolt"
+        from: "kafkaSpout"
+        to: "parserBolt"
+        grouping:
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/BasicParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/BasicParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/BasicParser.java
new file mode 100644
index 0000000..2a168a5
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/BasicParser.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+
+import org.apache.metron.parsers.interfaces.MessageParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Serializable;
+
+/**
+ * Abstract base class for Metron parsers that emit JSON messages.
+ * Supplies the minimal-schema validation shared by concrete parsers and a
+ * helper for deriving a message key from the IP address fields.
+ */
+public abstract class BasicParser implements
+        MessageParser<JSONObject>,
+        Serializable {
+
+  protected static final Logger LOG = LoggerFactory
+          .getLogger(BasicParser.class);
+
+  /**
+   * Validates that the parsed message conforms to the minimal Metron schema:
+   * it must contain both an "original_string" and a "timestamp" field.
+   * Missing fields are trace-logged and cause a false result.
+   */
+  @Override
+  public boolean validate(JSONObject message) {
+    JSONObject value = message;
+    if (!(value.containsKey("original_string"))) {
+      LOG.trace("[Metron] Message does not have original_string: " + message);
+      return false;
+    } else if (!(value.containsKey("timestamp"))) {
+      LOG.trace("[Metron] Message does not have timestamp: " + message);
+      return false;
+    } else {
+      LOG.trace("[Metron] Message conforms to schema: "
+              + message);
+      return true;
+    }
+  }
+
+  /**
+   * Derives a key from the message's "ip_src_addr" and "ip_dst_addr" fields.
+   * Returns "0" when neither address is present (or on any error), and the
+   * single populated address when only one is set.
+   */
+  public String getKey(JSONObject value) {
+    try {
+      String ipSrcAddr = null;
+      String ipDstAddr = null;
+      if (value.containsKey("ip_src_addr"))
+        ipSrcAddr = value.get("ip_src_addr").toString();
+      if (value.containsKey("ip_dst_addr"))
+        ipDstAddr = value.get("ip_dst_addr").toString();
+      if (ipSrcAddr == null && ipDstAddr == null)
+        return "0";
+      if (ipSrcAddr == null || ipSrcAddr.length() == 0)
+        return ipDstAddr;
+      if (ipDstAddr == null || ipDstAddr.length() == 0)
+        return ipSrcAddr;
+      // NOTE(review): stripping dots and summing the addresses as doubles is
+      // lossy (e.g. "1.23.4.5" and "12.3.4.5" collide) — presumably this is
+      // only a coarse partitioning key; confirm collisions are acceptable.
+      double ip1 = Double.parseDouble(ipSrcAddr.replace(".", ""));
+      double ip2 = Double.parseDouble(ipDstAddr.replace(".", ""));
+      return String.valueOf(ip1 + ip2);
+    } catch (Exception e) {
+      // Any parse failure (e.g. non-numeric hosts, IPv6) degrades to "0".
+      return "0";
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/GrokParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/GrokParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/GrokParser.java
new file mode 100644
index 0000000..82734a2
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/GrokParser.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+
+import oi.thekraken.grok.api.Grok;
+import oi.thekraken.grok.api.Match;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.metron.parsers.interfaces.MessageParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Serializable;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.TimeZone;
+
+/**
+ * A Metron parser that applies a compiled Grok expression to each raw
+ * message and emits the captured fields as a JSON object. Pattern files are
+ * resolved from the classpath first, then from HDFS under the configured
+ * Metron HDFS home.
+ */
+public class GrokParser implements MessageParser<JSONObject>, Serializable {
+
+  protected static final Logger LOG = LoggerFactory.getLogger(GrokParser.class);
+
+  // Grok is not Serializable, so the compiled instance is transient and is
+  // rebuilt lazily (see parse()) after the parser is shipped to a worker.
+  private transient Grok grok;
+  // Location of this parser's Grok pattern file (classpath or HDFS).
+  private String grokHdfsPath;
+  // Name of the pattern to compile, e.g. "YAF_DELIMITED".
+  private String patternLabel;
+  // Fields whose formatted date values are converted to epoch millis.
+  private String[] timeFields = new String[0];
+  // Field whose value is copied into the output "timestamp" field.
+  private String timestampField;
+  private String dateFormat = "yyyy-MM-dd HH:mm:ss.S z";
+  private TimeZone timeZone = TimeZone.getTimeZone("UTC");
+
+  // Root directory prepended to pattern paths when loading from HDFS.
+  private String metronHdfsHome = "/apps/metron";
+  /**
+   * @param grokHdfsPath path of the Grok pattern file (classpath resource or
+   *                     HDFS path relative to the Metron HDFS home)
+   * @param patterLabel  name of the Grok pattern to compile
+   */
+  public GrokParser(String grokHdfsPath, String patterLabel) {
+    this.grokHdfsPath = grokHdfsPath;
+    this.patternLabel = patterLabel;
+  }
+
+  /** Sets the HDFS root used when resolving pattern files; fluent. */
+  public GrokParser withMetronHDFSHome(String home) {
+    this.metronHdfsHome= home;
+    return this;
+  }
+
+  /** Sets the field copied into the output "timestamp" field; fluent. */
+  public GrokParser withTimestampField(String timestampField) {
+    this.timestampField = timestampField;
+    return this;
+  }
+
+  /** Sets the fields converted from date strings to epoch millis; fluent. */
+  public GrokParser withTimeFields(String... timeFields) {
+    this.timeFields = timeFields;
+    return this;
+  }
+
+  /** Sets the SimpleDateFormat pattern used for the time fields; fluent. */
+  public GrokParser withDateFormat(String dateFormat) {
+    this.dateFormat = dateFormat;
+    return this;
+  }
+
+  /** Sets the time zone applied when parsing time fields (default UTC); fluent. */
+  public GrokParser withTimeZone(String timeZone) {
+    this.timeZone = TimeZone.getTimeZone(timeZone);
+    return this;
+  }
+
+  /**
+   * Opens the named stream from the classpath if present, otherwise from
+   * HDFS under the configured Metron HDFS home. Returns null when the
+   * resource exists in neither location.
+   */
+  public InputStream openInputStream(String streamName) throws IOException {
+    InputStream is = getClass().getResourceAsStream(streamName);
+    if(is == null) {
+      FileSystem fs = FileSystem.get(new Configuration());
+      Path path = new Path((metronHdfsHome != null && metronHdfsHome.length() > 0?metronHdfsHome + "/":"") + streamName);
+      if(fs.exists(path)) {
+        return fs.open(path);
+      }
+    }
+    return is;
+  }
+
+  /**
+   * Loads the shared "/patterns/common" patterns plus this parser's pattern
+   * file and compiles the configured pattern label. Any failure is wrapped
+   * in a RuntimeException, since the parser is unusable without a compiled
+   * Grok expression.
+   */
+  @Override
+  public void init() {
+    grok = new Grok();
+    try {
+      InputStream commonInputStream = openInputStream("/patterns/common");
+      if(commonInputStream == null) {
+        throw new RuntimeException("Unable to initialize grok parser: Unable to load /patterns/common from either classpath or HDFS" );
+      }
+      grok.addPatternFromReader(new InputStreamReader(commonInputStream));
+      InputStream patterInputStream = openInputStream(grokHdfsPath);
+      if(patterInputStream == null) {
+        throw new RuntimeException("Unable to initialize grok parser: Unable to load " + grokHdfsPath + " from either classpath or HDFS" );
+      }
+      grok.addPatternFromReader(new InputStreamReader(patterInputStream));
+      grok.compile("%{" + patternLabel + "}");
+    } catch (Throwable e) {
+      LOG.error(e.getMessage(), e);
+      throw new RuntimeException("Grok parser Error: " + e.getMessage(), e);
+    }
+  }
+
+  /**
+   * Parses a raw UTF-8 message with the compiled Grok expression. The output
+   * contains every captured field, the "original_string", epoch values for
+   * the configured time fields, and a "timestamp" field when a timestamp
+   * field was configured. Returns null if parsing fails.
+   */
+  @SuppressWarnings("unchecked")
+  @Override
+  public List<JSONObject> parse(byte[] rawMessage) {
+    // grok is transient; re-initialize after deserialization on the worker.
+    if (grok == null) init();
+    List<JSONObject> messages = new ArrayList<>();
+    try {
+      String originalMessage = new String(rawMessage, "UTF-8");
+      Match gm = grok.match(originalMessage);
+      gm.captures();
+      JSONObject message = new JSONObject();
+      message.putAll(gm.toMap());
+      message.put("original_string", originalMessage);
+      for(String timeField: timeFields) {
+        String fieldValue = (String) message.get(timeField);
+        if (fieldValue != null) {
+          message.put(timeField, toEpoch(fieldValue));
+        }
+      }
+      if (timestampField != null) {
+        message.put("timestamp", message.get(timestampField));
+      }
+      // The capture keyed by the pattern label duplicates the whole match.
+      message.remove(patternLabel);
+      messages.add(message);
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      // NOTE(review): returns null rather than an empty list on failure —
+      // callers must null-check; confirm this is the intended contract.
+      return null;
+    }
+    return messages;
+  }
+
+  /** A message is valid only when its "timestamp" is a positive Long. */
+  @Override
+  public boolean validate(JSONObject message) {
+    Object timestampObject = message.get("timestamp");
+    if (timestampObject instanceof Long) {
+      Long timestamp = (Long) timestampObject;
+      if (timestamp > 0) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Converts a formatted date string to epoch milliseconds using the
+   * configured format and time zone. A fresh SimpleDateFormat is created on
+   * every call because SimpleDateFormat is not thread-safe.
+   */
+  private long toEpoch(String datetime) throws ParseException {
+    SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
+    sdf.setTimeZone(timeZone);
+    Date date = sdf.parse(datetime);
+    return date.getTime();
+  }
+
+}


[48/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/java_jdk/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/java_jdk/tasks/main.yml b/deployment/roles/java_jdk/tasks/main.yml
deleted file mode 100644
index 999b9c1..0000000
--- a/deployment/roles/java_jdk/tasks/main.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Check for java at "{{ java_home }}"
-  stat: path="{{ java_home }}"
-  register: jdk_dir
-
-- name: Alternatives link for java
-  alternatives: name={{ item.name }} link={{ item.link }}  path={{ item.path }}
-  with_items:
-    - { name: java, link: /usr/bin/java, path: "{{ java_home }}/bin/java" }
-    - { name: jar, link: /usr/bin/jar, path: "{{ java_home }}/bin/jar" }
-  when: jdk_dir.stat.exists
-
-- name: Install openjdk
-  yum: name={{item}}
-  with_items:
-    - java-1.8.0-openjdk
-    - java-1.8.0-openjdk-devel
-  when: not jdk_dir.stat.exists

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/kafka-broker/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/kafka-broker/defaults/main.yml b/deployment/roles/kafka-broker/defaults/main.yml
deleted file mode 100644
index 351d125..0000000
--- a/deployment/roles/kafka-broker/defaults/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/kafka-broker/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/kafka-broker/meta/main.yml b/deployment/roles/kafka-broker/meta/main.yml
deleted file mode 100644
index 9587e79..0000000
--- a/deployment/roles/kafka-broker/meta/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-dependencies:
-  - libselinux-python

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/kafka-broker/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/kafka-broker/tasks/main.yml b/deployment/roles/kafka-broker/tasks/main.yml
deleted file mode 100644
index db05cb0..0000000
--- a/deployment/roles/kafka-broker/tasks/main.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Retrieve HDP repository definition
-  get_url:
-    url: "{{ hdp_repo_def }}"
-    dest: /etc/yum.repos.d/hdp.repo
-    mode: 0644
-
-- name: Install kafka
-  yum: name={{item}}
-  with_items:
-    - java-1.8.0-openjdk
-    - kafka
-    - zookeeper-server
-
-- name: Create pid directories
-  file: path={{ item }} state=directory mode=0755
-  with_items:
-    - /var/run/zookeeper
-    - /var/run/kafka
-
-- name: Start zookeeper
-  shell: /usr/hdp/current/zookeeper-server/bin/zookeeper-server start
-
-- name: Start kafka
-  shell: /usr/hdp/current/kafka-broker/bin/kafka start

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/kafka-broker/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/kafka-broker/vars/main.yml b/deployment/roles/kafka-broker/vars/main.yml
deleted file mode 100644
index 351d125..0000000
--- a/deployment/roles/kafka-broker/vars/main.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/kafka-client/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/kafka-client/tasks/main.yml b/deployment/roles/kafka-client/tasks/main.yml
deleted file mode 100644
index 1674225..0000000
--- a/deployment/roles/kafka-client/tasks/main.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - name: Retrieve HDP repository definition
-    get_url:
-      url: "{{ hdp_repo_def }}"
-      dest: /etc/yum.repos.d/hdp.repo
-      mode: 0644
-
-  - name: Install kafka
-    yum:
-      name: kafka
-    register: result
-    until: result.rc == 0
-    retries: 5
-    delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/librdkafka/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/librdkafka/defaults/main.yml b/deployment/roles/librdkafka/defaults/main.yml
deleted file mode 100644
index d920883..0000000
--- a/deployment/roles/librdkafka/defaults/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-librdkafka_version: 0.8.6
-librdkafka_url: https://github.com/edenhill/librdkafka/archive/0.8.6.tar.gz
-librdkafka_home: /usr/local

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/librdkafka/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/librdkafka/tasks/dependencies.yml b/deployment/roles/librdkafka/tasks/dependencies.yml
deleted file mode 100644
index 431e861..0000000
--- a/deployment/roles/librdkafka/tasks/dependencies.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install prerequisites
-  yum: name={{ item }}
-  with_items:
-    - cmake
-    - make
-    - gcc
-    - gcc-c++
-    - flex
-    - bison
-    - libpcap
-    - libpcap-devel
-    - openssl-devel
-    - python-devel
-    - swig
-    - zlib-devel
-    - perl
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/librdkafka/tasks/librdkafka.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/librdkafka/tasks/librdkafka.yml b/deployment/roles/librdkafka/tasks/librdkafka.yml
deleted file mode 100644
index 652d319..0000000
--- a/deployment/roles/librdkafka/tasks/librdkafka.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download librdkafka
-  get_url:
-    url: "{{ librdkafka_url }}"
-    dest: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
-
-- name: Extract librdkafka tarball
-  unarchive:
-    src: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: "/tmp/librdkafka-{{ librdkafka_version }}"
-
-- name: Compile and install librdkafka
-  shell: "{{ item }}"
-  args:
-    chdir: "/tmp/librdkafka-{{ librdkafka_version }}"
-    creates: "{{ librdkafka_home }}/lib/librdkafka.so"
-  with_items:
-    - rm -rf build/
-    - "./configure --prefix={{ librdkafka_home }}"
-    - make
-    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/librdkafka/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/librdkafka/tasks/main.yml b/deployment/roles/librdkafka/tasks/main.yml
deleted file mode 100644
index 2144d7f..0000000
--- a/deployment/roles/librdkafka/tasks/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: dependencies.yml
-- include: librdkafka.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/libselinux-python/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/libselinux-python/tasks/main.yml b/deployment/roles/libselinux-python/tasks/main.yml
deleted file mode 100644
index 78f5a27..0000000
--- a/deployment/roles/libselinux-python/tasks/main.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install libselinux-python
-  yum:
-    name: libselinux-python
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_common/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_common/defaults/main.yml b/deployment/roles/metron_common/defaults/main.yml
deleted file mode 100644
index 50aaefd..0000000
--- a/deployment/roles/metron_common/defaults/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-metron_jar_name: Metron-Topologies-{{ metron_version }}.jar
-metron_jar_path: "{{ playbook_dir }}/../../metron-streaming/Metron-Topologies/target/{{ metron_jar_name }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_common/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_common/meta/main.yml b/deployment/roles/metron_common/meta/main.yml
deleted file mode 100644
index 4db50aa..0000000
--- a/deployment/roles/metron_common/meta/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - yum-update
-  - epel
-  - ntp
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_common/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_common/tasks/main.yml b/deployment/roles/metron_common/tasks/main.yml
deleted file mode 100644
index 64e6ab9..0000000
--- a/deployment/roles/metron_common/tasks/main.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Check OS Version
-  fail: msg="Metron deployment supports CentOS 6 only."
-  when: (ansible_distribution != "CentOS" or ansible_distribution_major_version != "6")
-
-- name: Check for Metron jar path
-  stat: path={{ metron_jar_path }}
-  register: metron_jars
-
-- name: Verify Metron jars exist
-  fail: msg="Unable to locate staged Metron jars at {{ metron_jar_path }}.  Did you run 'mvn package'?"
-  when: metron_jars.stat.exists == True
-
-- name: Ensure iptables is stopped and is not running at boot time.
-  ignore_errors: yes
-  service:
-    name: iptables
-    state: stopped
-    enabled: no

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/defaults/main.yml b/deployment/roles/metron_pcapservice/defaults/main.yml
deleted file mode 100644
index 0dbca48..0000000
--- a/deployment/roles/metron_pcapservice/defaults/main.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-metron_version: 0.1BETA
-metron_directory: /usr/metron/{{ metron_version }}
-pcapservice_jar_name: Metron-Pcap_Service-{{ metron_version }}-jar-with-dependencies.jar
-pcapservice_jar_src: "{{ playbook_dir }}/../../metron-streaming/Metron-Pcap_Service/target/{{ pcapservice_jar_name }}"
-pcapservice_jar_dst: "{{ metron_directory }}/lib/{{ pcapservice_jar_name }}"
-pcapservice_port: 8081
-hbase_config_path: "/etc/hbase/conf"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/meta/main.yml b/deployment/roles/metron_pcapservice/meta/main.yml
deleted file mode 100644
index ddf6aa9..0000000
--- a/deployment/roles/metron_pcapservice/meta/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/tasks/config-hbase.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/tasks/config-hbase.yml b/deployment/roles/metron_pcapservice/tasks/config-hbase.yml
deleted file mode 100644
index b77c1ec..0000000
--- a/deployment/roles/metron_pcapservice/tasks/config-hbase.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
- - name: Fetch hbase-site.xml
-   fetch: src=/etc/hbase/conf/hbase-site.xml dest=/tmp/hbase/conf/hbase-site.xml flat=yes
-   delegate_to: "{{ groups.ambari_slave[0] }}"
-
- - name: Create hbase conf directory
-   file: path=/etc/hbase/conf state=directory mode=0755
-
- - name: Copy hbase-site.xml
-   copy: src=/tmp/hbase/conf/hbase-site.xml dest=/etc/hbase/conf/hbase-site.xml mode=0644

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/tasks/main.yml b/deployment/roles/metron_pcapservice/tasks/main.yml
deleted file mode 100644
index ee9cac2..0000000
--- a/deployment/roles/metron_pcapservice/tasks/main.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Check for hbase-site
-  stat: path=/etc/hbase/conf/hbase-site.xml
-  register: hbase_site
-
-- include: config-hbase.yml
-  when: not hbase_site.stat.exists
-
-- include: pcapservice.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/tasks/pcapservice.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/tasks/pcapservice.yml b/deployment/roles/metron_pcapservice/tasks/pcapservice.yml
deleted file mode 100644
index 651f7fb..0000000
--- a/deployment/roles/metron_pcapservice/tasks/pcapservice.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create Metron streaming directories
-  file: path={{ metron_directory }}/{{ item.name }}  state=directory mode=0755
-  with_items:
-      - { name: 'lib'}
-      - { name: 'config'}
-
-- name: Copy Metron pcapservice jar
-  copy:
-    src: "{{ pcapservice_jar_src }}"
-    dest: "{{ pcapservice_jar_dst }}"
-
-- name: Add hbase-site.xml to pcapservice jar
-  shell: "jar -uf {{ pcapservice_jar_dst }} hbase-site.xml"
-  args:
-    chdir: "{{ hbase_config_path }}"
-
-- name: Install service script
-  template: src=pcapservice dest=/etc/init.d/pcapservice mode=0755
-
-- name: Start pcapservice
-  service: name=pcapservice state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_pcapservice/templates/pcapservice
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_pcapservice/templates/pcapservice b/deployment/roles/metron_pcapservice/templates/pcapservice
deleted file mode 100644
index a3ad92b..0000000
--- a/deployment/roles/metron_pcapservice/templates/pcapservice
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# metron pcap service
-# chkconfig: 345 20 80
-# description: Metron PCAP Service Daemon
-# processname: pcapservice
-#
-NAME=pcapservice
-DESC="Metron pcap service"
-PIDFILE=/var/run/$NAME.pid
-SCRIPTNAME=/etc/init.d/$NAME
-LOGFILE="/var/log/metron_pcapservice.log"
-EXTRA_ARGS="${@:2}"
-DAEMON_PATH="/"
-DAEMON="/usr/bin/java"
-DAEMONOPTS="-cp {{ pcapservice_jar_dst }} org.apache.metron.pcapservice.rest.PcapService -port {{ pcapservice_port }}"
-
-case "$1" in
-  start)
-    printf "%-50s" "Starting $NAME..."
-
-    # kick-off the daemon
-    cd $DAEMON_PATH
-    PID=`$DAEMON $DAEMONOPTS >> $LOGFILE 2>&1 & echo $!`
-    if [ -z $PID ]; then
-        printf "%s\n" "Fail"
-    else
-        echo $PID > $PIDFILE
-        printf "%s\n" "Ok"
-    fi
-  ;;
-
-  status)
-    printf "%-50s" "Checking $NAME..."
-    if [ -f $PIDFILE ]; then
-      PID=`cat $PIDFILE`
-      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-        printf "%s\n" "Process dead but pidfile exists"
-      else
-        echo "Running"
-      fi
-    else
-      printf "%s\n" "Service not running"
-    fi
-  ;;
-
-  stop)
-    printf "%-50s" "Stopping $NAME"
-    PID=`cat $PIDFILE`
-    cd $DAEMON_PATH
-    if [ -f $PIDFILE ]; then
-        kill -HUP $PID
-        printf "%s\n" "Ok"
-        rm -f $PIDFILE
-    else
-        printf "%s\n" "pidfile not found"
-    fi
-  ;;
-
-  restart)
-    $0 stop
-    $0 start
-  ;;
-
-  *)
-    echo "Usage: $0 {status|start|stop|restart}"
-    exit 1
-esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/defaults/main.yml b/deployment/roles/metron_streaming/defaults/main.yml
deleted file mode 100644
index f0f605d..0000000
--- a/deployment/roles/metron_streaming/defaults/main.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-metron_jar_name: Metron-Topologies-{{ metron_version }}.jar
-metron_directory: /usr/metron/{{ metron_version }}
-metron_solr_jar_name: Metron-Solr-{{ metron_version }}.jar
-metron_elasticsearch_jar_name: Metron-Elasticsearch-{{ metron_version }}.jar
-
-metron_dataloads_name: Metron-DataLoads-{{ metron_version }}-archive.tar.gz
-metron_dataloads_path: "{{ playbook_dir }}/../../metron-streaming/Metron-DataLoads/target/{{ metron_dataloads_name }}"
-metron_topologies_bundle_name: Metron-Topologies-{{ metron_version }}-archive.tar.gz
-metron_solr_bundle_name: Metron-Solr-{{ metron_version }}-archive.tar.gz
-metron_elasticsearch_bundle_name: Metron-Elasticsearch-{{ metron_version }}-archive.tar.gz
-metron_solr_bundle_path: "{{ playbook_dir }}/../../metron-streaming/Metron-Solr/target/{{ metron_solr_bundle_name }}"
-metron_elasticsearch_bundle_path: "{{ playbook_dir }}/../../metron-streaming/Metron-Elasticsearch/target/{{ metron_elasticsearch_bundle_name }}"
-metron_topologies_bundle_path: "{{ playbook_dir }}/../../metron-streaming/Metron-Topologies/target/{{ metron_topologies_bundle_name }}"
-
-config_path: "{{ metron_directory }}/config"
-zookeeper_config_path: "{{ config_path }}/zookeeper"
-zookeeper_global_config_path: "{{ zookeeper_config_path }}/global.json"
-metron_solr_properties_config_path: "{{ metron_directory }}/config/etc/env/solr.properties"
-metron_elasticsearch_properties_config_path: "{{ metron_directory }}/config/etc/env/elasticsearch.properties"
-hbase_config_path: "/etc/hbase/conf"
-hdfs_config_path: "/etc/hadoop/conf"
-
-threat_intel_bulk_load: True
-threat_intel_bin: "{{ metron_directory }}/bin/threatintel_bulk_load.sh"
-threat_intel_work_dir: /tmp/ti_bulk
-threat_intel_csv_filename: "threat_ip.csv"
-threat_intel_csv_filepath: "../roles/metron_streaming/templates/{{ threat_intel_csv_filename }}"
-
-pycapa_topic: pcap
-bro_topic: bro
-yaf_topic: yaf
-snort_topic: snort
-enrichments_topic: enrichments
-storm_topologies:
-    - "{{ metron_directory }}/config/topologies/bro/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/snort/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/yaf/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/pcap/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/enrichment/remote.yaml"
-
-hdfs_retention_days: 30
-hdfs_bro_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/bro_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/bro-purge/cron-hdfs-bro-purge.log 2>&1"
-hdfs_yaf_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/yaf_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/yaf-purge/cron-hdfs-yaf-purge.log 2>&1"
-hdfs_snort_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/snort_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/yaf-purge/cron-hdfs-yaf-purge.log 2>&1"
-
-elasticsearch_config_path: /etc/elasticsearch
-elasticsearch_cluster_name: metron
-elasticsearch_transport_port: 9300
-
-es_retention_days: 30
-es_bro_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p bro_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/bro-purge/cron-es-bro-purge.log 2>&1"
-es_yaf_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p yaf_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/yaf-purge/cron-es-yaf-purge.log 2>&1"
-es_snort_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p yaf_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/snort-purge/cron-es-snort-purge.log 2>&1"
-
-metron_hdfs_output_dir: "/apps/metron"
-metron_hdfs_rotation_policy: org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy
-metron_hdfs_rotation_policy_count: 1
-metron_hdfs_rotation_policy_units: DAYS

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/config/sensors/bro.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/config/sensors/bro.json b/deployment/roles/metron_streaming/files/config/sensors/bro.json
deleted file mode 100644
index 2b534b4..0000000
--- a/deployment/roles/metron_streaming/files/config/sensors/bro.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "index": "bro",
-  "batchSize": 5,
-  "enrichmentFieldMap":
-  {
-    "geo": ["ip_dst_addr", "ip_src_addr"],
-    "host": ["host"]
-  },
-  "threatIntelFieldMap":
-  {
-    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
-  },
-  "fieldToThreatIntelTypeMap":
-  {
-    "ip_src_addr" : ["malicious_ip"],
-    "ip_dst_addr" : ["malicious_ip"]
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/config/sensors/pcap.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/config/sensors/pcap.json b/deployment/roles/metron_streaming/files/config/sensors/pcap.json
deleted file mode 100644
index 7792165..0000000
--- a/deployment/roles/metron_streaming/files/config/sensors/pcap.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "index": "pcap",
-  "batchSize": 5,
-  "enrichmentFieldMap":
-  {
-    "geo": ["ip_src_addr", "ip_dst_addr"],
-    "host": ["ip_src_addr", "ip_dst_addr"]
-  },
-  "threatIntelFieldMap":
-  {
-    "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
-  },
-  "fieldToThreatIntelTypeMap":
-  {
-    "ip_dst_addr" : [ "malicious_ip" ]
-    ,"ip_src_addr" : [ "malicious_ip" ]
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/config/sensors/snort.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/config/sensors/snort.json b/deployment/roles/metron_streaming/files/config/sensors/snort.json
deleted file mode 100644
index c5b6dcc..0000000
--- a/deployment/roles/metron_streaming/files/config/sensors/snort.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "index": "snort",
-  "batchSize": 1,
-  "enrichmentFieldMap":
-  {
-    "geo": ["ip_dst_addr", "ip_src_addr"],
-    "host": ["host"]
-  },
- "threatIntelFieldMap":
-  {
-    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
-  },
-  "fieldToThreatIntelTypeMap":
-  {
-    "ip_src_addr" : ["malicious_ip"],
-    "ip_dst_addr" : ["malicious_ip"]
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/config/sensors/yaf.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/config/sensors/yaf.json b/deployment/roles/metron_streaming/files/config/sensors/yaf.json
deleted file mode 100644
index 2b46c9a..0000000
--- a/deployment/roles/metron_streaming/files/config/sensors/yaf.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "index": "yaf",
-  "batchSize": 5,
-  "enrichmentFieldMap":
-  {
-    "geo": ["ip_dst_addr", "ip_src_addr"],
-    "host": ["host"]
-  },
-  "threatIntelFieldMap":
-  {
-    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
-  },
-  "fieldToThreatIntelTypeMap":
-  {
-    "ip_src_addr" : ["malicious_ip"],
-    "ip_dst_addr" : ["malicious_ip"]
-  }
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/extractor.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/extractor.json b/deployment/roles/metron_streaming/files/extractor.json
deleted file mode 100644
index 545202a..0000000
--- a/deployment/roles/metron_streaming/files/extractor.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "config": {
-    "columns": {
-      "ip": 0
-    },
-    "indicator_column": "ip",
-    "type" : "malicious_ip",
-    "separator": ","
-  },
-  "extractor": "CSV"
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/files/yaf_index.template
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/files/yaf_index.template b/deployment/roles/metron_streaming/files/yaf_index.template
deleted file mode 100644
index c8c1702..0000000
--- a/deployment/roles/metron_streaming/files/yaf_index.template
+++ /dev/null
@@ -1,36 +0,0 @@
-{
-    "template" : "yaf_index*",
-    "mappings" : {
-        "yaf_doc" : {
-            "properties": {
-                "message": {
-                    "properties": {
-                        "@timestamp":{"type":"date","format":"dateOptionalTime"},
-                        "end-time":{"type":"string"},
-                        "duration":{"type":"string"},
-                        "rtt":{"type":"string"},
-                        "proto":{"type":"string"},
-                        "sip":{"type":"string"},
-                        "sp":{"type":"string"},
-                        "dip":{"type":"string"},
-                        "dp":{"type":"string"},
-                        "iflags":{"type":"string"},
-                        "uflags":{"type":"string"},
-                        "riflags":{"type":"string"},
-                        "ruflags":{"type":"string"},
-                        "isn":{"type":"string"},
-                        "risn":{"type":"string"},
-                        "tag":{"type":"string"},
-                        "rtag":{"type":"string"},
-                        "pkt":{"type":"string"},
-                        "oct":{"type":"string"},
-                        "rpkt":{"type":"string"},
-                        "roct":{"type":"string"},
-                        "app":{"type":"string"},
-                        "end-reason":{"type":"string"}
-                    }
-                }
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/handlers/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/handlers/main.yml b/deployment/roles/metron_streaming/handlers/main.yml
deleted file mode 100644
index 5aee8b4..0000000
--- a/deployment/roles/metron_streaming/handlers/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Load Config
-  shell: "{{ metron_directory }}/bin/zk_load_configs.sh -p {{ zookeeper_config_path }} -z {{ zookeeper_url }} && touch {{ zookeeper_config_path }}/configured"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/meta/main.yml b/deployment/roles/metron_streaming/meta/main.yml
deleted file mode 100644
index 6820b2c..0000000
--- a/deployment/roles/metron_streaming/meta/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/es_purge.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/es_purge.yml b/deployment/roles/metron_streaming/tasks/es_purge.yml
deleted file mode 100644
index 22616ca..0000000
--- a/deployment/roles/metron_streaming/tasks/es_purge.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create Empty Log Files for ES Purge
-  file:
-    path: "{{ item }}"
-    state: touch
-    owner: hdfs
-    group: hdfs
-    mode: 0644
-  with_items:
-    - /var/log/bro-purge/cron-es-bro-purge.log
-    - /var/log/yaf-purge/cron-es-yaf-purge.log
-    - /var/log/snort-purge/cron-es-snort-purge.log
-
-
-- name: Purge Elasticsearch Indices every 30 days.
-  cron:
-    name: "{{ item.name }}"
-    job: "{{ item.job }}"
-    special_time: daily
-    user: hdfs
-  with_items:
-    - { name: "bro_es_purge", job:  "{{ es_bro_purge_cronjob }}" }
-    - { name: "yaf_es_purge", job: "{{ es_yaf_purge_cronjob }}" }
-    - { name: "snort_es_purge", job: "{{ es_snort_purge_cronjob }}" }
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/grok_upload.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/grok_upload.yml b/deployment/roles/metron_streaming/tasks/grok_upload.yml
deleted file mode 100644
index ef560f0..0000000
--- a/deployment/roles/metron_streaming/tasks/grok_upload.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create HDFS directory for grok patterns
-  command: hdfs dfs -mkdir -p {{ metron_hdfs_output_dir }}/patterns
-  become: yes
-  become_user: hdfs
-
-- name: Assign hfds user as owner of  {{ metron_hdfs_output_dir }}/patterns HDFS directory
-  command: hdfs dfs -chown -R hdfs:hadoop {{ metron_hdfs_output_dir }}/patterns
-  become: yes
-  become_user: hdfs
-
-- name: Assign permissions of HDFS {{ metron_hdfs_output_dir }}/patterns directory
-  command: hdfs dfs -chmod -R 775 {{ metron_hdfs_output_dir }}/patterns
-  become: yes
-  become_user: hdfs
-
-- name: Upload Grok Patterns to hdfs://{{ metron_hdfs_output_dir }}
-  command: hdfs dfs -put -f {{ metron_directory }}/config/patterns  {{ metron_hdfs_output_dir }}
-  become: yes
-  become_user: hdfs
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml b/deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
deleted file mode 100644
index 252e671..0000000
--- a/deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create root user HDFS directory
-  command: hdfs dfs -mkdir -p /user/root
-  become: yes
-  become_user: hdfs
-
-- name: Assign root as owner of /user/root HDFS directory
-  command: hdfs dfs -chown root:root /user/root
-  become: yes
-  become_user: hdfs
-
-- name: Create Metron HDFS output directory
-  command: hdfs dfs -mkdir -p {{ metron_hdfs_output_dir }}
-  become: yes
-  become_user: hdfs
-
-- name: Assign hdfs as owner of HDFS output directory
-  command: hdfs dfs -chown hdfs:hadoop {{ metron_hdfs_output_dir }}
-  become: yes
-  become_user: hdfs
-
-- name: Assign permissions of HDFS output directory
-  command: hdfs dfs -chmod 775 {{ metron_hdfs_output_dir }}
-  become: yes
-  become_user: hdfs
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/hdfs_purge.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/hdfs_purge.yml b/deployment/roles/metron_streaming/tasks/hdfs_purge.yml
deleted file mode 100644
index 33442e4..0000000
--- a/deployment/roles/metron_streaming/tasks/hdfs_purge.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create Log Directories for HDFS Purge
-  file:
-      path: "{{ item }}"
-      state: directory
-      mode: 0755
-      owner: hdfs
-      group: hdfs
-  with_items:
-    - /var/log/bro-purge
-    - /var/log/yaf-purge
-    - /var/log/snort-purge
-
-- name: Create Empty Log Files for HDFS Purge
-  file:
-    path: "{{ item }}"
-    state: touch
-    owner: hdfs
-    group: hdfs
-    mode: 0644
-  with_items:
-    - /var/log/bro-purge/cron-hdfs-bro-purge.log
-    - /var/log/yaf-purge/cron-hdfs-yaf-purge.log
-    - /var/log/snort-purge/cron-hdfs-snort-purge.log
-
-- name: Purge HDFS Sensor Data every 30 days.
-  cron:
-    name: "{{ item.name }}"
-    job: "{{ item.job }}"
-    special_time: daily
-    user: hdfs
-  with_items:
-    - { name: "bro_hdfs_purge", job:  "{{ hdfs_bro_purge_cronjob }}" }
-    - { name: "yaf_hdfs_purge", job: "{{ hdfs_yaf_purge_cronjob }}" }
-    - { name: "snort_hdfs_purge", job: "{{ hdfs_snort_purge_cronjob }}" }
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/main.yml b/deployment/roles/metron_streaming/tasks/main.yml
deleted file mode 100644
index e076645..0000000
--- a/deployment/roles/metron_streaming/tasks/main.yml
+++ /dev/null
@@ -1,136 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create Metron streaming directories
-  file: path="{{ metron_directory }}/{{ item.name }}"  state=directory mode=0755
-  with_items:
-      - { name: 'lib'}
-      - { name: 'bin'}
-      - { name: 'config'}
-
-
-- name: Copy Metron Solr bundle
-  copy:
-    src: "{{ metron_solr_bundle_path }}"
-    dest: "{{ metron_directory }}"
-
-- name: Copy Metron Elasticsearch bundle
-  copy:
-    src: "{{ metron_elasticsearch_bundle_path }}"
-    dest: "{{ metron_directory }}"
-
-- name: Copy Metron Topologies bundle
-  copy:
-    src: "{{ metron_topologies_bundle_path }}"
-    dest: "{{ metron_directory }}"
-
-- name: Copy Metron DataLoads bundle
-  copy:
-    src: "{{ metron_dataloads_path }}"
-    dest: "{{ metron_directory }}"
-
-- name: Unbundle Metron bundles
-  shell: cd {{ metron_directory }} && tar xzvf Metron-Solr*.tar.gz && tar xzvf Metron-Elasticsearch*.tar.gz && tar xzvf Metron-Topologies*.tar.gz && tar xzvf Metron-DataLoads*.tar.gz && rm *.tar.gz
-
-- name: Add *-site.xml files to topology jars
-  shell: cd {{ item.config_path }} && jar -uf {{ metron_directory }}/lib/{{ item.jar_name }} {{ item.file_name }}
-  with_items:
-      - { config_path: "{{ hbase_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "hbase-site.xml" }
-      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "core-site.xml" }
-      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "hdfs-site.xml" }
-      - { config_path: "{{ hbase_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "hbase-site.xml" }
-      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "core-site.xml" }
-      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "hdfs-site.xml" }
-
-- name: Get Default mysql passowrd
-  include_vars: "../roles/mysql_server/defaults/main.yml"
-  when: mysql_root_password is undefined
-
-- include: hdfs_filesystem.yml
-  run_once: true
-
-- include: grok_upload.yml
-  run_once: true
-
-- name: Configure Metron Solr topologies
-  lineinfile: >
-    dest={{ metron_solr_properties_config_path }}
-    regexp="{{ item.regexp }}"
-    line="{{ item.line }}"
-  with_items:
-    - { regexp: "kafka.zk=", line: "kafka.zk={{ zookeeper_url }}" }
-    - { regexp: "kafka.broker=", line: "kafka.broker={{ kafka_broker_url }}" }
-    - { regexp: "es.ip=", line: "es.ip={{ groups.search[0] }}" }
-    - { regexp: "es.port=", line: "es.port={{ elasticsearch_transport_port }}" }
-    - { regexp: "es.clustername=", line: "es.clustername={{ elasticsearch_cluster_name }}" }
-    - { regexp: "bolt.hdfs.file.system.url=", line: "bolt.hdfs.file.system.url={{ hdfs_url }}" }
-    - { regexp: "spout.kafka.topic.pcap=", line: "spout.kafka.topic.pcap={{ pycapa_topic }}" }
-    - { regexp: "spout.kafka.topic.bro=", line: "spout.kafka.topic.bro={{ bro_topic }}" }
-    - { regexp: "bolt.hbase.table.name=", line: "bolt.hbase.table.name={{ pcap_hbase_table }}" }
-    - { regexp: "threat.intel.tracker.table=", line: "threat.intel.tracker.table={{ tracker_hbase_table }}" }
-    - { regexp: "threat.intel.tracker.cf=", line: "threat.intel.tracker.cf=t" }
-    - { regexp: "threat.intel.simple.hbase.table=", line: "threat.intel.simple.hbase.table={{ threatintel_hbase_table }}" }
-    - { regexp: "threat.intel.simple.hbase.cf=", line: "threat.intel.simple.hbase.cf=t" }
-    - { regexp: "enrichment.simple.hbase.table=", line: "enrichment.simple.hbase.table={{ enrichment_hbase_table }}" }
-    - { regexp: "enrichment.simple.hbase.cf=", line: "enrichment.simple.hbase.cf=t" }
-    - { regexp: "mysql.ip=", line: "mysql.ip={{ groups.mysql[0] }}" }
-    - { regexp: "mysql.password=", line: "mysql.password={{ mysql_root_password }}" }
-    - { regexp: "index.hdfs.output=", line: "index.hdfs.output={{ metron_hdfs_output_dir }}/enrichment/indexed" }
-    - { regexp: "bolt.hdfs.rotation.policy=", line: "bolt.hdfs.rotation.policy={{ metron_hdfs_rotation_policy }}" }
-    - { regexp: "bolt.hdfs.rotation.policy.count=", line: "bolt.hdfs.rotation.policy.count={{ metron_hdfs_rotation_policy_count}}" }
-    - { regexp: "bolt.hdfs.rotation.policy.units=", line: "bolt.hdfs.rotation.policy.units={{ metron_hdfs_rotation_policy_units }}" }
-
-- name: Configure Metron Elasticsearch topologies
-  lineinfile: >
-    dest={{ metron_elasticsearch_properties_config_path }}
-    regexp="{{ item.regexp }}"
-    line="{{ item.line }}"
-  with_items:
-    - { regexp: "kafka.zk=", line: "kafka.zk={{ zookeeper_url }}" }
-    - { regexp: "kafka.broker=", line: "kafka.broker={{ kafka_broker_url }}" }
-    - { regexp: "es.ip=", line: "es.ip={{ groups.search[0] }}" }
-    - { regexp: "es.port=", line: "es.port={{ elasticsearch_transport_port }}" }
-    - { regexp: "es.clustername=", line: "es.clustername={{ elasticsearch_cluster_name }}" }
-    - { regexp: "bolt.hdfs.file.system.url=", line: "bolt.hdfs.file.system.url={{ hdfs_url }}" }
-    - { regexp: "spout.kafka.topic.pcap=", line: "spout.kafka.topic.pcap={{ pycapa_topic }}" }
-    - { regexp: "spout.kafka.topic.bro=", line: "spout.kafka.topic.bro={{ bro_topic }}" }
-    - { regexp: "bolt.hbase.table.name=", line: "bolt.hbase.table.name={{ pcap_hbase_table }}" }
-    - { regexp: "threat.intel.tracker.table=", line: "threat.intel.tracker.table={{ tracker_hbase_table }}" }
-    - { regexp: "threat.intel.tracker.cf=", line: "threat.intel.tracker.cf=t" }
-    - { regexp: "threat.intel.simple.hbase.table=", line: "threat.intel.simple.hbase.table={{ threatintel_hbase_table }}" }
-    - { regexp: "threat.intel.simple.hbase.cf=", line: "threat.intel.simple.hbase.cf=t" }
-    - { regexp: "enrichment.simple.hbase.table=", line: "enrichment.simple.hbase.table={{ enrichment_hbase_table }}" }
-    - { regexp: "enrichment.simple.hbase.cf=", line: "enrichment.simple.hbase.cf=t" }
-    - { regexp: "mysql.ip=", line: "mysql.ip={{ groups.mysql[0] }}" }
-    - { regexp: "mysql.password=", line: "mysql.password={{ mysql_root_password }}" }
-    - { regexp: "index.hdfs.output=", line: "index.hdfs.output={{ metron_hdfs_output_dir }}/enrichment/indexed" }
-    - { regexp: "bolt.hdfs.rotation.policy=", line: "bolt.hdfs.rotation.policy={{ metron_hdfs_rotation_policy }}" }
-    - { regexp: "bolt.hdfs.rotation.policy.count=", line: "bolt.hdfs.rotation.policy.count={{ metron_hdfs_rotation_policy_count}}" }
-    - { regexp: "bolt.hdfs.rotation.policy.units=", line: "bolt.hdfs.rotation.policy.units={{ metron_hdfs_rotation_policy_units }}" }
-
-- include: source_config.yml
-  run_once: true
-
-- include: threat_intel.yml
-  run_once: true
-  when: threat_intel_bulk_load == True
-
-- include: metron_topology.yml
-
-- include: hdfs_purge.yml
-
-- include: es_purge.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/metron_topology.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/metron_topology.yml b/deployment/roles/metron_streaming/tasks/metron_topology.yml
deleted file mode 100644
index f8bf539..0000000
--- a/deployment/roles/metron_streaming/tasks/metron_topology.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-
-- name: Submit Solr Metron topologies
-  command: storm jar {{ metron_directory }}/lib/{{ metron_solr_jar_name }} org.apache.storm.flux.Flux  --filter {{ metron_solr_properties_config_path }} --remote {{ item }}
-  with_items:
-      - "{{ storm_topologies }}"
-  when: install_solr | default(False) == True
-
-- name: Submit Elasticsearch Metron topologies
-  command: storm jar {{ metron_directory }}/lib/{{ metron_elasticsearch_jar_name }} org.apache.storm.flux.Flux  --filter {{ metron_elasticsearch_properties_config_path }} --remote {{ item }}
-  with_items:
-      - "{{ storm_topologies }}"
-  when: install_elasticsearch | default(False) == True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/source_config.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/source_config.yml b/deployment/roles/metron_streaming/tasks/source_config.yml
deleted file mode 100644
index 959056c..0000000
--- a/deployment/roles/metron_streaming/tasks/source_config.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create Source Config Directory
-  file:
-    path: "{{ zookeeper_config_path }}"
-    state: directory
-
-- name: Copy Elasticsearch Global Config File
-  template:
-    src: "templates/config/elasticsearch.global.json"
-    dest: "{{ zookeeper_global_config_path }}"
-    mode: 0644
-  when: install_elasticsearch | default(False) == True
-
-- name: Copy Solr Global Config File
-  template:
-    src: "../roles/metron_streaming/templates/config/solr.global.json"
-    dest: "{{ zookeeper_global_config_path }}"
-    mode: 0644
-  when: install_solr | default(False) == True
-
-- name: Copy Sensor Config Files
-  copy:
-    src: "{{ item }}"
-    dest: "{{ zookeeper_config_path }}"
-    mode: 0644
-  with_items:
-    - ../roles/metron_streaming/files/config/
-  notify: Load Config
-
-
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/tasks/threat_intel.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/tasks/threat_intel.yml b/deployment/roles/metron_streaming/tasks/threat_intel.yml
deleted file mode 100644
index 6dd8144..0000000
--- a/deployment/roles/metron_streaming/tasks/threat_intel.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-
-- name: Create Bulk load working Directory
-  file:
-    path: "{{ threat_intel_work_dir }}"
-    state: directory
-
-- name: Copy extractor.json to {{ inventory_hostname }}
-  copy:
-    src: ../roles/metron_streaming/files/extractor.json
-    dest: "{{  threat_intel_work_dir }}"
-    mode: 0644
-
-- name: Copy Bulk Load CSV File
-  template:
-    src: "{{ threat_intel_csv_filepath }}"
-    dest: "{{ threat_intel_work_dir }}/{{ threat_intel_csv_filename }}"
-    mode: 0644
-
-- name: Copy Bulk Load CSV File to HDFS
-  command: "hdfs dfs -put {{ threat_intel_work_dir }}/{{ threat_intel_csv_filename }} ."
-
-- name: Run Threat Intel Bulk Load
-  shell: "{{ threat_intel_bin }} -f t --table {{threatintel_hbase_table}} -e {{ threat_intel_work_dir }}/extractor.json  -i /user/root && touch {{ threat_intel_work_dir }}/loaded"
-  args:
-    creates: "{{ threat_intel_work_dir }}/loaded"
-
-- name: Clean up HDFS File
-  command: "hdfs dfs -rm {{ threat_intel_csv_filename }}"
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/templates/config/elasticsearch.global.json b/deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
deleted file mode 100644
index 8177102..0000000
--- a/deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "es.clustername": "{{ elasticsearch_cluster_name }}",
-  "es.ip": "{{ groups.search[0] }}",
-  "es.port": "{{ elasticsearch_transport_port }}",
-  "es.date.format": "yyyy.MM.dd.HH"
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/templates/config/solr.global.json
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/templates/config/solr.global.json b/deployment/roles/metron_streaming/templates/config/solr.global.json
deleted file mode 100644
index 5cb7a4d..0000000
--- a/deployment/roles/metron_streaming/templates/config/solr.global.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
-  "solr.zookeeper": "{{ zookeeper_url }}",
-  "solr.collection": "{{ solr_collection_name }}",
-  "solr.numShards": {{ solr_number_shards }},
-  "solr.replicationFactor": {{ solr_replication_factor }}
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_streaming/templates/threat_ip.csv
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_streaming/templates/threat_ip.csv b/deployment/roles/metron_streaming/templates/threat_ip.csv
deleted file mode 100644
index 3ac38f3..0000000
--- a/deployment/roles/metron_streaming/templates/threat_ip.csv
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#Add single column of ip address to alert
-#Public lists are available on the internet
-# example: 
-23.113.113.105
-24.107.205.249
-24.108.62.255
-24.224.153.71
-27.4.1.212
-27.131.149.102
-31.24.30.31
-31.131.251.33
-31.186.99.250
-31.192.209.119
-31.192.209.150
-31.200.244.17
-37.34.52.185
-37.58.112.101
-37.99.146.27
-37.128.132.96
-37.140.195.177
-37.140.199.100

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_ui/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_ui/defaults/main.yml b/deployment/roles/metron_ui/defaults/main.yml
deleted file mode 100644
index 23aed40..0000000
--- a/deployment/roles/metron_ui/defaults/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-metron_version: 0.1BETA
-metron_directory: /usr/metron/{{ metron_version }}
-metron_ui_directory: "{{ metron_directory }}/metron-ui"
-metron_temp_archive: /tmp/metron-ui.tar.gz

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_ui/tasks/copy-source.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_ui/tasks/copy-source.yml b/deployment/roles/metron_ui/tasks/copy-source.yml
deleted file mode 100644
index 703b7f9..0000000
--- a/deployment/roles/metron_ui/tasks/copy-source.yml
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Verify {{ metron_ui_directory }} exists
-  file:
-    path: "{{ metron_ui_directory }}"
-    state: directory
-    mode: 0755
-    owner: root
-    group: root
-
-- name: Archive metron-ui on localhost
-  shell: tar --exclude='./node_modules' -czf {{ metron_temp_archive }} .
-  args:
-    chdir: "{{ playbook_dir }}/../../metron-ui"
-    creates: "{{ metron_temp_archive }}"
-    warn: false    #Warns to use unarchive - unarchive does not archive
-  become: false
-  delegate_to: localhost
-  run_once: true
-
-- name: Extract metron-ui tarball
-  unarchive:
-    src: "{{ metron_temp_archive }}"
-    dest: "{{ metron_ui_directory }}"
-    creates: "{{ metron_ui_directory}}/config"
-
-- name: Delete {{ metron_temp_archive }}
-  local_action: file path="{{ metron_temp_archive }}" state=absent
-  become: false
-  run_once: true
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/metron_ui/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/metron_ui/tasks/main.yml b/deployment/roles/metron_ui/tasks/main.yml
deleted file mode 100644
index fd3422b..0000000
--- a/deployment/roles/metron_ui/tasks/main.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install Metron UI dependencies
-  yum:
-    pkg: "{{ item }}"
-    state: installed
-  with_items:
-      - libpcap-devel
-      - wireshark
-      - nodejs
-      - npm
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- include: copy-source.yml
-
-- name: Configure Metron UI
-  lineinfile:
-    dest="{{ metron_ui_directory }}/config.json"
-    regexp="{{ item.regexp }}"
-    line="{{ item.line }}"
-    state=present
-  with_items:
-    - { regexp: '"elasticsearch":', line: '"elasticsearch": { "url": "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}" },' }
-    - { regexp: '"pcap":', line: '  "pcap": { "url": "http://{{ groups.web[0] }}:{{ pcapservice_port }}/pcapGetter","mock": false }' }
-
-- name: Install Node dependencies
-  npm:
-    name: pm2
-    path: "{{ metron_ui_directory }}"
-    global: true
-
-- name: Install Metron UI
-  npm:
-    path: "{{ metron_ui_directory }}"
-    production: no
-
-- name: Start Metron UI
-  shell: "pm2 start {{ metron_ui_directory }}/lib/metron-ui.js --name metron"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_client/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_client/tasks/main.yml b/deployment/roles/mysql_client/tasks/main.yml
deleted file mode 100644
index 8c54c23..0000000
--- a/deployment/roles/mysql_client/tasks/main.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-
-- name: Get Default mysql passowrd
-  include_vars: "../roles/mysql_server/defaults/main.yml"
-  when: mysql_root_password is undefined
-
-- name: Allow remote login to mysql
-  template:
-    src: "../roles/mysql_client/templates/db_config.sql"
-    dest: "/tmp/{{ansible_fqdn}}.sql"
-  delegate_to: "{{ groups.mysql[0] }}"
-
-- name: Import DB_Config
-  mysql_db:
-    name: "all"
-    state: "import"
-    target: "/tmp/{{ansible_fqdn}}.sql"
-  ignore_errors: True
-  delegate_to: "{{ groups.mysql[0] }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_client/templates/db_config.sql
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_client/templates/db_config.sql b/deployment/roles/mysql_client/templates/db_config.sql
deleted file mode 100644
index c407a13..0000000
--- a/deployment/roles/mysql_client/templates/db_config.sql
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
-
-CREATE USER 'root'@'{{ ansible_fqdn }}' IDENTIFIED BY '{{ mysql_root_password }}';
-SET PASSWORD FOR 'root'@'{{ ansible_fqdn }}' = PASSWORD('{{ mysql_root_password }}');
-GRANT ALL PRIVILEGES ON *.* to 'root'@'{{ ansible_fqdn }}' WITH GRANT OPTION;
-FLUSH PRIVILEGES;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_server/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_server/defaults/main.yml b/deployment/roles/mysql_server/defaults/main.yml
deleted file mode 100644
index 0acbd17..0000000
--- a/deployment/roles/mysql_server/defaults/main.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-mysql_rpm_version: mysql57-community-release-el6-7.noarch
-mysql_yum_repo_url: https://dev.mysql.com/get/{{ mysql_rpm_version }}.rpm
-mysql_root_password: P@ssw0rd

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_server/files/geoip_ddl.sql
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_server/files/geoip_ddl.sql b/deployment/roles/mysql_server/files/geoip_ddl.sql
deleted file mode 100644
index 02616c6..0000000
--- a/deployment/roles/mysql_server/files/geoip_ddl.sql
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- */
-CREATE DATABASE IF NOT EXISTS GEO;
-
-USE GEO;
-
-DROP TABLE IF EXISTS `blocks`;
-CREATE TABLE  `blocks` ( `startIPNum` int(10) unsigned NOT NULL,`endIPNum` int(10) unsigned NOT NULL,`locID`
-int(10) unsigned NOT NULL, PRIMARY KEY  (`startIPNum`,`endIPNum`) )
-ENGINE=MyISAM DEFAULT CHARSET=latin1 PACK_KEYS=1 DELAY_KEY_WRITE=1;
-
-DROP TABLE IF EXISTS `location`;
-CREATE TABLE  `location` (`locID` int(10) unsigned NOT NULL,`country` char(2) default NULL,`region` char(2)
- default NULL,`city` varchar(45) default NULL,`postalCode` char(7) default NULL,`latitude` double default
-NULL,`longitude` double default NULL,`dmaCode` char(3) default NULL,`areaCode` char(3) default NULL,PRIMARY KEY
-  (`locID`),KEY `Index_Country` (`country`) ) ENGINE=MyISAM DEFAULT CHARSET=latin1 ROW_FORMAT=FIXED;
-
-load data infile '/var/lib/mysql-files/GeoLiteCity-Blocks.csv'  into table `blocks`  fields terminated by ',' optionally enclosed by '"'  lines terminated by '\n' ignore 2 lines;
-load data infile '/var/lib/mysql-files/GeoLiteCity-Location.csv'  into table `location`  fields terminated by ',' optionally enclosed by '"'  lines terminated by '\n' ignore 2 lines;
-
-
-DELIMITER $$
-DROP FUNCTION IF EXISTS `IPTOLOCID` $$
-CREATE FUNCTION `IPTOLOCID`( ip VARCHAR(15)) RETURNS int(10) unsigned
-  BEGIN
-    DECLARE ipn INTEGER UNSIGNED;
-    DECLARE locID_var INTEGER;
-    IF ip LIKE '192.168.%' OR ip LIKE '10.%' THEN RETURN 0;
-    END IF;
-    SET ipn = INET_ATON(ip);
-    SELECT locID INTO locID_var FROM `blocks` INNER JOIN (SELECT MAX(startIPNum) AS start FROM `blocks` WHERE startIPNum <= ipn) AS s ON (startIPNum = s.start) WHERE endIPNum >= ipn;
-    RETURN locID_var;
-  END
-$$
-DELIMITER ;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_server/handlers/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_server/handlers/main.yml b/deployment/roles/mysql_server/handlers/main.yml
deleted file mode 100644
index 112c5ca..0000000
--- a/deployment/roles/mysql_server/handlers/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: restart elasticsearch
-  service: name=elasticsearch state=restarted


[50/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/README.md
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/README.md b/deployment/amazon-ec2/README.md
deleted file mode 100644
index b4dcc6f..0000000
--- a/deployment/amazon-ec2/README.md
+++ /dev/null
@@ -1,211 +0,0 @@
-Apache Metron on Amazon EC2
-===========================
-
-This project fully automates the provisioning of Apache Metron on Amazon EC2 infrastructure.  Starting with only your Amazon EC2 credentials, this project will create a fully-functioning, end-to-end, multi-node cluster running Apache Metron.
-
-Getting Started
----------------
-
-### Prerequisites
-
-The host that will drive the provisioning process will need to have [Ansible](https://github.com/ansible/ansible), Python and PIP installed.  In most cases, a development laptop serves this purpose just fine.  Also, install the Python library `boto` and its dependencies.  
-
-```
-pip install boto six
-```
-
-Ensure that an SSH key has been generated and stored at `~/.ssh/id_rsa.pub`.  In most cases this key will already exist and no further action will be needed.
-
-### Create User
-
-1. Use Amazon's [Identity and Access Management](https://console.aws.amazon.com/iam/) tool to create a user account by navigating to `Users > Create New User`.  
-
-2. Grant the user permission by clicking on `Permissions > Attach Policy` and add the following policies.
-
-  ```
-  AmazonEC2FullAccess
-  AmazonVPCFullAccess
-  ```
-
-3. Create an access key for the user by clicking on `Security Credentials > Create Access Key`.  Save the provided access key values in a safe place.  These values cannot be retrieved from the web console at a later time.
-
-4. Use the access key by exporting its values to the shell's environment.  This allows Ansible to authenticate with Amazon EC2.  For example:
-
-  ```
-  export AWS_ACCESS_KEY_ID="AKIAI6NRFEO27E5FFELQ"
-  export AWS_SECRET_ACCESS_KEY="vTDydWJQnAer7OWauUS150i+9Np7hfCXrrVVP6ed"
-  ```
-
-### Deploy Metron
-
-1. Ensure that Metron's streaming topology uber-jar has been built.
-
-  ```
-  cd ../../metron-streaming
-  mvn clean package -DskipTests
-  ```
-
-2. Start the Metron playbook.  A full Metron deployment can consume up to 60 minutes.  Grab a coffee, relax and practice mindfulness meditation.  If the playbook fails mid-stream for any reason, simply re-run it.  
-
-  ```
-  export EC2_INI_PATH=conf/ec2.ini
-  ansible-playbook -i ec2.py playbook.yml
-  ```
-
-### Explore Metron
-
-1. After the deployment has completed successfully, a message like the following will be displayed.  Navigate to the specified resources to explore your newly minted Apache Metron environment.
-
-  ```
-  TASK [debug] *******************************************************************
-  ok: [localhost] => {
-      "Success": [
-          "Apache Metron deployed successfully",
-          "   Metron  @  http://ec2-52-37-255-142.us-west-2.compute.amazonaws.com:5000",
-          "   Ambari  @  http://ec2-52-37-225-202.us-west-2.compute.amazonaws.com:8080",
-          "   Sensors @  ec2-52-37-225-202.us-west-2.compute.amazonaws.com on tap0",
-          "For additional information, see https://metron.incubator.apache.org/'"
-      ]
-  }
-  ```
-
-2. Each of the provisioned hosts will be accessible from the internet. Connecting to one over SSH as the user `centos` will not require a password as it will authenticate with the pre-defined SSH key.  
-
-  ```
-  ssh centos@ec2-52-91-215-174.compute-1.amazonaws.com
-  ```
-
-Advanced Usage
---------------
-
-### Multiple Environments
-
-This process can support provisioning of multiple, isolated environments.  Simply change the `env` settings in `conf/defaults.yml`.  For example, you might provision separate development, test, and production environments.
-
-```
-env: metron-test
-```
-
-### Selective Provisioning
-
-To provision only subsets of the entire Metron deployment, Ansible tags can be specified.  For example, to only deploy the sensors on an Amazon EC2 environment, run the following command.
-
-```
-ansible-playbook -i ec2.py playbook.yml --tags "ec2,sensors"
-```
-
-### Custom SSH Key
-
-
-By default, the playbook will attempt to register your public SSH key `~/.ssh/id_rsa.pub` with each provisioned host.  This enables Ansible to communicate with each host using an SSH connection.  If would prefer to use another key simply add the path to the public key file to the `key_file` property in `conf/defaults.yml`.
-
-For example, generate a new SSH key for Metron that will be stored at `~/.ssh/my-metron-key`.
-
-```
-$ ssh-keygen -q -f ~/.ssh/my-metron-key
-Enter passphrase (empty for no passphrase):
-Enter same passphrase again:
-```
-
-Add the path to the newly created SSH public key to `conf/defaults.yml`.
-
-```
-key_file: ~/.ssh/metron-private-key.pub
-```
-
-Common Errors
--------------
-
-### Error: 'No handler was ready to authenticate...Check your credentials'
-
-```
-TASK [Define keypair] **********************************************************
-failed: [localhost] => (item=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXbcb1AlWsEPP
-  r9jEFrn0yun3PYNidJ/...david@hasselhoff.com) => {"failed": true, "item": "ssh-r
-  sa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXbcb1AlWsEPPr9jEFr... david@hasselhoff.com",
-  "msg": "No handler was ready to authenticate. 1 handlers were checked.
-  ['HmacAuthV4Handler'] Check your credentials"}
-```
-
-#### Solution 1
-
-This occurs when Ansible does not have the correct AWS access keys.  The following commands must return a valid access key that is defined within Amazon's [Identity and Access Management](https://console.aws.amazon.com/iam/) console.  
-
-```
-$ echo $AWS_ACCESS_KEY_ID
-AKIAI6NRFEO27E5FFELQ
-
-$ echo $AWS_SECRET_ACCESS_KEY
-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
-```
-
-#### Solution 2
-
-This error can occur if you have exported the correct AWS access key, but you are using `sudo` to run the Ansible playbook.  Do not use the `sudo` command when running the Ansible playbook.
-
-### Error: 'OptInRequired: ... you need to accept terms and subscribe'
-
-```
-TASK [metron-test: Instantiate 1 host(s) as sensors,ambari_master,metron,ec2] **
-fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
-"Instance creation failed => OptInRequired: In order to use this AWS Marketplace
-product you need to accept terms and subscribe. To do so please visit
-http://aws.amazon.com/marketplace/pp?sku=6x5jmcajty9edm3f211pqjfn2"}
-to retry, use: --limit @playbook.retry
-```
-
-#### Solution
-
-Apache Metron uses the [official CentOS 6 Amazon Machine Image](https://aws.amazon.com/marketplace/pp?sku=6x5jmcajty9edm3f211pqjfn2) when provisioning hosts. Amazon requires that you accept certain terms and conditions when using any Amazon Machine Image (AMI).  Follow the link provided in the error message to accept the terms and conditions then re-run the playbook.  
-
-### Error: 'PendingVerification: Your account is currently being verified'
-
-```
-TASK [metron-test: Instantiate 1 host(s) as sensors,ambari_master,metron,ec2] **
-fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
-"Instance creation failed => PendingVerification: Your account is currently
-being verified. Verification normally takes less than 2 hours. Until your
-account is verified, you may not be able to launch additional instances or
-create additional volumes. If you are still receiving this message after more
-than 2 hours, please let us know by writing to aws-verification@amazon.com. We
-appreciate your patience."}
-to retry, use: --limit @playbook.retry
-```
-
-#### Solution
-
-This will occur if you are attempting to deploy Apache Metron using a newly created Amazon Web Services account.  Follow the advice of the message and wait until Amazon's verification process is complete.  Amazon has some additional [advice for dealing with this error and more](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html).
-
-> Your account is pending verification. Until the verification process is complete, you may not be able to carry out requests with this account. If you have questions, contact [AWS Support](http://console.aws.amazon.com/support/home#/).
-
-### Error: 'Instance creation failed => InstanceLimitExceeded'
-
-```
-TASK [metron-test: Instantiate 3 host(s) as search,metron,ec2] *****************
-fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
-"Instance creation failed => InstanceLimitExceeded: You have requested more
-instances (11) than your current instance limit of 10 allows for the specified
-instance type. Please visit http://aws.amazon.com/contact-us/ec2-request to
-request an adjustment to this limit."}
-to retry, use: --limit @playbook.retry
-```
-
-#### Solution
-
-This will occur if Apache Metron attempts to deploy more host instances than allowed by your account.  The total number of instances required for Apache Metron can be reduced by editing `deployment/amazon-ec/playbook.yml`.  Perhaps a better alternative is to request of Amazon that this limit be increased.  Amazon has some additional [advice for dealing with this error and more](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html).
-
-> You've reached the limit on the number of instances you can run concurrently. The limit depends on the instance type. For more information, see [How many instances can I run in Amazon EC2](http://aws.amazon.com/ec2/faqs/#How_many_instances_can_I_run_in_Amazon_EC2). If you need additional instances, complete the [Amazon EC2 Instance Request Form](https://console.aws.amazon.com/support/home#/case/create?issueType=service-limit-increase&limitType=service-code-ec2-instances).
-
-### Error: 'SSH encountered an unknown error during the connection'
-
-```
-TASK [setup] *******************************************************************
-fatal: [ec2-52-26-113-221.us-west-2.compute.amazonaws.com]: UNREACHABLE! => {
-  "changed": false, "msg": "SSH encountered an unknown error during the
-  connection. We recommend you re-run the command using -vvvv, which will enable
-  SSH debugging output to help diagnose the issue", "unreachable": true}
-```
-
-#### Solution
-
-This most often indicates that Ansible cannot connect to the host with the SSH key that it has access to.  This could occur if hosts are provisioned with one SSH key, but the playbook is executed subsequently with a different SSH key.  The issue can be addressed by either altering the `key_file` variable to point to the key that was used to provision the hosts or by simply terminating all hosts and re-running the playbook.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/ansible.cfg
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/ansible.cfg b/deployment/amazon-ec2/ansible.cfg
deleted file mode 100644
index c8f26c4..0000000
--- a/deployment/amazon-ec2/ansible.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-[defaults]
-host_key_checking = False
-library = ../extra_modules
-roles_path = ../roles
-pipelining = True
-remote_user = centos
-forks = 20
-log_path = ./ansible.log
-
-# fix for "ssh throws 'unix domain socket too long' " problem
-[ssh_connection]
-control_path = %(directory)s/%%h-%%p-%%r

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/conf/defaults.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/conf/defaults.yml b/deployment/amazon-ec2/conf/defaults.yml
deleted file mode 100644
index 76c4b98..0000000
--- a/deployment/amazon-ec2/conf/defaults.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-# ec2
-env: metron-test
-region: us-west-2
-instance_type: m4.xlarge
-image: ami-05cf2265
-volume_type: standard
-key_name: metron-key
-xvda_vol_size: 50
-xvdb_vol_size: 100
-xvdc_vol_size: 100
-
-# ambari
-ambari_host: "{{ groups.ambari_master[0] }}"
-ambari_port: 8080
-ambari_user: admin
-ambari_password: admin
-cluster_type: small_cluster
-
-# hbase
-pcap_hbase_table: pcap
-tracker_hbase_table: access_tracker
-threatintel_hbase_table: threatintel
-enrichment_hbase_table: enrichment
-
-# kafka
-num_partitions: 3
-retention_in_gb: 25
-
-# metron variables
-metron_version: 0.1BETA
-java_home: /usr/jdk64/jdk1.8.0_40
-pcapservice_port: 8081
-
-# sensors
-sensor_test_mode: True
-sniff_interface: tap0
-snort_alert_csv_path: "/var/log/snort/alert.csv"
-pcap_replay: True
-pcap_replay_interface: tap0
-install_tap: True
-pcap_path: /opt/pcap-replay
-
-# data directories
-zookeeper_data_dir: "/data1/hadoop/zookeeper"
-namenode_checkpoint_dir: "/data1/hadoop/hdfs/namesecondary"
-namenode_name_dir: "/data1/hadoop/hdfs/namenode"
-datanode_data_dir: "/data1/hadoop/hdfs/data,/data2/hadoop/hdfs/data"
-journalnode_edits_dir: "/data1/hadoop/hdfs/journalnode"
-nodemanager_local_dirs: "/data1/hadoop/yarn/local"
-timeline_ldb_store_path: "/data1/hadoop/yarn/timeline"
-timeline_ldb_state_path: "/data1/hadoop/yarn/timeline"
-nodemanager_log_dirs: "/data1/hadoop/yarn/log"
-jhs_recovery_store_ldb_path: "/data1/hadoop/mapreduce/jhs"
-storm_local_dir: "/data1/hadoop/storm"
-kafka_log_dirs: "/data2/kafka-log"
-elasticsearch_data_dir: "/data1/elasticsearch,/data2/elasticsearch"
-
-#Search
-install_elasticsearch: True
-install_solr: False
-elasticsearch_transport_port: 9300
-elasticsearch_network_interface: eth0
-elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/conf/ec2.ini
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/conf/ec2.ini b/deployment/amazon-ec2/conf/ec2.ini
deleted file mode 100755
index 646ffaf..0000000
--- a/deployment/amazon-ec2/conf/ec2.ini
+++ /dev/null
@@ -1,105 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#
-# Ansible EC2 external inventory script settings.
-#
-# Refer to https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.ini
-# for additional information on available settings
-#
-
-[ec2]
-
-# AWS regions to make calls to. Set this to 'all' to make request to all regions
-# in AWS and merge the results together. Alternatively, set this to a comma
-# separated list of regions. E.g. 'us-east-1,us-west-1,us-west-2'
-regions = all
-
-# When generating inventory, Ansible needs to know how to address a server.
-# Each EC2 instance has a lot of variables associated with it. Here is the list:
-#   http://docs.pythonboto.org/en/latest/ref/ec2.html#module-boto.ec2.instance
-# Below are 2 variables that are used as the address of a server:
-#   - destination_variable
-#   - vpc_destination_variable
-
-# This is the normal destination variable to use. If you are running Ansible
-# from outside EC2, then 'public_dns_name' makes the most sense. If you are
-# running Ansible from within EC2, then perhaps you want to use the internal
-# address, and should set this to 'private_dns_name'. The key of an EC2 tag
-# may optionally be used; however the boto instance variables hold precedence
-# in the event of a collision.
-destination_variable = public_dns_name
-
-# For server inside a VPC, using DNS names may not make sense. When an instance
-# has 'subnet_id' set, this variable is used. If the subnet is public, setting
-# this to 'ip_address' will return the public IP address. For instances in a
-# private subnet, this should be set to 'private_ip_address', and Ansible must
-# be run from within EC2. The key of an EC2 tag may optionally be used; however
-# the boto instance variables hold precedence in the event of a collision.
-# WARNING: - instances that are in the private vpc, _without_ public ip address
-# will not be listed in the inventory until You set:
-# vpc_destination_variable = private_ip_address
-#vpc_destination_variable = ip_address
-vpc_destination_variable = public_dns_name
-
-# To exclude RDS instances from the inventory, uncomment and set to False.
-#rds = False
-
-# To exclude ElastiCache instances from the inventory, uncomment and set to False.
-#elasticache = False
-
-# API calls to EC2 are slow. For this reason, we cache the results of an API
-# call. Set this to the path you want cache files to be written to. Two files
-# will be written to this directory:
-#   - ansible-ec2.cache
-#   - ansible-ec2.index
-cache_path = ~/.ansible/tmp
-
-# The number of seconds a cache file is considered valid. After this many
-# seconds, a new API call will be made, and the cache file will be updated.
-# To disable the cache, set this value to 0
-cache_max_age = 300
-
-# Organize groups into a nested/hierarchy instead of a flat namespace.
-nested_groups = False
-
-# Replace - tags when creating groups to avoid issues with ansible
-replace_dash_in_groups = True
-
-# If set to true, any tag of the form "a,b,c" is expanded into a list
-# and the results are used to create additional tag_* inventory groups.
-expand_csv_tags = False
-
-# The EC2 inventory output can become very large. To manage its size,
-# configure which groups should be created.
-group_by_instance_id = True
-group_by_region = True
-group_by_availability_zone = True
-group_by_ami_id = True
-group_by_instance_type = True
-group_by_key_pair = True
-group_by_vpc_id = True
-group_by_security_group = True
-group_by_tag_keys = True
-group_by_tag_none = True
-group_by_route53_names = True
-group_by_rds_engine = True
-group_by_rds_parameter_group = True
-group_by_elasticache_engine = True
-group_by_elasticache_cluster = True
-group_by_elasticache_parameter_group = True
-group_by_elasticache_replication_group = True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/playbook.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/playbook.yml b/deployment/amazon-ec2/playbook.yml
deleted file mode 100644
index 16d281d..0000000
--- a/deployment/amazon-ec2/playbook.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-#
-# instantiate the hosts on amazon ec2
-#
-- hosts: localhost
-  vars_files:
-    - conf/defaults.yml
-  tasks:
-    - include: tasks/create-keypair.yml
-    - include: tasks/create-vpc.yml
-    - include: tasks/create-open-inbound-security-group.yml
-    - include: tasks/create-open-outbound-security-group.yml
-    - include: tasks/create-hosts.yml host_count=1 host_type=sensors,ambari_master,metron,ec2
-    - include: tasks/create-hosts.yml host_count=4 host_type=ambari_slave,ec2
-    - include: tasks/create-hosts.yml host_count=1 host_type=ambari_slave,hadoop_client,metron,ec2
-    - include: tasks/create-hosts.yml host_count=1 host_type=ambari_slave,enrichment,metron,ec2
-    - include: tasks/create-hosts.yml host_count=2 host_type=search,metron,ec2
-    - include: tasks/create-hosts.yml host_count=1 host_type=web,mysql,metron,ec2
-  tags:
-    - ec2
-
-#
-# wait for all ec2 hosts to come up
-#
-- hosts: ec2
-  become: True
-  vars_files:
-    - conf/defaults.yml
-  gather_facts: False
-  tasks:
-    - include: tasks/check-hosts.yml
-  tags:
-    - ec2
-    - wait
-
-#
-# mount additional data volumes on all ec2 hosts
-#
-- hosts: ec2
-  become: True
-  vars_files:
-    - conf/defaults.yml
-  tasks:
-    - include: tasks/mount-volume.yml vol_src=/dev/xvdb vol_mnt=/data1
-    - include: tasks/mount-volume.yml vol_src=/dev/xvdc vol_mnt=/data2
-    - include: tasks/check-volume.yml vol_name=xvda vol_src=/dev/xvda vol_size={{ xvda_vol_size }}
-  tags:
-    - ec2
-
-#
-# build the metron cluster
-#
-- include: ../playbooks/metron_full_install.yml
-
-#
-# provisioning report
-#
-- hosts: localhost
-  vars_files:
-    - conf/defaults.yml
-  tasks:
-    - include: tasks/provisioning-report.yml
-  tags:
-    - ec2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/check-hosts.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/check-hosts.yml b/deployment/amazon-ec2/tasks/check-hosts.yml
deleted file mode 100644
index 1a4b2c7..0000000
--- a/deployment/amazon-ec2/tasks/check-hosts.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Wait for connectivity to host(s)
-  local_action: wait_for host={{ inventory_hostname }} state=started timeout=300 delay=10
-  become: False

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/check-volume.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/check-volume.yml b/deployment/amazon-ec2/tasks/check-volume.yml
deleted file mode 100644
index b7ac63d..0000000
--- a/deployment/amazon-ec2/tasks/check-volume.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "Check size of volume {{ vol_src }}"
-  shell: "lsblk | grep part | grep {{ vol_name }} | awk '{ print $4}' | sed 's/[^0-9]//g'"
-  register: current_size
-
-- name: "Status of {{ vol_src }} volume"
-  debug: msg="volume={{ vol_src }} current={{ current_size.stdout|int }} expected={{ vol_size|int }}"
-
-- include: expand-volume.yml vol_src={{ vol_src }}
-  when: current_size.stdout|int < vol_size|int

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-hosts.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-hosts.yml b/deployment/amazon-ec2/tasks/create-hosts.yml
deleted file mode 100644
index 39bae3a..0000000
--- a/deployment/amazon-ec2/tasks/create-hosts.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "{{ env }}: Instantiate {{ host_count }} host(s) as {{ host_type }}"
-  ec2:
-    region: "{{ region }}"
-    instance_type: "{{ instance_type }}"
-    image: "{{ image }}"
-    key_name: "{{ env }}-{{ key_name }}"
-    assign_public_ip: True
-    group: ["{{ env }}-vpc-all-inbound","{{ env }}-vpc-all-outbound"]
-    vpc_subnet_id: "{{ vpc.subnets[0].id }}"
-    instance_tags:
-      Name: "[{{ env }}] {{ host_type }}"
-      type: "{{ host_type }}"
-      env: "{{ env }}"
-    exact_count: "{{ host_count }}"
-    count_tag:
-      type: "{{ host_type }}"
-      env: "{{ env }}"
-    volumes:
-    - device_name: /dev/sda1
-      volume_type: "{{ volume_type }}"
-      volume_size: "{{ xvda_vol_size }}"
-      delete_on_termination: true
-    - device_name: /dev/xvdb
-      volume_type: "{{ volume_type }}"
-      volume_size: "{{ xvdb_vol_size }}"
-      delete_on_termination: true
-    - device_name: /dev/xvdc
-      volume_type: "{{ volume_type }}"
-      volume_size: "{{ xvdc_vol_size }}"
-      delete_on_termination: true
-    wait: yes
-  register: ec2
-
-- name: Add host(s) to a hostgroup
-  add_host: hostname={{ item.public_dns_name }} groups={{ host_type }}
-  with_items: "{{ ec2.tagged_instances }}"
-  when: item.public_dns_name is defined

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-keypair.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-keypair.yml b/deployment/amazon-ec2/tasks/create-keypair.yml
deleted file mode 100644
index 693039e..0000000
--- a/deployment/amazon-ec2/tasks/create-keypair.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- set_fact:
-    the_key_file: "{{ key_file | default('~/.ssh/id_rsa.pub') }}"
-
-- name: Define keypair
-  ec2_key:
-    name: "{{ env }}-{{ key_name }}"
-    region: "{{ region }}"
-    key_material: "{{ item }}"
-  with_file: "{{ the_key_file }}"
-
-- debug: msg="Created keypair '{{ env }}-{{ key_name }}' from '{{ the_key_file }}'"
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml b/deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
deleted file mode 100644
index 67e89c8..0000000
--- a/deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "{{ env }}: Define open inbound security group"
-  ec2_group:
-    name: "{{ env }}-vpc-all-inbound"
-    description: WARNING allow all inbound connections from the internet
-    region: "{{ region }}"
-    vpc_id: "{{ vpc_id }}"
-    rules:
-      - proto: all
-        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml b/deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
deleted file mode 100644
index 53f505f..0000000
--- a/deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "{{ env }}: Define open outbound security group"
-  ec2_group:
-    name: "{{ env }}-vpc-all-outbound"
-    description: allow all outbound connections to the internet
-    region: "{{ region }}"
-    vpc_id: "{{ vpc_id }}"
-    rules_egress:
-      - proto: all
-        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-security-group.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-security-group.yml b/deployment/amazon-ec2/tasks/create-security-group.yml
deleted file mode 100644
index 1c9b909..0000000
--- a/deployment/amazon-ec2/tasks/create-security-group.yml
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "{{ env }}: Define the {{ name }} security group"
-  ec2_group:
-    name: "{{ env }}-{{ name }}"
-    region: "{{ region }}"
-    description: "[{{env}}] {{ name }}/{{ proto }}/{{ port }}"
-    vpc_id: "{{ vpc_id }}"
-    rules:
-      - proto: "{{ proto }}"
-        from_port: "{{ port }}"
-        to_port: "{{ port }}"
-        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/create-vpc.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/create-vpc.yml b/deployment/amazon-ec2/tasks/create-vpc.yml
deleted file mode 100644
index 7fc31e7..0000000
--- a/deployment/amazon-ec2/tasks/create-vpc.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - name: "{{ env }}:  Create virtual private cloud"
-    ec2_vpc:
-      region: "{{ region }}"
-      internet_gateway: True
-      resource_tags:
-        Name: "{{ env }}-virtual-private-cloud"
-        env: "{{ env }}"
-      cidr_block: 10.0.0.0/16
-      dns_hostnames: yes
-      dns_support: yes
-      subnets:
-        - cidr: 10.0.0.0/24
-          resource_tags:
-            tier: web
-        - cidr: 10.0.1.0/24
-          resource_tags:
-            tier: hdp
-        - cidr: 10.0.2.0/24
-          resource_tags:
-            tier: sensors
-      route_tables:
-        - subnets:
-          - 10.0.0.0/24
-          - 10.0.1.0/24
-          - 10.0.2.0/24
-          routes:
-          - dest: 0.0.0.0/0
-            gw: igw
-    register: vpc
-
-  - name: "[{{ env }}] Created vpc with id={{ vpc.vpc_id }}"
-    set_fact:
-      vpc_id: "{{ vpc.vpc_id }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/expand-volume.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/expand-volume.yml b/deployment/amazon-ec2/tasks/expand-volume.yml
deleted file mode 100644
index 1e25e27..0000000
--- a/deployment/amazon-ec2/tasks/expand-volume.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: "Expand {{ vol_src }} volume"
-#          sectors  delete 1  new      primary  first    past mbr to end  bootable     write and exit
-  shell: "(echo u s; echo d 1; echo n; echo p; echo 1; echo 2048 ; echo ;echo a; echo 1; echo w) | fdisk {{ vol_src }} || true"
-  args:
-    executable: /bin/bash
-
-- name: Restart host(s)
-  command: shutdown -r now "Trigger volume changes"
-  async: 0
-  poll: 0
-  ignore_errors: True
-
-- include: tasks/check-hosts.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/mount-volume.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/mount-volume.yml b/deployment/amazon-ec2/tasks/mount-volume.yml
deleted file mode 100644
index 11259a5..0000000
--- a/deployment/amazon-ec2/tasks/mount-volume.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install xfsprogs
-  yum:
-    name: xfsprogs
-    state: present
-    update_cache: yes
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Format data volume(s)
-  filesystem: fstype=xfs dev={{ vol_src }}
-
-- name: Mount the volume
-  mount: name={{ vol_mnt }} src={{ vol_src }} opts=noatime fstype=xfs state=mounted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/tasks/provisioning-report.yml
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/tasks/provisioning-report.yml b/deployment/amazon-ec2/tasks/provisioning-report.yml
deleted file mode 100644
index d2abec0..0000000
--- a/deployment/amazon-ec2/tasks/provisioning-report.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the 'License'); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an 'AS IS' BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Known hosts groups
-  debug: var=groups
-
-- name: Sanity check Metron web
-  local_action: wait_for host="{{ groups.web[0] }}" port=5000 timeout=20
-
-- name: Sanity check Ambari web
-  local_action: wait_for host="{{ groups.ambari_master[0] }}" port="{{ ambari_port }}" timeout=20
-
-- set_fact:
-    Success:
-      - "Apache Metron deployed successfully"
-      - "   Metron  @  http://{{ groups.web[0] }}:5000"
-      - "   Ambari  @  http://{{ groups.ambari_master[0] }}:{{ ambari_port }}"
-      - "   Sensors @  {{ groups.sensors[0] }} on {{ sniff_interface }}"
-      - For additional information, see https://metron.incubator.apache.org/'
-
-- debug: var=Success

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/ansible.cfg
----------------------------------------------------------------------
diff --git a/deployment/ansible.cfg b/deployment/ansible.cfg
deleted file mode 100644
index 9b3916b..0000000
--- a/deployment/ansible.cfg
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-[defaults]
-host_key_checking = false
-library = extra_modules
-roles_path = ./roles
-
-[ssh_connection]
-control_path = %(directory)s/%%h-%%p-%%r
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/extra_modules/ambari_cluster_state.py
----------------------------------------------------------------------
diff --git a/deployment/extra_modules/ambari_cluster_state.py b/deployment/extra_modules/ambari_cluster_state.py
deleted file mode 100644
index 14c2004..0000000
--- a/deployment/extra_modules/ambari_cluster_state.py
+++ /dev/null
@@ -1,392 +0,0 @@
-#!/usr/bin/python
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-DOCUMENTATION = '''
----
-module: ambari_cluster_state
-version_added: "2.1"
-author: Mark Bittmann (https://github.com/mbittmann)
-short_description: Create, delete, start or stop an ambari cluster
-description:
-    - Create, delete, start or stop an ambari cluster
-options:
-  host:
-    description:
-      The hostname for the ambari web server
-  port:
-    description:
-      The port for the ambari web server
-  username:
-    description:
-      The username for the ambari web server
-  password:
-    description:
-      The name of the cluster in web server
-    required: yes
-  cluster_name:
-    description:
-      The name of the cluster in ambari
-    required: yes
-  cluster_state:
-    description:
-      The desired state for the ambari cluster ['present', 'absent', 'started', 'stopped']. Setting the cluster
-      state to absent will first stop the cluster.
-    required: yes
-  blueprint_var:
-    description:
-      The path to the file defining the cluster blueprint and host mapping. Required when state == 'present'
-    required: no
-  blueprint_name:
-    description:
-      The name of the blueprint. Required when state == 'present'
-    required: no
-  wait_for_complete:
-    description:
-      Whether to wait for the request to complete before returning. Default is False.
-    required: no
-  requirements: [ 'requests']
-'''
-
-EXAMPLES = '''
-# must use full relative path to any files in stored in roles/role_name/files/
-- name: Create a new ambari cluster
-    ambari_cluster_state:
-      host: localhost
-      port: 8080
-      username: admin
-      password: admin
-      cluster_name: my_cluster
-      cluster_state: present
-      blueprint_var: roles/my_role/files/blueprint.yml
-      blueprint_name: hadoop
-      wait_for_complete: True
-- name: Start the ambari cluster
-  ambari_cluster_state:
-    host: localhost
-    port: 8080
-    username: admin
-    password: admin
-    cluster_name: my_cluster
-    cluster_state: started
-    wait_for_complete: True
-- name: Stop the ambari cluster
-  ambari_cluster_state:
-    host: localhost
-    port: 8080
-    username: admin
-    password: admin
-    cluster_name: my_cluster
-    cluster_state: stopped
-    wait_for_complete: True
-- name: Delete the ambari cluster
-  ambari_cluster_state:
-    host: localhost
-    port: 8080
-    username: admin
-    password: admin
-    cluster_name: my_cluster
-    cluster_state: absent
-'''
-
-RETURN = '''
-results:
-    description: The content of the requests object returned from the RESTful call
-    returned: success
-    type: string
-created_blueprint:
-    description: Whether a blueprint was created
-    returned: success
-    type: boolean
-status:
-    description: The status of the blueprint creation process
-    returned: success
-    type: string
-'''
-
-__author__ = 'mbittmann'
-
-import json
-try:
-    import requests
-except ImportError:
-    REQUESTS_FOUND = False
-else:
-    REQUESTS_FOUND = True
-
-
-def main():
-
-    argument_spec = dict(
-        host=dict(type='str', default=None, required=True),
-        port=dict(type='int', default=None, required=True),
-        username=dict(type='str', default=None, required=True),
-        password=dict(type='str', default=None, required=True),
-        cluster_name=dict(type='str', default=None, required=True),
-        cluster_state=dict(type='str', default=None, required=True,
-                           choices=['present', 'absent', 'started', 'stopped']),
-        blueprint_var=dict(type='dict', required=False),
-        blueprint_name=dict(type='str', default=None, required=False),
-        configurations=dict(type='list', default=None, required=False),
-        wait_for_complete=dict(default=False, required=False, choices=BOOLEANS),
-    )
-
-    required_together = ['blueprint_var', 'blueprint_name']
-
-    module = AnsibleModule(
-        argument_spec=argument_spec,
-        required_together=required_together
-    )
-
-    if not REQUESTS_FOUND:
-        module.fail_json(
-            msg='requests library is required for this module')
-
-    p = module.params
-
-    host = p.get('host')
-    port = p.get('port')
-    username = p.get('password')
-    password = p.get('password')
-    cluster_name = p.get('cluster_name')
-    cluster_state = p.get('cluster_state')
-    blueprint_name = p.get('blueprint_name')
-    wait_for_complete = p.get('wait_for_complete')
-
-    ambari_url = 'http://{0}:{1}'.format(host, port)
-
-    try:
-        if cluster_state in ['started', 'stopped']:
-            if not cluster_exists(ambari_url, username, password, cluster_name):
-                module.fail_json(msg="Cluster name {0} does not exist".format(cluster_name))
-            state = ''
-            if cluster_state == 'started':
-                state = 'STARTED'
-            elif cluster_state == 'stopped':
-                state = 'INSTALLED'
-
-            request = set_cluster_state(ambari_url, username, password, cluster_name, state)
-            if wait_for_complete:
-                request_id = json.loads(request.content)['Requests']['id']
-                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
-                if status != 'COMPLETED':
-                    module.fail_json(msg="Request failed with status {0}".format(status))
-            module.exit_json(changed=True, results=request.content)
-        elif cluster_state == 'absent':
-            if not cluster_exists(ambari_url, username, password, cluster_name):
-                module.exit_json(changed=False, msg='Skipping. Cluster does not exist')
-            if not can_delete_cluster(ambari_url, username, password, cluster_name):
-                request = set_cluster_state(ambari_url, username, password, cluster_name, 'INSTALLED')
-                request_id = json.loads(request.content)['Requests']['id']
-                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
-                if status != 'COMPLETED':
-                    module.fail_json(msg="Request failed with status {0}".format(status))
-            request = delete_cluster(ambari_url, username, password, cluster_name)
-            module.exit_json(changed=True, results=request.content)
-        elif cluster_state == 'present':
-            if not p.get('blueprint_var') or not blueprint_name:  # have neither name nor file
-                module.fail_json(msg="Must provide blueprint_var and blueprint_name when cluster_state=='present'")
-
-            blueprint_var = p.get('blueprint_var')
-            blueprint, host_map = blueprint_var_to_ambari_converter(blueprint_var)
-            created_blueprint = False
-
-            if not blueprint_exists(ambari_url, username, password, blueprint_name):
-                create_blueprint(ambari_url, username, password, blueprint_name, blueprint)
-                created_blueprint = True
-
-            if cluster_exists(ambari_url, username, password, cluster_name):
-                module.exit_json(changed=False, msg='Cluster {0} already exists'.format(cluster_name),
-                                 created_blueprint=created_blueprint)
-
-            configurations = p.get('configurations')
-            request = create_cluster(ambari_url, username, password, cluster_name, blueprint_name, configurations, host_map)
-            request_id = json.loads(request.content)['Requests']['id']
-            if wait_for_complete:
-                status = wait_for_request_complete(ambari_url, username, password, cluster_name, request_id, 2)
-                if status != 'COMPLETED':
-                    module.fail_json(msg="Request failed with status {0}".format(status))
-            request_status = get_request_status(ambari_url, username, password, cluster_name, request_id)
-            module.exit_json(changed=True, results=request.content,
-                             created_blueprint=created_blueprint, status=request_status)
-
-    except requests.ConnectionError, e:
-        module.fail_json(msg="Could not connect to Ambari client: " + str(e.message))
-    except Exception, e:
-        module.fail_json(msg="Ambari client exception occurred: " + str(e.message))
-
-
-def get_clusters(ambari_url, user, password):
-    r = get(ambari_url, user, password, '/api/v1/clusters')
-    if r.status_code != 200:
-        msg = 'Coud not get cluster list: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    clusters = json.loads(r.content)
-    return clusters['items']
-
-
-def cluster_exists(ambari_url, user, password, cluster_name):
-    clusters = get_clusters(ambari_url, user, password)
-    return cluster_name in [item['Clusters']['cluster_name'] for item in clusters]
-
-
-def set_cluster_state(ambari_url, user, password, cluster_name, cluster_state):
-    path = '/api/v1/clusters/{0}/services'.format(cluster_name)
-    request = {"RequestInfo": {"context": "Setting cluster state"},
-               "Body": {"ServiceInfo": {"state": "{0}".format(cluster_state)}}}
-    payload = json.dumps(request)
-    r = put(ambari_url, user, password, path, payload)
-    if r.status_code not in [202, 200]:
-        msg = 'Coud not set cluster state: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    return r
-
-
-def create_cluster(ambari_url, user, password, cluster_name, blueprint_name, configurations, hosts_json):
-    path = '/api/v1/clusters/{0}'.format(cluster_name)
-    data = json.dumps({'blueprint': blueprint_name, 'configurations': configurations, 'host_groups': hosts_json})
-    f = open('cluster.log', 'w')
-    f.write(data)
-    f.close()
-    r = post(ambari_url, user, password, path, data)
-    if r.status_code != 202:
-        msg = 'Coud not create cluster: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    return r
-
-
-def get_request_status(ambari_url, user, password, cluster_name, request_id):
-    path = '/api/v1/clusters/{0}/requests/{1}'.format(cluster_name, request_id)
-    r = get(ambari_url, user, password, path)
-    if r.status_code != 200:
-        msg = 'Coud not get cluster request status: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    service = json.loads(r.content)
-    return service['Requests']['request_status']
-
-
-def wait_for_request_complete(ambari_url, user, password, cluster_name, request_id, sleep_time):
-    while True:
-        status = get_request_status(ambari_url, user, password, cluster_name, request_id)
-        if status == 'COMPLETED':
-            return status
-        elif status in ['FAILED', 'TIMEDOUT', 'ABORTED', 'SKIPPED_FAILED']:
-            return status
-        else:
-            time.sleep(sleep_time)
-
-
-def can_delete_cluster(ambari_url, user, password, cluster_name):
-    path = '/api/v1/clusters/{0}/services?ServiceInfo/state=STARTED'.format(cluster_name)
-    r = get(ambari_url, user, password, path)
-    items = json.loads(r.content)['items']
-    return len(items) > 0
-
-
-def get_blueprints(ambari_url, user, password):
-    path = '/api/v1/blueprints'
-    r = get(ambari_url, user, password, path)
-    if r.status_code != 200:
-        msg = 'Coud not get blueprint list: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-
-    services = json.loads(r.content)
-    return services['items']
-
-
-def create_blueprint(ambari_url, user, password, blueprint_name, blueprint_data):
-    data = json.dumps(blueprint_data)
-    f = open('blueprint.log', 'w')
-    f.write(data)
-    f.close()
-    path = "/api/v1/blueprints/" + blueprint_name
-    r = post(ambari_url, user, password, path, data)
-    if r.status_code != 201:
-        msg = 'Coud not create blueprint: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    return r
-
-
-def blueprint_exists(ambari_url, user, password, blueprint_name):
-    blueprints = get_blueprints(ambari_url, user, password)
-    return blueprint_name in [item['Blueprints']['blueprint_name'] for item in blueprints]
-
-
-def delete_cluster(ambari_url, user, password, cluster_name):
-    path = '/api/v1/clusters/{0}'.format(cluster_name)
-    r = delete(ambari_url, user, password, path)
-    if r.status_code != 200:
-        msg = 'Coud not delete cluster: request code {0}, \
-                    request message {1}'.format(r.status_code, r.content)
-        raise Exception(msg)
-    return r
-
-
-def get(ambari_url, user, password, path):
-    r = requests.get(ambari_url + path, auth=(user, password))
-    return r
-
-
-def put(ambari_url, user, password, path, data):
-    headers = {'X-Requested-By': 'ambari'}
-    r = requests.put(ambari_url + path, data=data, auth=(user, password), headers=headers)
-    return r
-
-
-def post(ambari_url, user, password, path, data):
-    headers = {'X-Requested-By': 'ambari'}
-    r = requests.post(ambari_url + path, data=data, auth=(user, password), headers=headers)
-    return r
-
-
-def delete(ambari_url, user, password, path):
-    headers = {'X-Requested-By': 'ambari'}
-    r = requests.delete(ambari_url + path, auth=(user, password), headers=headers)
-    return r
-
-
-def blueprint_var_to_ambari_converter(blueprint_var):
-    groups = blueprint_var['groups']
-    new_groups = []
-    host_map = []
-    for group in groups:
-        components = []
-        for component in group['components']:
-            components.append({'name': component})
-        group['components'] = components
-        hosts = group.pop('hosts')
-        new_groups.append(group)
-        this_host_map = dict()
-        this_host_map['name'] = group['name']
-        this_host_list = [{'fqdn': host} for host in hosts]
-        this_host_map['hosts'] = this_host_list
-        host_map.append(this_host_map)
-    blueprint = dict()
-    blueprint['host_groups'] = new_groups
-    blueprint['Blueprints'] = {'stack_name': blueprint_var['stack_name'], 'stack_version': blueprint_var['stack_version']}
-
-    return blueprint, host_map
-
-from ansible.module_utils.basic import *
-if __name__ == '__main__':
-    main()

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/metron_example/group_vars/all
----------------------------------------------------------------------
diff --git a/deployment/inventory/metron_example/group_vars/all b/deployment/inventory/metron_example/group_vars/all
deleted file mode 100644
index 097516d..0000000
--- a/deployment/inventory/metron_example/group_vars/all
+++ /dev/null
@@ -1,77 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#Ansible Variables
-ansible_ssh_private_key_file: /Path/to/private/key/file #Change This
-ansible_ssh_user: root
-
-#Ambari variables
-ambari_host: "{{ groups.ambari_master[0] }}"
-ambari_port: 8080
-ambari_user: admin
-ambari_password: admin
-cluster_type: small_cluster
-
-# hbase
-pcap_hbase_table: pcap
-tracker_hbase_table: access_tracker
-threatintel_hbase_table: threatintel
-enrichment_hbase_table: enrichment
-
-# metron variables
-metron_version: 0.1BETA
-java_home: /usr/jdk64/jdk1.8.0_40
-pcapservice_port: 8081
-
-# sensors
-sensor_test_mode: True
-sniff_interface: eth0
-bro_version: "2.4.1"
-fixbuf_version: "1.7.1"
-yaf_version: "2.8.0"
-daq_version: "2.0.6-1"
-iface: "eth0"
-pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
-pycapa_home: "/opt/pycapa"
-snort_version: "2.9.8.0-1"
-snort_alert_csv_path: "/var/log/snort/alert.csv"
-
-#PCAP Replay
-pcap_replay: True
-pcap_replay_interface: eth1
-
-#data directories - only required to override defaults
-#zookeeper_data_dir: "/newdir/hadoop/zookeeper"
-#namenode_checkpoint_dir: "/newdir/hadoop/hdfs/namesecondary"
-#namenode_name_dir: "/newdir/hadoop/hdfs/namenode"
-#datanode_data_dir: "/newdir/hadoop/hdfs/data"
-#journalnode_edits_dir: "/newdir/hadoop/hdfs/journalnode"
-#nodemanager_local_dirs: "/newdir/hadoop/yarn/local"
-#timeline_ldb_store_path: "/newdir/hadoop/yarn/timeline"
-#timeline_ldb_state_path: "/newdir/hadoop/yarn/timeline"
-#nodemanager_log_dirs: "/newdir/hadoop/yarn/log"
-#jhs_recovery_store_ldb_path: "/newdir/hadoop/mapreduce/jhs"
-#storm_local_dir: "/newdir/hadoop/storm"
-#kafka_log_dirs: "/newdir/kafka-log"
-#elasticsearch_data_dir: "/newdir1/elasticsearch"
-
-#Search
-install_elasticsearch: True
-install_solr: False
-elasticsearch_transport_port: 9300
-elasticsearch_network_interface: eth1
-elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/metron_example/hosts
----------------------------------------------------------------------
diff --git a/deployment/inventory/metron_example/hosts b/deployment/inventory/metron_example/hosts
deleted file mode 100644
index 0d01327..0000000
--- a/deployment/inventory/metron_example/hosts
+++ /dev/null
@@ -1,63 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-[ambari_master]
-node1
-
-#minimum of 3 - 6 from 12 node cluser
-[ambari_slave]
-node2
-node3
-node4
-node5
-node6
-node7
-node8
-
-#last ambari_slave
-[hadoop_client]
-node9
-
-#3rd ambari_slave
-[enrichment]
-node1
-
-#1 or more
-[search]
-node10
-node11
-node12
-
-#1 only
-[sensors]
-node1
-
-#same as mysql in 12 node topology
-[web]
-node12
-
-[mysql]
-node12
-
-[metron:children]
-enrichment
-search
-web
-sensors
-mysql
-hadoop_client
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/multinode-vagrant/group_vars/all
----------------------------------------------------------------------
diff --git a/deployment/inventory/multinode-vagrant/group_vars/all b/deployment/inventory/multinode-vagrant/group_vars/all
deleted file mode 100644
index a4a6af5..0000000
--- a/deployment/inventory/multinode-vagrant/group_vars/all
+++ /dev/null
@@ -1,75 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#Ambari variables
-ambari_host: "{{ groups.ambari_master[0] }}"
-hdp_host_group: "{{ groups.ambari_slave }}"
-ambari_port: 8080
-ambari_user: admin
-ambari_password: admin
-cluster_type: multi_vagrant_cluster
-
-# hbase
-pcap_hbase_table: pcap
-tracker_hbase_table: access_tracker
-threatintel_hbase_table: threatintel
-enrichment_hbase_table: enrichment
-
-#elasticsearch
-elasticsearch_transport_port: 9300
-elasticsearch_network_interface: eth1
-elasticsearch_web_port: 9200
-
-# metron variables
-metron_version: 0.1BETA
-java_home: /usr/jdk64/jdk1.8.0_40
-pcapservice_port: 8081
-
-# sensors
-sensor_test_mode: True
-sniff_interface: eth1
-bro_version: "2.4.1"
-fixbuf_version: "1.7.1"
-yaf_version: "2.8.0"
-daq_version: "2.0.6-1"
-iface: "eth0"
-pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
-pycapa_home: "/opt/pycapa"
-snort_version: "2.9.8.0-1"
-snort_alert_csv_path: "/var/log/snort/alert.csv"
-
-#data directories
-#zookeeper_data_dir: "/newdir/hadoop/zookeeper"
-#namenode_checkpoint_dir: "/newdir/hadoop/hdfs/namesecondary"
-#namenode_name_dir: "/newdir/hadoop/hdfs/namenode"
-#datanode_data_dir: "/newdir/hadoop/hdfs/data"
-#journalnode_edits_dir: "/newdir/hadoop/hdfs/journalnode"
-#nodemanager_local_dirs: "/newdir/hadoop/yarn/local"
-#timeline_ldb_store_path: "/newdir/hadoop/yarn/timeline"
-#timeline_ldb_state_path: "/newdir/hadoop/yarn/timeline"
-#nodemanager_log_dirs: "/newdir/hadoop/yarn/log"
-#jhs_recovery_store_ldb_path: "/newdir/hadoop/mapreduce/jhs"
-#storm_local_dir: "/newdir/hadoop/storm"
-#kafka_log_dirs: "/newdir/kafka-log"
-#elasticsearch_data_dir: "/newdir1/elasticsearch"
-
-#Search
-install_elasticsearch: True
-install_solr: False
-elasticsearch_transport_port: 9300
-elasticsearch_network_interface: eth1
-elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/multinode-vagrant/hosts
----------------------------------------------------------------------
diff --git a/deployment/inventory/multinode-vagrant/hosts b/deployment/inventory/multinode-vagrant/hosts
deleted file mode 100644
index d84ab1e..0000000
--- a/deployment/inventory/multinode-vagrant/hosts
+++ /dev/null
@@ -1,59 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-[ambari_master]
-node1
-
-#minimum of 3 - 6 from 12 node cluser
-[ambari_slave]
-node2
-node3
-node4
-
-#last ambari_slave
-[hadoop_client]
-node4
-
-[enrichment]
-node4
-
-#1 or more
-[search]
-node1
-
-#1 only
-[sensors]
-node1
-
-#same as mysql in 12 node topology
-[web]
-node3
-
-[mysql]
-node3
-
-[metron:children]
-enrichment
-search
-web
-sensors
-mysql
-hadoop_client
-
-
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/singlenode-vagrant/group_vars/all
----------------------------------------------------------------------
diff --git a/deployment/inventory/singlenode-vagrant/group_vars/all b/deployment/inventory/singlenode-vagrant/group_vars/all
deleted file mode 100644
index 54dc168..0000000
--- a/deployment/inventory/singlenode-vagrant/group_vars/all
+++ /dev/null
@@ -1,87 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#Ambari variables
-ambari_host: "{{ groups.ambari_master[0] }}"
-hdp_host_group: "{{ groups.ambari_slave }}"
-ambari_port: 8080
-ambari_user: admin
-ambari_password: admin
-cluster_type: single_node_vm
-
-# hbase
-pcap_hbase_table: pcap
-tracker_hbase_table: access_tracker
-threatintel_hbase_table: threatintel
-enrichment_hbase_table: enrichment
-
-# metron variables
-metron_version: 0.1BETA
-metron_directory: /usr/metron/{{ metron_version }}
-java_home: /usr/jdk64/jdk1.8.0_40
-bro_version: "2.4.1"
-fixbuf_version: "1.7.1"
-yaf_version: "2.8.0"
-daq_version: "2.0.6-1"
-pycapa_repo: "https://github.com/OpenSOC/pycapa.git"
-pycapa_home: "/opt/pycapa"
-snort_version: "2.9.8.0-1"
-snort_alert_csv_path: "/var/log/snort/alert.csv"
-
-#data directories - only required to override defaults
-zookeeper_data_dir: "/data1/hadoop/zookeeper"
-namenode_checkpoint_dir: "/data1/hadoop/hdfs/namesecondary"
-namenode_name_dir: "/data1/hadoop/hdfs/namenode"
-datanode_data_dir: "/data1/hadoop/hdfs/data,/data2/hadoop/hdfs/data"
-journalnode_edits_dir: "/data1/hadoop/hdfs/journalnode"
-nodemanager_local_dirs: "/data1/hadoop/yarn/local"
-timeline_ldb_store_path: "/data1/hadoop/yarn/timeline"
-timeline_ldb_state_path: "/data1/hadoop/yarn/timeline"
-nodemanager_log_dirs: "/data1/hadoop/yarn/log"
-jhs_recovery_store_ldb_path: "/data1/hadoop/mapreduce/jhs"
-storm_local_dir: "/data1/hadoop/storm"
-kafka_log_dirs: "/data1/kafka-log"
-elasticsearch_data_dir: "/data1/elasticsearch,/data2/elasticsearch"
-
-ambari_server_mem: 512
-threat_intel_bulk_load: False
-
-# sensors
-sensor_test_mode: True
-install_pycapa: False
-install_bro: True
-install_snort: True
-install_yaf: True
-pcap_replay: True
-sniff_interface: eth1
-pcap_replay_interface: "{{ sniff_interface }}"
-storm_topologies:
-    - "{{ metron_directory }}/config/topologies/bro/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/snort/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/yaf/remote.yaml"
-    - "{{ metron_directory }}/config/topologies/enrichment/remote.yaml"
-pcapservice_port: 8081
-
-#Search
-install_elasticsearch: True
-install_solr: False
-solr_collection_name: Metron
-solr_number_shards: 1
-solr_replication_factor: 1
-elasticsearch_transport_port: 9300
-elasticsearch_network_interface: eth1
-elasticsearch_web_port: 9200
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/inventory/singlenode-vagrant/hosts
----------------------------------------------------------------------
diff --git a/deployment/inventory/singlenode-vagrant/hosts b/deployment/inventory/singlenode-vagrant/hosts
deleted file mode 100644
index 6fd8b18..0000000
--- a/deployment/inventory/singlenode-vagrant/hosts
+++ /dev/null
@@ -1,48 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-[ambari_master]
-node1
-
-[ambari_slave]
-node1
-
-[hadoop_client]
-node1
-
-[enrichment]
-node1
-
-[search]
-node1
-
-[web]
-node1
-
-[sensors]
-node1
-
-[mysql]
-node1
-
-[metron:children]
-enrichment
-search
-web
-sensors
-mysql
-hadoop_client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/playbooks/ambari_install.yml
----------------------------------------------------------------------
diff --git a/deployment/playbooks/ambari_install.yml b/deployment/playbooks/ambari_install.yml
deleted file mode 100644
index 685753c..0000000
--- a/deployment/playbooks/ambari_install.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- hosts: ec2
-  become: true
-  tasks:
-    - include_vars: ../amazon-ec2/conf/defaults.yml
-  tags:
-    - ec2
-
-- hosts: ambari_*
-  become: true
-  roles:
-    - role: ambari_common
-  tags:
-    - ambari-prereqs
-    - hdp-install
-
-- hosts: ambari_master
-  become: true
-  roles:
-    - role:  ambari_master
-  tags:
-    - ambari-server
-    - hdp-install
-
-- hosts: ambari_slave
-  become: true
-  roles:
-    - role: ambari_slave
-  tags:
-    - ambari-agent
-    - hdp-install
-
-- hosts: ambari_master
-  become: true
-  roles:
-    - role: ambari_config
-  tags:
-    - hdp-install
-    - hdp-deploy

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/playbooks/metron_full_install.yml
----------------------------------------------------------------------
diff --git a/deployment/playbooks/metron_full_install.yml b/deployment/playbooks/metron_full_install.yml
deleted file mode 100644
index 26ffd62..0000000
--- a/deployment/playbooks/metron_full_install.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: ambari_install.yml
-  tags:
-    - ambari
-- include: metron_install.yml
-  tags:
-    - metron

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/playbooks/metron_install.yml
----------------------------------------------------------------------
diff --git a/deployment/playbooks/metron_install.yml b/deployment/playbooks/metron_install.yml
deleted file mode 100644
index f6bc492..0000000
--- a/deployment/playbooks/metron_install.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- hosts: ec2
-  become: true
-  tasks:
-    - include_vars: ../amazon-ec2/conf/defaults.yml
-  tags:
-    - ec2
-
-- hosts: metron
-  become: true
-  roles:
-    - role: metron_common
-  tags:
-    - metron-prereqs
-
-- hosts: hadoop_client
-  become: true
-  roles:
-    - role: hadoop_setup
-  tags:
-    - metron-prereqs
-
-- hosts: search
-  become: true
-  vars:
-    es_hosts: "{% set comma = joiner(',') %}{% for host in groups['search'] -%}{{ comma() }}{{ host }}{%- endfor %}"
-  roles:
-    - { role: elasticsearch, when: install_elasticsearch | default(True) == True }
-  tags:
-    - search
-
-- hosts: search
-  become: true
-  roles:
-    - { role: solr, when: install_solr | default(False) == True  }
-  tags:
-    - search
-
-- hosts: mysql
-  become: true
-  roles:
-    - role: mysql_server
-  tags:
-    - mysql-server
-
-- hosts: ambari_slave
-  become: true
-  roles:
-    - role: mysql_client
-  tags:
-    - mysql-client
-
-- hosts: sensors
-  become: true
-  roles:
-    - { role: tap_interface, when: install_tap | default(False) == True }
-    - { role: pycapa, when: install_pycapa | default(True) == True }
-    - { role: bro, when: install_bro | default(True) == True }
-    - { role: flume,  when: install_snort | default(True) == True }
-    - { role: snort , when: install_snort | default(True) == True }
-    - { role: yaf, when: install_yaf | default(True) == True }
-    - { role: pcap_replay , when: (pcap_replay | default(False)) or (sensor_test_mode | default(False)) == True }
-    - { role: sensor-test-mode, when: sensor_test_mode | default(False) == True }
-  tags:
-      - sensors
-
-- hosts: enrichment
-  become: true
-  roles:
-    - role: metron_streaming
-  tags:
-    - enrichment
-
-- hosts: web
-  become: true
-  roles:
-    - { role: metron_ui, when: install_elasticsearch | default(True) == True }
-    - { role: metron_pcapservice, when: install_elasticsearch | default(True) == True }
-  tags:
-    - web

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/defaults/main.yml b/deployment/roles/ambari_common/defaults/main.yml
deleted file mode 100644
index 65c83d9..0000000
--- a/deployment/roles/ambari_common/defaults/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-hadoop_logrotate_frequency: daily
-hadoop_logrotate_retention: 30

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/meta/main.yml b/deployment/roles/ambari_common/meta/main.yml
deleted file mode 100644
index 8992ac1..0000000
--- a/deployment/roles/ambari_common/meta/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - libselinux-python
-  - yum-update
-  - epel
-  - ntp

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/tasks/main.yml b/deployment/roles/ambari_common/tasks/main.yml
deleted file mode 100644
index 2526923..0000000
--- a/deployment/roles/ambari_common/tasks/main.yml
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Check OS Version
-  fail: msg="Ambari HDP deployment supports CentOS 6 only."
-  when: (ansible_distribution != "CentOS" or ansible_distribution_major_version != "6")
-
-- include: passwd_less_ssh.yml
-
-- name: Ensure iptables is stopped and is not running at boot time.
-  ignore_errors: yes
-  service: name=iptables state=stopped enabled=no
-
-#
-# ambari uses socket.getfqdn() to find the hostname. with 'localhost.localdomain'
-# in '/etc/hosts' this function will report the hostname as 'localhost.localdomain'
-# rather than 'node1' as would be expected.  other functions like socket.gethostname()
-# will always return 'node1' as expected.  ambari needs to see 'node1' to be able to
-# communicate between the master and agents.
-
-- name: Remove ipv4 'localhost.localdomain' from /etc/hosts
-  lineinfile: dest=/etc/hosts state=absent regexp="^127.0.0.1(.*)localdomain(.*)$"
-
-- name: Remove ipv6 'localhost.localdomain' from /etc/hosts
-  lineinfile: dest=/etc/hosts state=absent regexp="^::1(.*)localdomain(.*)$"
-
-- name: Add localhost to /etc/hosts
-  lineinfile: dest=/etc/hosts line="127.0.0.1   localhost"
-
-- name: Download Ambari repo
-  get_url: url="{{ rhel_ambari_install_url }}" dest=/etc/yum.repos.d/ambari.repo
-
-- name: Create Logrotate Script for Hadoop Services
-  template:
-    src: "metron-hadoop-logrotate.yml"
-    dest: "/etc/logrotate.d/metron-ambari"
-    mode: 0644
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ambari_common/tasks/passwd_less_ssh.yml b/deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
deleted file mode 100644
index 0928e34..0000000
--- a/deployment/roles/ambari_common/tasks/passwd_less_ssh.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Generate ssh key pair for "{{ ambari_user }}"
-  user: name={{ ambari_user }} generate_ssh_key=yes
-
-- name: Fetch the generated public key
-  fetch: src=~{{ ambari_user }}/.ssh/id_rsa.pub dest=/tmp/keys/{{ inventory_hostname }}.pub flat=yes
-
-- name: Add key pairs to existing authorized_keys
-  authorized_key: user={{ ambari_user }} key="{{ lookup('file', '/tmp/keys/{{ item }}.pub') }}"
-  with_items:
-    - "{{ play_hosts }}"
-
-- name: Remove local copy of ssh keys
-  local_action: file path=/tmp/keys/{{ inventory_hostname }}.pub state=absent
-  become: False
-


[07/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/KeywordsAlertAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/KeywordsAlertAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/KeywordsAlertAdapter.java
deleted file mode 100644
index 9204240..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/KeywordsAlertAdapter.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.adapters;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.validator.routines.InetAddressValidator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.simple.JSONObject;
-import org.apache.log4j.Logger;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-public class KeywordsAlertAdapter extends AbstractAlertAdapter {
-
-	HTableInterface blacklist_table;
-	HTableInterface whitelist_table;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
-	String _whitelist_table_name;
-	String _blacklist_table_name;
-	String _quorum;
-	String _port;
-	String _topologyname;
-	Configuration conf = null;
-
-	String _topology_name;
-
-	Set<String> loaded_whitelist = new HashSet<String>();
-	Set<String> loaded_blacklist = new HashSet<String>();
-
-	List<String> keywordList;
-	List<String> keywordExceptionList;
-	
-	protected static final Logger LOG = Logger.getLogger(AllAlertAdapter.class);
-	
-	public KeywordsAlertAdapter(Map<String, String> config) {
-		try {
-			
-			if(!config.containsKey("keywords"))
-				throw new Exception("Keywords are missing");
-			
-			keywordList = Arrays.asList(config.get("keywords").split("\\|"));
-			
-			if(	config.containsKey("exceptions")) {
-				keywordExceptionList = Arrays.asList(config.get("exceptions").split("\\|"));
-			} else {
-				keywordExceptionList = new ArrayList<String>();
-			}
-				
-			if(!config.containsKey("whitelist_table_name"))
-				throw new Exception("Whitelist table name is missing");
-				
-			_whitelist_table_name = config.get("whitelist_table_name");
-			
-			if(!config.containsKey("blacklist_table_name"))
-				throw new Exception("Blacklist table name is missing");
-			
-			_blacklist_table_name = config.get("blacklist_table_name");
-			
-			if(!config.containsKey("quorum"))
-				throw new Exception("Quorum name is missing");
-			
-			_quorum = config.get("quorum");
-			
-			if(!config.containsKey("port"))
-				throw new Exception("port name is missing");
-			
-			_port = config.get("port");
-
-			if(!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
-				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
-			
-			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
-					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
-			
-			if(!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
-				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
-			
-			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
-					.get("_MAX_TIME_RETAIN_MINUTES"));
-
-			generateCache(_MAX_CACHE_SIZE_OBJECTS_NUM, _MAX_TIME_RETAIN_MINUTES);
-			
-		} catch (Exception e) {
-			System.out.println("Could not initialize Alerts Adapter");
-			e.printStackTrace();
-			System.exit(0);
-		}
-	}
-	
-	@Override
-	public boolean initialize() {
-		conf = HBaseConfiguration.create();
-		//conf.set("hbase.zookeeper.quorum", _quorum);
-		//conf.set("hbase.zookeeper.property.clientPort", _port);
-
-		LOG.trace("[Metron] Connecting to hbase with conf:" + conf);
-		LOG.trace("[Metron] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[Metron] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[Metron] ZK Client/port: "
-				+ conf.get("hbase.zookeeper.quorum") + " -> "
-				+ conf.get("hbase.zookeeper.property.clientPort"));
-
-		try {
-
-			LOG.trace("[Metron] Attempting to connect to hbase");
-
-			HConnection connection = HConnectionManager.createConnection(conf);
-
-			LOG.trace("[Metron] CONNECTED TO HBASE");
-
-			HBaseAdmin hba = new HBaseAdmin(conf);
-
-			if (!hba.tableExists(_whitelist_table_name))
-				throw new Exception("Whitelist table doesn't exist");
-
-			if (!hba.tableExists(_blacklist_table_name))
-				throw new Exception("Blacklist table doesn't exist");
-
-			whitelist_table = new HTable(conf, _whitelist_table_name);
-
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _whitelist_table_name);
-			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _blacklist_table_name);
-
-			if (connection == null || whitelist_table == null
-					|| blacklist_table == null)
-				throw new Exception("Unable to initialize hbase connection");
-
-			Scan scan = new Scan();
-
-			ResultScanner rs = whitelist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_whitelist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			whitelist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-			
-			System.out.println("LOADED WHITELIST IS: ");
-			
-			for(String str: loaded_whitelist)
-				System.out.println("WHITELIST: " + str);
-
-			scan = new Scan();
-
-			rs = blacklist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_blacklist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			blacklist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			rs.close(); // always close the ResultScanner!
-			hba.close();
-
-			return true;
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-
-		return false;
-	}
-
-	@Override
-	public boolean refresh() throws Exception {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	@Override
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	@Override
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-		
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-		JSONObject content = (JSONObject) raw_message.get("message");
-
-		JSONObject enrichment = null;
-		if (raw_message.containsKey("enrichment"))
-			enrichment = (JSONObject) raw_message.get("enrichment");
-
-		for (String keyword : keywordList) {
-			if (content.toString().contains(keyword)) {
-				
-				//check it doesn't have an "exception" keyword in it
-				for (String exception : keywordExceptionList) {
-					if (content.toString().contains(exception)) {
-						LOG.info("[Metron] KeywordAlertsAdapter: Omitting alert due to exclusion: " + exception);
-						return null;
-					}
-				}
-				
-				LOG.info("[Metron] KeywordAlertsAdapter: Found match for " + keyword);
-				JSONObject alert = new JSONObject();
-
-				String source = "unknown";
-				String dest = "unknown";
-				String host = "unknown";
-
-				if (content.containsKey("ip_src_addr"))
-				{
-					source = content.get("ip_src_addr").toString();
-					
-					if(RangeChecker.checkRange(loaded_whitelist, source))
-						host = source;				
-				}
-
-				if (content.containsKey("ip_dst_addr"))
-				{
-					dest = content.get("ip_dst_addr").toString();
-					
-					if(RangeChecker.checkRange(loaded_whitelist, dest))
-						host = dest;	
-				}
-
-				alert.put("designated_host", host);
-				alert.put("description", content.get("original_string").toString());
-				alert.put("priority", "MED");	
-
-				String alert_id = generateAlertId(source, dest, 0);
-
-				alert.put("alert_id", alert_id);
-				alerts.put(alert_id, alert);
-
-				alert.put("enrichment", enrichment);
-
-				return alerts;
-			}
-		}
-		
-		return null;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/RangeChecker.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/RangeChecker.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/RangeChecker.java
deleted file mode 100644
index 0807371..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/RangeChecker.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.adapters;
-
-import java.util.Set;
-
-import org.apache.commons.net.util.SubnetUtils;
-
-public class RangeChecker {
-
-	static boolean checkRange(Set<String> CIDR_networks, String ip) {
-		for (String network : CIDR_networks) {
-				
-			System.out.println("Looking at range: " + network + " and ip " + ip);
-			SubnetUtils utils = new SubnetUtils(network);
-			if(utils.getInfo().isInRange(ip)) {
-				System.out.println(ip + " in range " + network);
-				return true;
-			}
-		}
-		
-		//no matches
-		return false;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/ThreatAlertsAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/ThreatAlertsAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/ThreatAlertsAdapter.java
deleted file mode 100644
index 3e10db3..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/alerts/adapters/ThreatAlertsAdapter.java
+++ /dev/null
@@ -1,329 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.alerts.adapters;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.validator.routines.InetAddressValidator;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import org.apache.metron.alerts.interfaces.AlertsAdapter;
-
-@SuppressWarnings("serial")
-public class ThreatAlertsAdapter implements AlertsAdapter, Serializable {
-
-	String enrichment_tag;
-
-	HTableInterface blacklist_table;
-	HTableInterface whitelist_table;
-	InetAddressValidator ipvalidator = new InetAddressValidator();
-	String _whitelist_table_name;
-	String _blacklist_table_name;
-	String _quorum;
-	String _port;
-	String _topologyname;
-	Configuration conf = null;
-
-	Cache<String, String> cache;
-	String _topology_name;
-
-	Set<String> loaded_whitelist = new HashSet<String>();
-	Set<String> loaded_blacklist = new HashSet<String>();
-
-	protected static final Logger LOG = LoggerFactory
-			.getLogger(ThreatAlertsAdapter.class);
-
-	public ThreatAlertsAdapter(Map<String, String> config) {
-		try {
-
-			if (!config.containsKey("whitelist_table_name"))
-				throw new Exception("Whitelist table name is missing");
-
-			_whitelist_table_name = config.get("whitelist_table_name");
-
-			if (!config.containsKey("blacklist_table_name"))
-				throw new Exception("Blacklist table name is missing");
-
-			_blacklist_table_name = config.get("blacklist_table_name");
-
-			if (!config.containsKey("quorum"))
-				throw new Exception("Quorum name is missing");
-
-			_quorum = config.get("quorum");
-
-			if (!config.containsKey("port"))
-				throw new Exception("port name is missing");
-
-			_port = config.get("port");
-
-			if (!config.containsKey("_MAX_CACHE_SIZE_OBJECTS_NUM"))
-				throw new Exception("_MAX_CACHE_SIZE_OBJECTS_NUM name is missing");
-
-			int _MAX_CACHE_SIZE_OBJECTS_NUM = Integer.parseInt(config
-					.get("_MAX_CACHE_SIZE_OBJECTS_NUM"));
-
-			if (!config.containsKey("_MAX_TIME_RETAIN_MINUTES"))
-				throw new Exception("_MAX_TIME_RETAIN_MINUTES name is missing");
-
-			int _MAX_TIME_RETAIN_MINUTES = Integer.parseInt(config
-					.get("_MAX_TIME_RETAIN_MINUTES"));
-
-			cache = CacheBuilder.newBuilder().maximumSize(_MAX_CACHE_SIZE_OBJECTS_NUM)
-					.expireAfterWrite(_MAX_TIME_RETAIN_MINUTES, TimeUnit.MINUTES)
-					.build();
-
-			enrichment_tag = config.get("enrichment_tag");
-
-		} catch (Exception e) {
-			System.out.println("Could not initialize alerts adapter");
-			e.printStackTrace();
-			System.exit(0);
-		}
-	}
-
-	@SuppressWarnings("resource")
-    @Override
-	public boolean initialize() {
-
-		conf = HBaseConfiguration.create();
-		// conf.set("hbase.zookeeper.quorum", _quorum);
-		// conf.set("hbase.zookeeper.property.clientPort", _port);
-
-		LOG.trace("[Metron] Connecting to hbase with conf:" + conf);
-		LOG.trace("[Metron] Whitelist table name: " + _whitelist_table_name);
-		LOG.trace("[Metron] Whitelist table name: " + _blacklist_table_name);
-		LOG.trace("[Metron] ZK Client/port: "
-				+ conf.get("hbase.zookeeper.quorum") + " -> "
-				+ conf.get("hbase.zookeeper.property.clientPort"));
-
-		try {
-
-			LOG.trace("[Metron] Attempting to connect to hbase");
-
-			HConnection connection = HConnectionManager.createConnection(conf);
-
-			LOG.trace("[Metron] CONNECTED TO HBASE");
-
-			HBaseAdmin hba = new HBaseAdmin(conf);
-
-			if (!hba.tableExists(_whitelist_table_name))
-				throw new Exception("Whitelist table doesn't exist");
-
-			if (!hba.tableExists(_blacklist_table_name))
-				throw new Exception("Blacklist table doesn't exist");
-
-			whitelist_table = new HTable(conf, _whitelist_table_name);
-
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _whitelist_table_name);
-			blacklist_table = new HTable(conf, _blacklist_table_name);
-			LOG.trace("[Metron] CONNECTED TO TABLE: " + _blacklist_table_name);
-
-			if (connection == null || whitelist_table == null
-					|| blacklist_table == null)
-				throw new Exception("Unable to initialize hbase connection");
-
-			Scan scan = new Scan();
-
-			ResultScanner rs = whitelist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_whitelist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			whitelist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			scan = new Scan();
-
-			rs = blacklist_table.getScanner(scan);
-			try {
-				for (Result r = rs.next(); r != null; r = rs.next()) {
-					loaded_blacklist.add(Bytes.toString(r.getRow()));
-				}
-			} catch (Exception e) {
-				LOG.trace("[Metron] COULD NOT READ FROM HBASE");
-				e.printStackTrace();
-			} finally {
-				rs.close(); // always close the ResultScanner!
-				hba.close();
-			}
-			blacklist_table.close();
-
-			LOG.trace("[Metron] READ IN WHITELIST: " + loaded_whitelist.size());
-
-			rs.close(); // always close the ResultScanner!
-			hba.close();
-
-			return true;
-		} catch (Exception e) {
-
-			e.printStackTrace();
-		}
-
-		return false;
-
-	}
-
-	@Override
-	public boolean refresh() throws Exception {
-		return true;
-	}
-
-	@SuppressWarnings("unchecked")
-    @Override
-	public Map<String, JSONObject> alert(JSONObject raw_message) {
-
-		System.out.println("LOOKING FOR ENRICHMENT TAG: " + enrichment_tag);
-
-		Map<String, JSONObject> alerts = new HashMap<String, JSONObject>();
-		JSONObject content = (JSONObject) raw_message.get("message");
-
-		JSONObject enrichment = null;
-
-		if (raw_message.containsKey("enrichment"))
-			enrichment = (JSONObject) raw_message.get("enrichment");
-		else
-			return null;
-
-		if (enrichment.containsKey(enrichment_tag)) {
-
-			System.out.println("FOUND TAG: " + enrichment_tag);
-
-			JSONObject threat = (JSONObject) enrichment.get(enrichment_tag);
-
-			int cnt = 0;
-			Object enriched_key = null;
-			
-			for (Object key : threat.keySet()) {
-				JSONObject tmp = (JSONObject) threat.get(key);
-				cnt = cnt + tmp.size();
-				if (tmp.size() > 0)
-					enriched_key = key;
-			}
-
-			if (cnt == 0) {
-				System.out.println("TAG HAS NO ELEMENTS");
-				return null;
-			}
-
-			JSONObject alert = new JSONObject();
-
-			String source = "unknown";
-			String dest = "unknown";
-			String host = "unknown";
-
-			if (content.containsKey("ip_src_addr")) {
-				source = content.get("ip_src_addr").toString();
-
-				if (RangeChecker.checkRange(loaded_whitelist, source))
-					host = source;
-			}
-
-			if (content.containsKey("ip_dst_addr")) {
-				dest = content.get("ip_dst_addr").toString();
-
-				if (RangeChecker.checkRange(loaded_whitelist, dest))
-					host = dest;
-			}
-			
-			JSONObject threatQualifier = (JSONObject) threat.get(enriched_key);
-			
-			alert.put("designated_host", host);
-			String description =
-
-					new StringBuilder()
-					.append("Threat Intelligence match for ")
-					.append(content.get(enriched_key).toString())
-					.append(" from source: ")
-					.append(threatQualifier.keySet().iterator().next().toString())
-					.toString();	
-			alert.put("description", description);
-			alert.put("priority", "MED");
-
-			String alert_id = generateAlertId(source, dest, 0);
-
-			alert.put("alert_id", alert_id);
-			alerts.put(alert_id, alert);
-
-			alert.put("enrichment", enrichment);
-
-			return alerts;
-		} else {
-			System.out.println("DID NOT FIND TAG: " + enrichment_tag);
-			return null;
-		}
-
-	}
-
-	@Override
-	public boolean containsAlertId(String alert) {
-		// TODO Auto-generated method stub
-		return false;
-	}
-
-	protected String generateAlertId(String source_ip, String dst_ip,
-			int alert_type) {
-
-		String key = makeKey(source_ip, dst_ip, alert_type);
-
-		if (cache.getIfPresent(key) != null)
-			return cache.getIfPresent(key);
-
-		String new_UUID = System.currentTimeMillis() + "-" + UUID.randomUUID();
-
-		cache.put(key, new_UUID);
-		key = makeKey(dst_ip, source_ip, alert_type);
-		cache.put(key, new_UUID);
-
-		return new_UUID;
-
-	}
-
-	private String makeKey(String ip1, String ip2, int alert_type) {
-		return (ip1 + "-" + ip2 + "-" + alert_type);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/AbstractTaggerBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/AbstractTaggerBolt.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/AbstractTaggerBolt.java
deleted file mode 100644
index 274d7cc..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/AbstractTaggerBolt.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.tagging;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.tuple.Fields;
-
-import com.codahale.metrics.Counter;
-import org.apache.metron.alerts.interfaces.TaggerAdapter;
-import org.apache.metron.metrics.MetricReporter;
-
-@SuppressWarnings("rawtypes")
-public abstract class AbstractTaggerBolt extends BaseRichBolt {
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = -6710596708304282838L;
-
-	protected static final Logger LOG = LoggerFactory
-			.getLogger(AbstractTaggerBolt.class);
-
-	protected OutputCollector _collector;
-	protected TaggerAdapter _adapter;
-
-	protected String OutputFieldName;
-	protected JSONObject _identifier;
-	protected MetricReporter _reporter;
-	
-	protected Counter ackCounter, emitCounter, failCounter;
-
-	protected void registerCounters() {
-
-		String ackString = _adapter.getClass().getSimpleName() + ".ack";
-
-		String emitString = _adapter.getClass().getSimpleName() + ".emit";
-
-		String failString = _adapter.getClass().getSimpleName() + ".fail";
-
-		ackCounter = _reporter.registerCounter(ackString);
-		emitCounter = _reporter.registerCounter(emitString);
-		failCounter = _reporter.registerCounter(failString);
-
-	}
-
-	public final void prepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) {
-		_collector = collector;
-		
-		if (this._adapter == null)
-			throw new IllegalStateException("Tagging must be specified");
-		if(this._identifier == null)
-			throw new IllegalStateException("Identifier must be specified");
-		try {
-			doPrepare(conf, topologyContext, collector);
-		} catch (IOException e) {
-			LOG.error("Counld not initialize...");
-			e.printStackTrace();
-		}
-	}
-
-	public void declareOutputFields(OutputFieldsDeclarer declearer) {
-		declearer.declare(new Fields(this.OutputFieldName));
-	}
-
-	abstract void doPrepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/TelemetryTaggerBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/TelemetryTaggerBolt.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/TelemetryTaggerBolt.java
deleted file mode 100644
index d20fcec..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/TelemetryTaggerBolt.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.tagging;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.commons.configuration.Configuration;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import org.apache.metron.alerts.interfaces.TaggerAdapter;
-import org.apache.metron.json.serialization.JSONEncoderHelper;
-import org.apache.metron.metrics.MetricReporter;
-
-@SuppressWarnings("rawtypes")
-public class TelemetryTaggerBolt extends AbstractTaggerBolt {
-
-	/**
-	 * Use an adapter to tag existing telemetry messages with alerts. The list
-	 * of available tagger adapters is located under
-	 * org.apache.metron.tagging.adapters. At the time of the release the following
-	 * adapters are available:
-	 * 
-	 * <p>
-	 * <ul>
-	 * <li>RegexTagger = read a list or regular expressions and tag a message if
-	 * they exist in a message
-	 * <li>StaticAllTagger = tag each message with a static alert
-	 * <ul>
-	 * <p>
-	 */
-	private static final long serialVersionUID = -2647123143398352020L;
-	private Properties metricProperties;
-	private JSONObject metricConfiguration;
-
-	/**
-	 * 
-	 * @param tagger
-	 *            - tagger adapter for generating alert tags
-	 * @return instance of bolt
-	 */
-	public TelemetryTaggerBolt withMessageTagger(TaggerAdapter tagger) {
-		_adapter = tagger;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param OutputFieldName
-	 *            - output name of the tuple coming out of this bolt
-	 * @return - instance of this bolt
-	 */
-	public TelemetryTaggerBolt withOutputFieldName(String OutputFieldName) {
-		this.OutputFieldName = OutputFieldName;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param metricProperties
-	 *            - metric output to graphite
-	 * @return - instance of this bolt
-	 */
-	public TelemetryTaggerBolt withMetricProperties(Properties metricProperties) {
-		this.metricProperties = metricProperties;
-		return this;
-	}
-
-	/**
-	 * 
-	 * @param identifier
-	 *            - the identifier tag for tagging telemetry messages with
-	 *            alerts out of this bolt
-	 * @return - instance of this bolt
-	 */
-
-	public TelemetryTaggerBolt withIdentifier(JSONObject identifier) {
-		this._identifier = identifier;
-		return this;
-	}
-	
-	/**
-	 * @param config
-	 *            A class for generating custom metrics into graphite
-	 * @return Instance of this class
-	 */
-
-	public TelemetryTaggerBolt withMetricConfiguration(Configuration config) {
-		this.metricConfiguration = JSONEncoderHelper.getJSON(config
-				.subset("org.apache.metron.metrics"));
-		return this;
-	}
-
-	@Override
-	void doPrepare(Map conf, TopologyContext topologyContext,
-			OutputCollector collector) throws IOException {
-
-		LOG.info("[Metron] Preparing TelemetryParser Bolt...");
-
-		try {
-			_reporter = new MetricReporter();
-			_reporter.initialize(metricProperties, TelemetryTaggerBolt.class);
-			LOG.info("[Metron] Initialized metrics");
-		} catch (Exception e) {
-			LOG.info("[Metron] Could not initialize metrics");
-		}
-	}
-
-	@SuppressWarnings("unchecked")
-	public void execute(Tuple tuple) {
-
-		LOG.trace("[Metron] Starting to process message for alerts");
-		JSONObject original_message = null;
-
-		try {
-
-			original_message = (JSONObject) tuple.getValue(0);
-
-			if (original_message == null || original_message.isEmpty())
-				throw new Exception("Could not parse message from byte stream");
-
-			LOG.trace("[Metron] Received tuple: " + original_message);
-
-			JSONObject alerts_tag = new JSONObject();
-			JSONArray alerts_list = _adapter.tag(original_message);
-
-			LOG.trace("[Metron] Tagged message: " + alerts_list);
-
-			if (alerts_list.size() != 0) {
-				if (original_message.containsKey("alerts")) {
-					JSONObject tag = (JSONObject) original_message
-							.get("alerts");
-					JSONArray already_triggered = (JSONArray) tag
-							.get("triggered");
-					alerts_list.addAll(already_triggered);
-					LOG.trace("[Metron] Created a new string of alerts");
-				}
-
-				alerts_tag.put("identifier", _identifier);
-				alerts_tag.put("triggered", alerts_list);
-				original_message.put("alerts", alerts_tag);
-				
-				LOG.debug("[Metron] Detected alerts: " + alerts_tag);
-			}
-			else
-			{
-				LOG.debug("[Metron] The following messages did not contain alerts: " + original_message);
-			}
-
-			_collector.ack(tuple);
-			_collector.emit(new Values(original_message));
-			
-			/*if (metricConfiguration != null) {
-				emitCounter.inc();
-				ackCounter.inc();
-			}*/
-
-		} catch (Exception e) {
-			e.printStackTrace();
-			LOG.error("Failed to tag message :" + original_message);
-			e.printStackTrace();
-			_collector.fail(tuple);
-			
-			/*
-			if (metricConfiguration != null) {
-				failCounter.inc();
-			}*/
-		}
-	}
-
-	public void declareOutputFields(OutputFieldsDeclarer declearer) {
-		declearer.declare(new Fields(this.OutputFieldName));
-
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/AbstractTaggerAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/AbstractTaggerAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/AbstractTaggerAdapter.java
deleted file mode 100644
index 8ea6807..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/AbstractTaggerAdapter.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.tagging.adapters;
-
-import java.io.Serializable;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.metron.alerts.interfaces.TaggerAdapter;
-
-@SuppressWarnings("serial")
-public abstract class AbstractTaggerAdapter implements TaggerAdapter, Serializable{
-	
-	protected static final Logger _LOG = LoggerFactory
-			.getLogger(AbstractTaggerAdapter.class);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/RegexTagger.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/RegexTagger.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/RegexTagger.java
deleted file mode 100644
index adc46ce..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/RegexTagger.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.tagging.adapters;
-
-import java.util.Map;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-public class RegexTagger extends AbstractTaggerAdapter{
-	
-	/**
-	 * Reads a regex rules file and tags a message with alerts if any rule from that file
-	 * matches anything in the telemetry message
-	 */
-	private static final long serialVersionUID = -6091495636459799411L;
-	Map <String, JSONObject> _rules;
-	
-	/**
-	 * 
-	 * @param rules rules read from a properties XML file
-	 */
-	public RegexTagger(Map<String, JSONObject> rules)
-	{
-		_rules = rules;
-	}
-
-	/**
-	 * @param raw_message telemetry message to be tagged
-	 */
-	@SuppressWarnings("unchecked")
-	public JSONArray tag(JSONObject raw_message) {
-
-		JSONArray ja = new JSONArray();
-		String message_as_string = raw_message.toString();
-		
-		for(String rule : _rules.keySet())
-		{		
-			if (message_as_string.matches(rule))
-			{
-				ja.add(_rules.get(rule));
-			}
-		}	
-		
-		return ja;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/StaticAllTagger.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/StaticAllTagger.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/StaticAllTagger.java
deleted file mode 100644
index 1e9c6c4..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/StaticAllTagger.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.tagging.adapters;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-public class StaticAllTagger extends AbstractTaggerAdapter {
-
-	/**
-	 * Attaches a static alerts tag to JSON telemetry messages
-	 */
-	private static final long serialVersionUID = 7759427661169094065L;
-	private JSONObject _static_tag_message;
-	JSONArray ja = new JSONArray();
-
-	/**
-	 * 
-	 * @param static_tag_message
-	 *            static alerts tag to attach to the message as a JSON
-	 */
-	@SuppressWarnings("unchecked")
-	public StaticAllTagger(JSONObject static_tag_message) {
-		_static_tag_message = static_tag_message;
-		ja.add(_static_tag_message);
-	}
-
-	/**
-	 * @param raw_message
-	 *            message to tag
-	 */
-	public JSONArray tag(JSONObject raw_message) {
-
-		return ja;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/TaggerAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/TaggerAdapter.java b/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/TaggerAdapter.java
deleted file mode 100644
index 4fecb90..0000000
--- a/metron-streaming/Metron-Alerts/src/main/java/org/apache/metron/tagging/adapters/TaggerAdapter.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.tagging.adapters;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-public interface TaggerAdapter {
-
-	JSONArray tag(JSONObject raw_message);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/test/java/org/apache/metron/alerts/adapters/AllAlertAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/test/java/org/apache/metron/alerts/adapters/AllAlertAdapterTest.java b/metron-streaming/Metron-Alerts/src/test/java/org/apache/metron/alerts/adapters/AllAlertAdapterTest.java
deleted file mode 100644
index 574d960..0000000
--- a/metron-streaming/Metron-Alerts/src/test/java/org/apache/metron/alerts/adapters/AllAlertAdapterTest.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.adapters;
-
-import java.lang.reflect.Constructor;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.metron.test.AbstractConfigTest;
-import org.apache.metron.alerts.adapters.AllAlertAdapter;
-import org.junit.Assert;
-
- /**
- * <ul>
- * <li>Title: AllAlertAdapterTest</li>
- * <li>Description: Tests for AllAlertAdapter</li>
- * <li>Created: Oct 8, 2014</li>
- * </ul>
- * @version $Revision: 1.1 $
- */
-public class AllAlertAdapterTest extends AbstractConfigTest {
-
-     /**
-     * The allAlertAdapter.
-     */
-    private static AllAlertAdapter allAlertAdapter=null;
-    
-     /**
-     * The connected.
-     */
-    private static boolean connected=false;
-
-    /**
-     * Constructs a new <code>AllAlertAdapterTest</code> instance.
-     * @param name
-     */
-    public AllAlertAdapterTest(String name) {
-        super(name);
-    }
-
-    /**
-     * @throws java.lang.Exception
-     */
-    protected static void setUpBeforeClass() throws Exception {
-    }
-
-    /**
-     * @throws java.lang.Exception
-     */
-    protected static void tearDownAfterClass() throws Exception {
-    }
-
-    /* 
-     * (non-Javadoc)
-     * @see junit.framework.TestCase#setUp()
-     */
-
-    @SuppressWarnings("unchecked")
-    protected void setUp() throws Exception {
-          super.setUp("org.apache.metron.alerts.adapters.AllAlertAdapter");
-          Properties prop = super.getTestProperties();
-          Assert.assertNotNull(prop);
-       // this.setMode("global");
-        if(skipTests(this.getMode())){
-            System.out.println(getClass().getName()+" Skipping Tests !!Local Mode");
-            return;//skip tests
-       }else{      
-           Map<String, String> settings = super.getSettings();
-           @SuppressWarnings("rawtypes")
-        Class loaded_class = Class.forName("org.apache.metron.alerts.adapters.AllAlertAdapter");
-           @SuppressWarnings("rawtypes")
-        Constructor constructor = loaded_class.getConstructor(new Class[] { Map.class});
-           
-           AllAlertAdapterTest.allAlertAdapter = (AllAlertAdapter) constructor.newInstance(settings);
-            // AllAlertAdapterTest.allAlertAdapter = new AllAlertAdapter(settings)
-      }
-    }
-
-    /* 
-     * (non-Javadoc)
-     * @see junit.framework.TestCase#tearDown()
-     */
-
-    protected void tearDown() throws Exception {
-        super.tearDown();
-    }
-
-
-    /**
-     * Test method for {@link org.apache.metron.alerts.adapters.AlllterAdapter#initialize()}.
-     */
-    public void testInitializeAdapter() {
-        if(skipTests(this.getMode())){
-            return;//skip tests
-       }else{        
-           
-        boolean initialized =AllAlertAdapterTest.getAllAlertAdapter().initialize();
-        Assert.assertTrue(initialized);
-       }
-    }
-    
-    /**
-     * Test method for containsAlertId(@link  org.apache.metron.alerts.adapters.AlllterAdapter#containsAlertId()}.
-     */
-    public void testContainsAlertId(){
-        if(skipTests(this.getMode())){
-            return;//skip tests
-       }else{          
-            boolean containsAlert =AllAlertAdapterTest.getAllAlertAdapter().containsAlertId("test");
-            Assert.assertFalse(containsAlert);
-       }
-    }
- 
-   
-
-    /**
-     * Returns the allAlertAdapter.
-     * @return the allAlertAdapter.
-     */
-    
-    public static AllAlertAdapter getAllAlertAdapter() {
-        return allAlertAdapter;
-    }
-
-    /**
-     * Sets the allAlertAdapter.
-     * @param allAlertAdapter the allAlertAdapter.
-     */
-    
-    public static void setAllAlertAdapter(AllAlertAdapter allAlertAdapter) {
-    
-        AllAlertAdapterTest.allAlertAdapter = allAlertAdapter;
-    }
-    /**
-     * Returns the connected.
-     * @return the connected.
-     */
-    
-    public static boolean isConnected() {
-        return connected;
-    }
-
-    /**
-     * Sets the connected.
-     * @param connected the connected.
-     */
-    
-    public static void setConnected(boolean connected) {
-    
-        AllAlertAdapterTest.connected = connected;
-    }    
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/test/resources/AllAlertAdapterTest.properties
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/test/resources/AllAlertAdapterTest.properties b/metron-streaming/Metron-Alerts/src/test/resources/AllAlertAdapterTest.properties
deleted file mode 100644
index da6584f..0000000
--- a/metron-streaming/Metron-Alerts/src/test/resources/AllAlertAdapterTest.properties
+++ /dev/null
@@ -1,17 +0,0 @@
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json b/metron-streaming/Metron-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
deleted file mode 100644
index c4f2a82..0000000
--- a/metron-streaming/Metron-Alerts/src/test/resources/TestSchemas/AllAlertAdapterSchema.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
-"title": "GeoMySql Schema",
-"type": "object",
-"properties": {
-
-         "city"    : {
-					   "type": "string"
-				  },
-		 "country" : {
-						"type": "string"
-					},
-		 "dmaCode" :
-		 			 {
-						"type": "string"
-					},
-	     "geoHash" : 
-	     			{
-						"type": "string"
-					},
-		 "latitude" : 
-		 			{
-						"type": "string"
-				   },
-		 "locID" : 
-		 			{
-					   "type": "string"
-				   },
-		 "location_point" : 
-		 			{
-					   "type": "string"
-				    },
-		 "longitude" : 
-		 			{
-						"type": "string"
-					},
-		 "postalCode" : 
-		 			{
-						"type": "string"
-					}
-   },
-   "required": ["city", "country", "dmaCode","latitude","locID","location_point","postalCode"]
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Alerts/src/test/resources/config/AllAlertAdapterTest.config
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Alerts/src/test/resources/config/AllAlertAdapterTest.config b/metron-streaming/Metron-Alerts/src/test/resources/config/AllAlertAdapterTest.config
deleted file mode 100644
index 11c545c..0000000
--- a/metron-streaming/Metron-Alerts/src/test/resources/config/AllAlertAdapterTest.config
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#Alerts Bolt
-bolt.alerts.adapter=org.apache.metron.alerts.adapters.AllAlertAdapter
-org.apache.metron.alerts.adapters.AllAlertAdapter.whitelist_table_name = ip_whitelist
-org.apache.metron.alerts.adapters.AllAlertAdapter.blacklist_table_name = ip_blacklist
-org.apache.metron.alerts.adapters.AllAlertAdapter.quorum=zkpr1,zkpr2,zkpr3
-org.apache.metron.alerts.adapters.AllAlertAdapter.port=2181
-org.apache.metron.alerts.adapters.AllAlertAdapter._MAX_CACHE_SIZE_OBJECTS_NUM=3600
-org.apache.metron.alerts.adapters.AllAlertAdapter._MAX_TIME_RETAIN_MINUTES=1000

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/.gitignore
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/.gitignore b/metron-streaming/Metron-Common/.gitignore
deleted file mode 100644
index b83d222..0000000
--- a/metron-streaming/Metron-Common/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/target/

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/pom.xml
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/pom.xml b/metron-streaming/Metron-Common/pom.xml
deleted file mode 100644
index 6985e6d..0000000
--- a/metron-streaming/Metron-Common/pom.xml
+++ /dev/null
@@ -1,291 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!-- 
-  Licensed to the Apache Software 
-	Foundation (ASF) under one or more contributor license agreements. See the 
-	NOTICE file distributed with this work for additional information regarding 
-	copyright ownership. The ASF licenses this file to You under the Apache License, 
-	Version 2.0 (the "License"); you may not use this file except in compliance 
-	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
-	Unless required by applicable law or agreed to in writing, software distributed 
-	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
-	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
-  the specific language governing permissions and limitations under the License. 
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.metron</groupId>
-        <artifactId>Metron-Streaming</artifactId>
-        <version>0.1BETA</version>
-    </parent>
-    <artifactId>Metron-Common</artifactId>
-    <name>Metron-Common</name>
-    <description>Components common to all enrichments</description>
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-        <commons.config.version>1.10</commons.config.version>
-    </properties>
-    <repositories>
-        <repository>
-            <id>Metron-Kraken-Repo</id>
-            <name>Metron Kraken Repository</name>
-            <url>https://raw.github.com/opensoc/kraken/mvn-repo</url>
-        </repository>
-    </repositories>
-    <dependencies>
-        <dependency>
-            <groupId>com.opencsv</groupId>
-            <artifactId>opencsv</artifactId>
-            <version>${global_opencsv_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.googlecode.json-simple</groupId>
-            <artifactId>json-simple</artifactId>
-            <version>${global_json_simple_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.storm</groupId>
-            <artifactId>storm-core</artifactId>
-            <version>${global_storm_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <artifactId>servlet-api</artifactId>
-                    <groupId>javax.servlet</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>log4j-over-slf4j</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kafka</groupId>
-            <artifactId>kafka_2.9.2</artifactId>
-            <version>${global_kafka_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.sun.jmx</groupId>
-                    <artifactId>jmxri</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.sun.jdmk</groupId>
-                    <artifactId>jmxtools</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>javax.jms</groupId>
-                    <artifactId>jms</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.codahale.metrics</groupId>
-            <artifactId>metrics-core</artifactId>
-            <version>${global_metrics_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>com.codahale.metrics</groupId>
-            <artifactId>metrics-graphite</artifactId>
-            <version>${global_metrics_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>commons-configuration</groupId>
-            <artifactId>commons-configuration</artifactId>
-            <version>${commons.config.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.krakenapps</groupId>
-            <artifactId>kraken-pcap</artifactId>
-            <version>${global_pcap_version}</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>slf4j-api</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>slf4j-simple</artifactId>
-                    <groupId>org.slf4j</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-            <version>${global_guava_version}</version>
-            <scope>compile</scope>
-        </dependency>
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>${global_junit_version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-common</artifactId>
-            <version>${global_hbase_version}</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <version>${global_hbase_version}</version>
-            <scope>provided</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.google.guava</groupId>
-                    <artifactId>guava</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>com.github.fge</groupId>
-            <artifactId>json-schema-validator</artifactId>
-            <version>${global_json_schema_validator_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-recipes</artifactId>
-            <version>2.7.1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.storm</groupId>
-            <artifactId>flux-core</artifactId>
-            <version>${global_flux_version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.storm</groupId>
-            <artifactId>storm-kafka</artifactId>
-            <version>${global_storm_version}</version>
-            <exclusions>
-                <exclusion>
-                    <artifactId>org.apache.curator</artifactId>
-                    <groupId>curator-client</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.curator</groupId>
-            <artifactId>curator-test</artifactId>
-            <version>2.7.1</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-all</artifactId>
-            <version>${global_mockito_version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>nl.jqno.equalsverifier</groupId>
-            <artifactId>equalsverifier</artifactId>
-            <version>2.0.2</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.metron</groupId>
-            <artifactId>Metron-TestingUtilities</artifactId>
-            <version>0.1BETA</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-
-    <reporting>
-        <plugins>
-            <!-- Normally, dependency report takes time, skip it -->
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-project-info-reports-plugin</artifactId>
-                <version>2.7</version>
-
-                <configuration>
-                    <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.mojo</groupId>
-                <artifactId>emma-maven-plugin</artifactId>
-                <version>1.0-alpha-3</version>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-pmd-plugin</artifactId>
-                <configuration>
-                    <targetJdk>1.7</targetJdk>
-                </configuration>
-            </plugin>
-        </plugins>
-    </reporting>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.1</version>
-                <configuration>
-                    <source>1.7</source>
-                    <compilerArgument>-Xlint:unchecked</compilerArgument>
-                    <target>1.7</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>1.4</version>
-                <configuration>
-                    <createDependencyReducedPom>true</createDependencyReducedPom>
-                    <artifactSet>
-                        <excludes>
-                            <exclude>*slf4j*</exclude>
-                        </excludes>
-                    </artifactSet>
-                </configuration>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <relocations>
-                                <relocation>
-                                    <pattern>com.google.common</pattern>
-                                    <shadedPattern>org.apache.metron.guava</shadedPattern>
-                                </relocation>
-                            </relocations>
-                            <transformers>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
-                                    <resource>.yaml</resource>
-                                </transformer>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                                    <mainClass></mainClass>
-                                </transformer>
-                            </transformers>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-        <resources>
-            <resource>
-                <directory>src/main/resources</directory>
-            </resource>
-        </resources>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/Constants.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/Constants.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/Constants.java
deleted file mode 100644
index 32f1441..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/Constants.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron;
-
-public class Constants {
-
-  public static final String GLOBAL_CONFIG_NAME = "global";
-  public static final String SENSORS_CONFIG_NAME = "sensors";
-  public static final String ZOOKEEPER_ROOT = "/metron";
-  public static final String ZOOKEEPER_TOPOLOGY_ROOT = ZOOKEEPER_ROOT + "/topology";
-  public static final String ZOOKEEPER_GLOBAL_ROOT = ZOOKEEPER_TOPOLOGY_ROOT + "/" + GLOBAL_CONFIG_NAME;
-  public static final String ZOOKEEPER_SENSOR_ROOT = ZOOKEEPER_TOPOLOGY_ROOT + "/" + SENSORS_CONFIG_NAME;
-  public static final long DEFAULT_CONFIGURED_BOLT_TIMEOUT = 5000;
-  public static final String SENSOR_TYPE = "source.type";
-  public static final String ENRICHMENT_TOPIC = "enrichments";
-  public static final String ERROR_STREAM = "error";
-  public static final String SIMPLE_HBASE_ENRICHMENT = "hbaseEnrichment";
-  public static final String SIMPLE_HBASE_THREAT_INTEL = "hbaseThreatIntel";
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsAdapter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsAdapter.java
deleted file mode 100644
index a26054a..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsAdapter.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.interfaces;
-
-import java.util.Map;
-
-import org.json.simple.JSONObject;
-
-public interface AlertsAdapter {
-
-	boolean initialize();
-
-	boolean refresh() throws Exception;
-
-	Map<String, JSONObject> alert(JSONObject raw_message);
-
-	boolean containsAlertId(String alert);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsInterface.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsInterface.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsInterface.java
deleted file mode 100644
index 575ad2c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/AlertsInterface.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.interfaces;
-
-import org.json.simple.JSONObject;
-
-public interface AlertsInterface {
-
-	public JSONObject getContent();
-	public void setContent(JSONObject content);
-	public String getUuid();
-	public void setUuid(String uuid);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/TaggerAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/TaggerAdapter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/TaggerAdapter.java
deleted file mode 100644
index 1ccdf24..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/alerts/interfaces/TaggerAdapter.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.alerts.interfaces;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-public interface TaggerAdapter {
-
-	JSONArray tag(JSONObject raw_message);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/BulkMessageWriterBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/BulkMessageWriterBolt.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/BulkMessageWriterBolt.java
deleted file mode 100644
index f1f79f0..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/BulkMessageWriterBolt.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.bolt;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import org.apache.metron.Constants;
-import org.apache.metron.domain.SensorEnrichmentConfig;
-import org.apache.metron.helpers.topology.ErrorUtils;
-import org.apache.metron.topology.TopologyUtils;
-import org.apache.metron.writer.interfaces.BulkMessageWriter;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.*;
-
-public class BulkMessageWriterBolt extends ConfiguredBolt {
-
-  private static final Logger LOG = LoggerFactory
-          .getLogger(BulkMessageWriterBolt.class);
-  private OutputCollector collector;
-  private BulkMessageWriter<JSONObject> bulkMessageWriter;
-  private Map<String, List<Tuple>> sensorTupleMap = new HashMap<>();
-  private Map<String, List<JSONObject>> sensorMessageMap = new HashMap<>();
-
-  public BulkMessageWriterBolt(String zookeeperUrl) {
-    super(zookeeperUrl);
-  }
-
-  public BulkMessageWriterBolt withBulkMessageWriter(BulkMessageWriter<JSONObject> bulkMessageWriter) {
-    this.bulkMessageWriter = bulkMessageWriter;
-    return this;
-  }
-
-  @Override
-  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-    this.collector = collector;
-    super.prepare(stormConf, context, collector);
-    try {
-      bulkMessageWriter.init(stormConf, configurations);
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  @Override
-  public void execute(Tuple tuple) {
-    JSONObject message = (JSONObject)((JSONObject) tuple.getValueByField("message")).clone();
-    message.put("index." + bulkMessageWriter.getClass().getSimpleName().toLowerCase() + ".ts", "" + System.currentTimeMillis());
-    String sensorType = TopologyUtils.getSensorType(message);
-    SensorEnrichmentConfig sensorEnrichmentConfig = configurations.getSensorEnrichmentConfig(sensorType);
-    int batchSize = sensorEnrichmentConfig != null ? sensorEnrichmentConfig.getBatchSize() : 1;
-    List<Tuple> tupleList = sensorTupleMap.get(sensorType);
-    if (tupleList == null) tupleList = new ArrayList<>();
-    tupleList.add(tuple);
-    List<JSONObject> messageList = sensorMessageMap.get(sensorType);
-    if (messageList == null) messageList = new ArrayList<>();
-    messageList.add(message);
-    if (messageList.size() < batchSize) {
-      sensorTupleMap.put(sensorType, tupleList);
-      sensorMessageMap.put(sensorType, messageList);
-    } else {
-      try {
-        bulkMessageWriter.write(sensorType, configurations, tupleList, messageList);
-        for(Tuple t: tupleList) {
-          collector.ack(t);
-        }
-      } catch (Exception e) {
-        for(Tuple t: tupleList) {
-          collector.fail(t);
-        }
-        ErrorUtils.handleError(collector, e, Constants.ERROR_STREAM);
-      }
-      sensorTupleMap.remove(sensorType);
-      sensorMessageMap.remove(sensorType);
-    }
-  }
-
-  @Override
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-    declarer.declareStream("error", new Fields("message"));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/ConfiguredBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/ConfiguredBolt.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/ConfiguredBolt.java
deleted file mode 100644
index 2a1cb13..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/ConfiguredBolt.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.bolt;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.base.BaseRichBolt;
-import org.apache.curator.RetryPolicy;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.framework.recipes.cache.TreeCache;
-import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
-import org.apache.curator.framework.recipes.cache.TreeCacheListener;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.log4j.Logger;
-import org.apache.metron.Constants;
-import org.apache.metron.domain.Configurations;
-import org.apache.metron.utils.ConfigurationsUtils;
-
-import java.io.IOException;
-import java.util.Map;
-
-public abstract class ConfiguredBolt extends BaseRichBolt {
-
-  private static final Logger LOG = Logger.getLogger(ConfiguredBolt.class);
-
-  private String zookeeperUrl;
-
-  protected final Configurations configurations = new Configurations();
-  protected CuratorFramework client;
-  protected TreeCache cache;
-
-  public ConfiguredBolt(String zookeeperUrl) {
-    this.zookeeperUrl = zookeeperUrl;
-  }
-
-  public Configurations getConfigurations() {
-    return configurations;
-  }
-
-  public void setCuratorFramework(CuratorFramework client) {
-    this.client = client;
-  }
-
-  public void setTreeCache(TreeCache cache) {
-    this.cache = cache;
-  }
-
-  protected void reloadCallback(String name, Configurations.Type type) {
-  }
-
-  @Override
-  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-    try {
-      if (client == null) {
-        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
-        client = CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
-      }
-      client.start();
-      if (cache == null) {
-        cache = new TreeCache(client, Constants.ZOOKEEPER_TOPOLOGY_ROOT);
-        TreeCacheListener listener = new TreeCacheListener() {
-          @Override
-          public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception {
-            if (event.getType().equals(TreeCacheEvent.Type.NODE_ADDED) || event.getType().equals(TreeCacheEvent.Type.NODE_UPDATED)) {
-              String path = event.getData().getPath();
-              byte[] data = event.getData().getData();
-              updateConfig(path, data);
-            }
-          }
-        };
-        cache.getListenable().addListener(listener);
-        try {
-          ConfigurationsUtils.updateConfigsFromZookeeper(configurations, client);
-        } catch (Exception e) {
-          LOG.warn("Unable to load configs from zookeeper, but the cache should load lazily...");
-        }
-      }
-      cache.start();
-    } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
-      throw new RuntimeException(e);
-    }
-  }
-
-  public void updateConfig(String path, byte[] data) throws IOException {
-    if (data.length != 0) {
-      String name = path.substring(path.lastIndexOf("/") + 1);
-      Configurations.Type type;
-      if (path.startsWith(Constants.ZOOKEEPER_SENSOR_ROOT)) {
-        configurations.updateSensorEnrichmentConfig(name, data);
-        type = Configurations.Type.SENSOR;
-      } else if (Constants.ZOOKEEPER_GLOBAL_ROOT.equals(path)) {
-        configurations.updateGlobalConfig(data);
-        type = Configurations.Type.GLOBAL;
-      } else {
-        configurations.updateConfig(name, data);
-        type = Configurations.Type.OTHER;
-      }
-      reloadCallback(name, type);
-    }
-  }
-
-  @Override
-  public void cleanup() {
-    cache.close();
-    client.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/JoinBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/JoinBolt.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/JoinBolt.java
deleted file mode 100644
index 9c2c67e..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/JoinBolt.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.bolt;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import com.google.common.base.Joiner;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import com.google.common.collect.Sets;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-
-public abstract class JoinBolt<V> extends ConfiguredBolt {
-
-  private static final Logger LOG = LoggerFactory
-          .getLogger(JoinBolt.class);
-  protected OutputCollector collector;
-
-  protected transient CacheLoader<String, Map<String, V>> loader;
-  protected transient LoadingCache<String, Map<String, V>> cache;
-  protected Long maxCacheSize;
-  protected Long maxTimeRetain;
-
-  public JoinBolt(String zookeeperUrl) {
-    super(zookeeperUrl);
-  }
-
-  public JoinBolt withMaxCacheSize(long maxCacheSize) {
-    this.maxCacheSize = maxCacheSize;
-    return this;
-  }
-
-  public JoinBolt withMaxTimeRetain(long maxTimeRetain) {
-    this.maxTimeRetain = maxTimeRetain;
-    return this;
-  }
-
-  @Override
-  public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
-    super.prepare(map, topologyContext, outputCollector);
-    this.collector = outputCollector;
-    if (this.maxCacheSize == null)
-      throw new IllegalStateException("maxCacheSize must be specified");
-    if (this.maxTimeRetain == null)
-      throw new IllegalStateException("maxTimeRetain must be specified");
-    loader = new CacheLoader<String, Map<String, V>>() {
-      public Map<String, V> load(String key) throws Exception {
-        return new HashMap<>();
-      }
-    };
-    cache = CacheBuilder.newBuilder().maximumSize(maxCacheSize)
-            .expireAfterWrite(maxTimeRetain, TimeUnit.MINUTES)
-            .build(loader);
-    prepare(map, topologyContext);
-  }
-
-  @SuppressWarnings("unchecked")
-  @Override
-  public void execute(Tuple tuple) {
-    String streamId = tuple.getSourceStreamId();
-    String key = (String) tuple.getValueByField("key");
-    V message = (V) tuple.getValueByField("message");
-    try {
-      Map<String, V> streamMessageMap = cache.get(key);
-      if (streamMessageMap.containsKey(streamId)) {
-        LOG.warn(String.format("Received key %s twice for " +
-                "stream %s", key, streamId));
-      }
-      streamMessageMap.put(streamId, message);
-      Set<String> streamIds = getStreamIds(message);
-      Set<String> streamMessageKeys = streamMessageMap.keySet();
-      if (streamMessageKeys.size() == streamIds.size() && Sets.symmetricDifference
-              (streamMessageKeys, streamIds)
-              .isEmpty()) {
-        collector.emit("message", tuple, new Values(key, joinMessages
-                (streamMessageMap)));
-        collector.ack(tuple);
-        cache.invalidate(key);
-      } else {
-        cache.put(key, streamMessageMap);
-        if(LOG.isDebugEnabled()) {
-          LOG.debug(getClass().getSimpleName() + ": Missed joining portions for "+ key + ". Expected " + Joiner.on(",").join(streamIds)
-                  + " != " + Joiner.on(",").join(streamMessageKeys)
-                   );
-        }
-      }
-    } catch (ExecutionException e) {
-      collector.reportError(e);
-      LOG.error(e.getMessage(), e);
-    }
-  }
-
-  @Override
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-    declarer.declareStream("message", new Fields("key", "message"));
-  }
-
-  public abstract void prepare(Map map, TopologyContext topologyContext);
-
-  public abstract Set<String> getStreamIds(V value);
-
-  public abstract V joinMessages(Map<String, V> streamMessageMap);
-}


[23/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/pom.xml b/metron-platform/metron-hbase/pom.xml
new file mode 100644
index 0000000..fc12808
--- /dev/null
+++ b/metron-platform/metron-hbase/pom.xml
@@ -0,0 +1,77 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+	Foundation (ASF) under one or more contributor license agreements. See the 
+	NOTICE file distributed with this work for additional information regarding 
+	copyright ownership. The ASF licenses this file to You under the Apache License, 
+	Version 2.0 (the "License"); you may not use this file except in compliance 
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+	Unless required by applicable law or agreed to in writing, software distributed 
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-hbase</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <mysql.version>5.1.31</mysql.version>
+        <slf4j.version>1.7.7</slf4j.version>
+        <storm.hdfs.version>0.1.2</storm.hdfs.version>
+        <guava.version>${global_hbase_guava_version}</guava.version>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-common</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>1.2.17</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/Connector.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/Connector.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/Connector.java
new file mode 100644
index 0000000..e787e43
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/Connector.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase;
+
+import org.apache.hadoop.hbase.client.Put;
+
+import java.io.IOException;
+
+public abstract class Connector {
+  protected TableConfig tableConf;
+  protected String _quorum;
+  protected String _port;
+
+  public Connector(final TableConfig conf, String _quorum, String _port) throws IOException {
+    this.tableConf = conf;
+    this._quorum = _quorum;
+    this._port = _port;
+  }
+  public abstract void put(Put put) throws IOException;
+  public abstract void close();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableConnector.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableConnector.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableConnector.java
new file mode 100644
index 0000000..ace4d80
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableConnector.java
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.hbase;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+
+import backtype.storm.generated.Bolt;
+
+/**
+ * HTable connector for Storm {@link Bolt}
+ * <p>
+ * The HBase configuration is picked up from the first <tt>hbase-site.xml</tt> encountered in the
+ * classpath
+ */
+@SuppressWarnings("serial")
+public class HTableConnector extends Connector implements Serializable{
+  private static final Logger LOG = Logger.getLogger(HTableConnector.class);
+  private Configuration conf;
+  protected HTableInterface table;
+  private String tableName;
+  private String connectorImpl;
+
+
+  /**
+   * Initialize HTable connection
+   * @param conf The {@link TupleTableConfig}
+   * @throws IOException
+   */
+  public HTableConnector(final TableConfig conf, String _quorum, String _port) throws IOException {
+    super(conf, _quorum, _port);
+    this.connectorImpl = conf.getConnectorImpl();
+    this.tableName = conf.getTableName();
+    this.conf = HBaseConfiguration.create();
+    
+    if(_quorum != null && _port != null)
+    {
+    	this.conf.set("hbase.zookeeper.quorum", _quorum);
+    	this.conf.set("hbase.zookeeper.property.clientPort", _port);
+    }
+
+    LOG.info(String.format("Initializing connection to HBase table %s at %s", tableName,
+      this.conf.get("hbase.rootdir")));
+
+    try {
+      this.table = getTableProvider().getTable(this.conf, this.tableName);
+    } catch (IOException ex) {
+      throw new IOException("Unable to establish connection to HBase table " + this.tableName, ex);
+    }
+
+    if (conf.isBatch()) {
+      // Enable client-side write buffer
+      this.table.setAutoFlush(false, true);
+      LOG.info("Enabled client-side write buffer");
+    }
+
+    // If set, override write buffer size
+    if (conf.getWriteBufferSize() > 0) {
+      try {
+        this.table.setWriteBufferSize(conf.getWriteBufferSize());
+
+        LOG.info("Setting client-side write buffer to " + conf.getWriteBufferSize());
+      } catch (IOException ex) {
+        LOG.error("Unable to set client-side write buffer size for HBase table " + this.tableName,
+          ex);
+      }
+    }
+
+    // Check the configured column families exist
+    for (String cf : conf.getColumnFamilies()) {
+      if (!columnFamilyExists(cf)) {
+        throw new RuntimeException(String.format(
+          "HBase table '%s' does not have column family '%s'", conf.getTableName(), cf));
+      }
+    }
+  }
+
+  protected TableProvider getTableProvider() throws IOException {
+    if(connectorImpl == null || connectorImpl.length() == 0 || connectorImpl.charAt(0) == '$') {
+      return new HTableProvider();
+    }
+    else {
+      try {
+        Class<? extends TableProvider> clazz = (Class<? extends TableProvider>) Class.forName(connectorImpl);
+        return clazz.getConstructor().newInstance();
+      } catch (InstantiationException e) {
+        throw new IOException("Unable to instantiate connector.", e);
+      } catch (IllegalAccessException e) {
+        throw new IOException("Unable to instantiate connector: illegal access", e);
+      } catch (InvocationTargetException e) {
+        throw new IOException("Unable to instantiate connector", e);
+      } catch (NoSuchMethodException e) {
+        throw new IOException("Unable to instantiate connector: no such method", e);
+      } catch (ClassNotFoundException e) {
+        throw new IOException("Unable to instantiate connector: class not found", e);
+      }
+    }
+  }
+
+  /**
+   * Checks to see if table contains the given column family
+   * @param columnFamily The column family name
+   * @return boolean
+   * @throws IOException
+   */
+  private boolean columnFamilyExists(final String columnFamily) throws IOException {
+    return this.table.getTableDescriptor().hasFamily(Bytes.toBytes(columnFamily));
+  }
+
+  /**
+   * @return the table
+   */
+  public HTableInterface getTable() {
+    return table;
+  }
+
+  @Override
+  public void put(Put put) throws IOException {
+      table.put(put);
+  }
+
+  /**
+   * Close the table
+   */
+  @Override
+  public void close() {
+    try {
+      this.table.close();
+    } catch (IOException ex) {
+      LOG.error("Unable to close connection to HBase table " + tableName, ex);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableProvider.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableProvider.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableProvider.java
new file mode 100644
index 0000000..e454f04
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/HTableProvider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.HTableInterface;
+
+import java.io.IOException;
+
+public class HTableProvider implements TableProvider {
+    @Override
+    public HTableInterface getTable(Configuration config, String tableName) throws IOException {
+        return new HTable(config, tableName);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableConfig.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableConfig.java
new file mode 100644
index 0000000..de2e929
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableConfig.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+
+public class TableConfig implements Serializable {
+    static final long serialVersionUID = -1L;
+    private String tableName;
+    private boolean batch = true;
+    protected Map<String, Set<String>> columnFamilies = new HashMap<>();
+    private long writeBufferSize = 0L;
+    private String connectorImpl;
+
+    public TableConfig() {
+
+    }
+
+    public TableConfig(String tableName) {
+        this.tableName = tableName;
+    }
+
+    public String getTableName() {
+        return tableName;
+    }
+
+    public TableConfig withConnectorImpl(String impl) {
+        connectorImpl = impl;
+        return this;
+    }
+
+    public TableConfig withTable(String table) {
+        this.tableName = table;
+        return this;
+    }
+
+    public TableConfig withBatch(Boolean isBatch) {
+        this.batch = isBatch;
+        return this;
+    }
+
+    public String getConnectorImpl() {
+        return connectorImpl;
+    }
+
+    /**
+     * @return Whether batch mode is enabled
+     */
+    public boolean isBatch() {
+        return batch;
+    }
+
+    /**
+     * @param batch
+     *          Whether to enable HBase's client-side write buffer.
+     *          <p>
+     *          When enabled your bolt will store put operations locally until the
+     *          write buffer is full, so they can be sent to HBase in a single RPC
+     *          call. When disabled each put operation is effectively an RPC and
+     *          is sent straight to HBase. As your bolt can process thousands of
+     *          values per second it is recommended that the write buffer is
+     *          enabled.
+     *          <p>
+     *          Enabled by default
+     */
+    public void setBatch(boolean batch) {
+        this.batch = batch;
+    }
+    /**
+     * @param writeBufferSize
+     *          Overrides the client-side write buffer size.
+     *          <p>
+     *          By default the write buffer size is 2 MB (2097152 bytes). If you
+     *          are storing larger data, you may want to consider increasing this
+     *          value to allow your bolt to efficiently group together a larger
+     *          number of records per RPC
+     *          <p>
+     *          Overrides the write buffer size you have set in your
+     *          hbase-site.xml e.g. <code>hbase.client.write.buffer</code>
+     */
+    public void setWriteBufferSize(long writeBufferSize) {
+        this.writeBufferSize = writeBufferSize;
+    }
+
+    /**
+     * @return the writeBufferSize
+     */
+    public long getWriteBufferSize() {
+        return writeBufferSize;
+    }
+    /**
+     * @return A Set of configured column families
+     */
+    public Set<String> getColumnFamilies() {
+        return this.columnFamilies.keySet();
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableProvider.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableProvider.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableProvider.java
new file mode 100644
index 0000000..dc0569e
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TableProvider.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+
+import java.io.IOException;
+import java.io.Serializable;
+
+public interface TableProvider extends Serializable {
+    HTableInterface getTable(Configuration config, String tableName) throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TupleTableConfig.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
new file mode 100644
index 0000000..8257d8a
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.NavigableMap;
+import java.util.Set;
+import java.util.TreeMap;
+
+import com.google.common.base.Joiner;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.log4j.Logger;
+
+/**
+ * Configuration for Storm {@link Tuple} to HBase serialization.
+ */
+@SuppressWarnings("serial")
+public class TupleTableConfig extends TableConfig implements Serializable {
+  private static final Logger LOG = Logger.getLogger(TupleTableConfig.class);
+  static final long serialVersionUID = -1L;
+  public static final long DEFAULT_INCREMENT = 1L;
+  
+  protected String tupleRowKeyField;
+  protected String tupleTimestampField;
+  protected Durability durability = Durability.USE_DEFAULT;
+  private String fields;
+
+  /**
+   * Initialize configuration
+   * 
+   * @param table
+   *          The HBase table name
+   * @param rowKeyField
+   *          The {@link Tuple} field used to set the rowKey
+   */
+  public TupleTableConfig(final String table, final String rowKeyField) {
+    super(table);
+    this.tupleRowKeyField = rowKeyField;
+    this.tupleTimestampField = "";
+    this.columnFamilies = new HashMap<String, Set<String>>();
+  }
+  
+  /**
+   * Initialize configuration
+   * 
+   * @param table
+   *          The HBase table name
+   * @param rowKeyField
+   *          The {@link Tuple} field used to set the rowKey
+   * @param timestampField
+   *          The {@link Tuple} field used to set the timestamp
+   */
+  public TupleTableConfig(final String table, final String rowKeyField, final String timestampField) {
+    super(table);
+    this.tupleRowKeyField = rowKeyField;
+    this.tupleTimestampField = timestampField;
+    this.columnFamilies = new HashMap<String, Set<String>>();
+  }
+
+  public TupleTableConfig() {
+    super(null);
+    this.columnFamilies = new HashMap<String, Set<String>>();
+  }
+
+
+
+  public TupleTableConfig withRowKeyField(String rowKeyField) {
+    this.tupleRowKeyField = rowKeyField;
+    return this;
+  }
+
+  public TupleTableConfig withTimestampField(String timestampField) {
+    this.tupleTimestampField = timestampField;
+    return this;
+  }
+
+  public TupleTableConfig withFields(String fields) {
+    this.fields = fields;
+    return this;
+  }
+
+
+
+  public String getFields() {
+    return fields;
+  }
+
+
+
+  /**
+   * Add column family and column qualifier to be extracted from tuple
+   * 
+   * @param columnFamily
+   *          The column family name
+   * @param columnQualifier
+   *          The column qualifier name
+   */
+  public void addColumn(final String columnFamily, final String columnQualifier) {
+    Set<String> columns = this.columnFamilies.get(columnFamily);
+    
+    if (columns == null) {
+      columns = new HashSet<String>();
+    }
+    columns.add(columnQualifier);
+    
+    this.columnFamilies.put(columnFamily, columns);
+  }
+  
+  /**
+   * Creates a HBase {@link Put} from a Storm {@link Tuple}
+   * 
+   * @param tuple
+   *          The {@link Tuple}
+   * @return {@link Put}
+   */
+  public Put getPutFromTuple(final Tuple tuple) throws IOException{
+    byte[] rowKey = null;
+    try {
+      rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));
+    }
+    catch(IllegalArgumentException iae) {
+      throw new IOException("Unable to retrieve " + tupleRowKeyField + " from " + tuple + " [ " + Joiner.on(',').join(tuple.getFields()) + " ]", iae);
+    }
+    
+    long ts = 0;
+    if (!tupleTimestampField.equals("")) {
+      ts = tuple.getLongByField(tupleTimestampField);
+    }
+    
+    Put p = new Put(rowKey);
+    
+    p.setDurability(durability);
+    
+    if (columnFamilies.size() > 0) {
+      for (String cf : columnFamilies.keySet()) {
+        byte[] cfBytes = Bytes.toBytes(cf);
+        for (String cq : columnFamilies.get(cf)) {
+          byte[] cqBytes = Bytes.toBytes(cq);
+          byte[] val = tuple.getBinaryByField(cq);
+          
+          if (ts > 0) {
+            p.add(cfBytes, cqBytes, ts, val);
+          } else {
+            p.add(cfBytes, cqBytes, val);
+          }
+        }
+      }
+    }
+    
+    return p;
+  }
+  
+  /**
+   * Creates a HBase {@link Increment} from a Storm {@link Tuple}
+   * 
+   * @param tuple
+   *          The {@link Tuple}
+   * @param increment
+   *          The amount to increment the counter by
+   * @return {@link Increment}
+   */
+  public Increment getIncrementFromTuple(final Tuple tuple, final long increment) {
+    byte[] rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));
+    
+    Increment inc = new Increment(rowKey);
+    inc.setDurability(durability);
+    
+    if (columnFamilies.size() > 0) {
+      for (String cf : columnFamilies.keySet()) {
+        byte[] cfBytes = Bytes.toBytes(cf);
+        for (String cq : columnFamilies.get(cf)) {
+          byte[] val;
+          try {
+            val = Bytes.toBytes(tuple.getStringByField(cq));
+          } catch (IllegalArgumentException ex) {
+            // if cq isn't a tuple field, use cq for counter instead of tuple
+            // value
+            val = Bytes.toBytes(cq);
+          }
+          inc.addColumn(cfBytes, val, increment);
+        }
+      }
+    }
+    
+    return inc;
+  }
+  
+  /**
+   * Increment the counter for the given family and column by the specified
+   * amount
+   * <p>
+   * If the family and column already exist in the Increment the counter value
+   * is incremented by the specified amount rather than overridden, as it is in
+   * HBase's {@link Increment#addColumn(byte[], byte[], long)} method
+   * 
+   * @param inc
+   *          The {@link Increment} to update
+   * @param family
+   *          The column family
+   * @param qualifier
+   *          The column qualifier
+   * @param amount
+   *          The amount to increment the counter by
+   */
+  public static void addIncrement(Increment inc, final byte[] family, final byte[] qualifier, final Long amount) {
+    
+    NavigableMap<byte[], Long> set = inc.getFamilyMapOfLongs().get(family);
+    if (set == null) {
+      set = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
+    }
+    
+    // If qualifier exists, increment amount
+    Long counter = set.get(qualifier);
+    if (counter == null) {
+      counter = 0L;
+    }
+    set.put(qualifier, amount + counter);
+    
+    inc.getFamilyMapOfLongs().put(family, set);
+  }
+  
+
+
+  /**
+   * @param durability
+   *          Sets whether to write to HBase's edit log.
+   *          <p>
+   *          Setting to false will mean fewer operations to perform when
+   *          writing to HBase and hence better performance, but changes that
+   *          haven't been flushed to a store file will be lost in the event of
+   *          HBase failure
+   *          <p>
+   *          Enabled by default
+   */
+  public void setDurability(Durability durability) {
+    this.durability = durability;
+  }
+  
+  
+  public Durability getDurability() {
+    return  durability;
+  }
+  
+
+
+  /**
+   * @return the tupleRowKeyField
+   */
+  public String getTupleRowKeyField() {
+    return tupleRowKeyField;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/writer/HBaseWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/writer/HBaseWriter.java b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/writer/HBaseWriter.java
new file mode 100644
index 0000000..1fd69b3
--- /dev/null
+++ b/metron-platform/metron-hbase/src/main/java/org/apache/metron/hbase/writer/HBaseWriter.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.hbase.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.hbase.HTableProvider;
+import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.common.utils.ReflectionUtils;
+import org.apache.metron.common.interfaces.MessageWriter;
+import org.json.simple.JSONObject;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Abstract {@link MessageWriter} that persists JSON messages to an HBase table.
+ * Subclasses define how the row key, cell timestamp and column values are
+ * derived from the incoming tuple/message.
+ */
+public abstract class HBaseWriter implements MessageWriter<JSONObject>, Serializable {
+
+  private String tableName;
+  private String connectorImpl;
+  private TableProvider provider;
+  private HTableInterface table;
+
+  public HBaseWriter(String tableName) {
+    this.tableName = tableName;
+  }
+
+  /**
+   * @param connectorImpl fully-qualified class name of the {@link TableProvider}
+   *                      implementation to instantiate; {@link HTableProvider}
+   *                      is used as the fallback
+   * @return this writer, for call chaining
+   */
+  public HBaseWriter withProviderImpl(String connectorImpl) {
+    this.connectorImpl = connectorImpl;
+    return this;
+  }
+
+  @Override
+  public void init() {
+    final Configuration config = HBaseConfiguration.create();
+    try {
+      provider = ReflectionUtils.createInstance(connectorImpl, new HTableProvider());
+      table = provider.getTable(config, tableName);
+    } catch (IOException e) {
+      // Fail fast: swallowing this would leave 'table' null and surface later
+      // as an opaque NullPointerException on the first write().
+      throw new IllegalStateException("Unable to acquire HBase table '" + tableName + "'", e);
+    }
+  }
+
+  /**
+   * Writes a single message as an HBase {@link Put}.  Column names returned by
+   * {@link #getValues(Tuple, JSONObject)} are expected in "family:qualifier"
+   * form.
+   */
+  @Override
+  public void write(String sourceType, Configurations configurations, Tuple tuple, JSONObject message) throws Exception {
+    Put put = new Put(getKey(tuple, message));
+    Map<String, byte[]> values = getValues(tuple, message);
+    // The timestamp applies per-message, so compute it once rather than per column.
+    long timestamp = getTimestamp(tuple, message);
+    for (Map.Entry<String, byte[]> entry : values.entrySet()) {
+      // Split only on the first ':' so qualifiers may themselves contain colons.
+      String[] columnParts = entry.getKey().split(":", 2);
+      if (timestamp > -1) {
+        put.addColumn(Bytes.toBytes(columnParts[0]), Bytes.toBytes(columnParts[1]), timestamp, entry.getValue());
+      } else {
+        put.addColumn(Bytes.toBytes(columnParts[0]), Bytes.toBytes(columnParts[1]), entry.getValue());
+      }
+    }
+    table.put(put);
+  }
+
+  @Override
+  public void close() throws Exception {
+    table.close();
+  }
+
+  /** @return the HBase row key for this message. */
+  public abstract byte[] getKey(Tuple tuple, JSONObject message);
+
+  /** @return the cell timestamp, or a value &lt;= -1 to let HBase assign one. */
+  public abstract long getTimestamp(Tuple tuple, JSONObject message);
+
+  /** @return column values keyed by "family:qualifier". */
+  public abstract Map<String, byte[]> getValues(Tuple tuple, JSONObject message);
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/pom.xml b/metron-platform/metron-integration-test/pom.xml
new file mode 100644
index 0000000..b3c3f09
--- /dev/null
+++ b/metron-platform/metron-integration-test/pom.xml
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+  Foundation (ASF) under one or more contributor license agreements. See the 
+  NOTICE file distributed with this work for additional information regarding 
+  copyright ownership. The ASF licenses this file to You under the Apache License, 
+  Version 2.0 (the "License"); you may not use this file except in compliance 
+  with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+  Unless required by applicable law or agreed to in writing, software distributed 
+  under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+  OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.metron</groupId>
+    <artifactId>metron-platform</artifactId>
+    <version>0.1BETA</version>
+  </parent>
+  <artifactId>metron-integration-test</artifactId>
+  <description>Metron Integration Test</description>
+  <properties>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>${global_junit_version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>flux-core</artifactId>
+      <version>${global_flux_version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>storm-core</artifactId>
+      <version>${global_storm_version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${global_hbase_guava_version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${global_hbase_version}</version>
+      <exclusions>
+        <exclusion>
+          <artifactId>log4j</artifactId>
+          <groupId>log4j</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${global_hadoop_version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.slf4j</groupId>
+          <artifactId>slf4j-log4j12</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <!-- kafka is declared twice intentionally: the "test"-classified artifact
+         supplies the embedded broker used by integration tests, while the
+         unclassified artifact supplies the runtime classes. -->
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.9.2</artifactId>
+      <version>${global_kafka_version}</version>
+      <classifier>test</classifier>
+      <exclusions>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.9.2</artifactId>
+      <version>${global_kafka_version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.metron</groupId>
+      <artifactId>metron-enrichment</artifactId>
+      <version>0.1BETA</version>
+    </dependency>
+    <!--dependency>
+      <groupId>org.apache.metron</groupId>
+      <artifactId>metron-parsers</artifactId>
+      <version>0.1BETA</version>
+    </dependency-->
+    <dependency>
+      <groupId>org.apache.metron</groupId>
+      <artifactId>metron-test-utilities</artifactId>
+      <version>0.1BETA</version>
+    </dependency>
+  </dependencies>
+
+  <!-- No custom build or reporting configuration yet; empty sections kept as placeholders. -->
+  <build>
+  </build>
+  <reporting>
+  </reporting>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/BaseIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/BaseIntegrationTest.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/BaseIntegrationTest.java
new file mode 100644
index 0000000..3f21c0d
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/BaseIntegrationTest.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+import com.google.common.base.Function;
+import org.apache.metron.TestConstants;
+import org.apache.metron.integration.components.KafkaWithZKComponent;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+
+import javax.annotation.Nullable;
+import java.util.List;
+import java.util.Properties;
+
+public abstract class BaseIntegrationTest {
+
+  /**
+   * Builds an in-memory Kafka/Zookeeper test component pre-configured with the
+   * given topics.  Once the component has started, the post-start callback
+   * records the Zookeeper connect string in {@code topologyProperties} under
+   * the "kafka.zk" key and uploads the sample configurations from
+   * {@code TestConstants.SAMPLE_CONFIG_PATH} into Zookeeper.
+   */
+  protected KafkaWithZKComponent getKafkaComponent(final Properties topologyProperties, List<KafkaWithZKComponent.Topic> topics) {
+    return new KafkaWithZKComponent().withTopics(topics)
+            .withPostStartCallback(new Function<KafkaWithZKComponent, Void>() {
+              @Nullable
+              @Override
+              public Void apply(@Nullable KafkaWithZKComponent kafkaWithZKComponent) {
+                topologyProperties.setProperty("kafka.zk", kafkaWithZKComponent.getZookeeperConnect());
+                try {
+                  ConfigurationsUtils.uploadConfigsToZookeeper(TestConstants.SAMPLE_CONFIG_PATH, kafkaWithZKComponent.getZookeeperConnect());
+                } catch (Exception e) {
+                  // Config upload is essential to every test; fail startup loudly.
+                  throw new IllegalStateException(e);
+                }
+                return null;
+              }
+            });
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ComponentRunner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ComponentRunner.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ComponentRunner.java
new file mode 100644
index 0000000..c938741
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ComponentRunner.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * Orchestrates the lifecycle of a set of named {@link InMemoryComponent}s for
+ * integration tests: starts them in a configurable order, repeatedly polls a
+ * {@link Processor} until it reports readiness (bounded by a retry count and a
+ * wall-clock timeout), and shuts the components down in order.
+ */
+public class ComponentRunner {
+    /** Fluent builder; startup/shutdown order defaults to component insertion order. */
+    public static class Builder {
+        LinkedHashMap<String, InMemoryComponent> components;
+        String[] startupOrder;
+        String[] shutdownOrder;
+        long timeBetweenAttempts = 1000;
+        int numRetries = 5;
+        long maxTimeMS = 120000;
+        public Builder() {
+            components = new LinkedHashMap<String, InMemoryComponent>();
+        }
+
+        public Builder withNumRetries(int numRetries) {
+            this.numRetries = numRetries;
+            return this;
+        }
+
+        public Builder withMaxTimeMS(long maxTimeMS) {
+            this.maxTimeMS = maxTimeMS;
+            return this;
+        }
+
+        public Builder withComponent(String name, InMemoryComponent component) {
+            components.put(name, component);
+            return this;
+        }
+
+        public Builder withCustomStartupOrder(String[] startupOrder) {
+            this.startupOrder = startupOrder;
+            return this;
+        }
+        public Builder withCustomShutdownOrder(String[] shutdownOrder) {
+            this.shutdownOrder = shutdownOrder;
+            return this;
+        }
+        public Builder withMillisecondsBetweenAttempts(long timeBetweenAttempts) {
+            this.timeBetweenAttempts = timeBetweenAttempts;
+            return this;
+        }
+        /** @return the component names in map-insertion order. */
+        private static String[] toOrderedList(Map<String, InMemoryComponent> components) {
+            return components.keySet().toArray(new String[components.size()]);
+        }
+        public ComponentRunner build() {
+            if(shutdownOrder == null) {
+                shutdownOrder = toOrderedList(components);
+            }
+            if(startupOrder == null) {
+                startupOrder = toOrderedList(components);
+            }
+            return new ComponentRunner(components, startupOrder, shutdownOrder, timeBetweenAttempts, numRetries, maxTimeMS);
+        }
+
+    }
+
+    LinkedHashMap<String, InMemoryComponent> components;
+    String[] startupOrder;
+    String[] shutdownOrder;
+    long timeBetweenAttempts;
+    int numRetries;
+    long maxTimeMS;
+    public ComponentRunner( LinkedHashMap<String, InMemoryComponent> components
+                          , String[] startupOrder
+                          , String[] shutdownOrder
+                          , long timeBetweenAttempts
+                          , int numRetries
+                          , long maxTimeMS
+                          )
+    {
+        this.components = components;
+        this.startupOrder = startupOrder;
+        this.shutdownOrder = shutdownOrder;
+        this.timeBetweenAttempts = timeBetweenAttempts;
+        this.numRetries = numRetries;
+        this.maxTimeMS = maxTimeMS;
+    }
+
+    public <T extends InMemoryComponent> T getComponent(String name, Class<T> clazz) {
+        return clazz.cast(getComponents().get(name));
+    }
+
+    public LinkedHashMap<String, InMemoryComponent> getComponents() {
+        return components;
+    }
+
+    /** Starts every component in the configured startup order. */
+    public void start() throws UnableToStartException {
+        for(String componentName : startupOrder) {
+            components.get(componentName).start();
+        }
+    }
+    /** Stops every component in the configured shutdown order. */
+    public void stop() {
+        for(String componentName : shutdownOrder) {
+            components.get(componentName).stop();
+        }
+    }
+
+    /**
+     * Polls {@code successState} until it reports {@code ReadinessState.READY},
+     * sleeping {@code timeBetweenAttempts} ms between attempts.
+     *
+     * @return the processor's result once ready
+     * @throws RuntimeException if the overall timeout elapses, the retry limit
+     *         is exceeded, or the polling thread is interrupted while sleeping
+     */
+    public <T> T process(Processor<T> successState) {
+        int retryCount = 0;
+        long start = System.currentTimeMillis();
+        while(true) {
+            long duration = System.currentTimeMillis() - start;
+            if(duration > maxTimeMS) {
+                throw new RuntimeException("Took too long to complete: " + duration + " > " + maxTimeMS);
+            }
+            ReadinessState state = successState.process(this);
+            if(state == ReadinessState.READY) {
+                return successState.getResult();
+            }
+            else if(state == ReadinessState.NOT_READY) {
+                retryCount++;
+                if(numRetries > 0 && retryCount > numRetries) {
+                    throw new RuntimeException("Too many retries: " + retryCount);
+                }
+            }
+            try {
+                Thread.sleep(timeBetweenAttempts);
+            } catch (InterruptedException e) {
+                // Restore the interrupt flag so callers can still observe the interruption.
+                Thread.currentThread().interrupt();
+                throw new RuntimeException("Unable to sleep", e);
+            }
+        }
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/EnrichmentIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/EnrichmentIntegrationTest.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/EnrichmentIntegrationTest.java
new file mode 100644
index 0000000..21019c3
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/EnrichmentIntegrationTest.java
@@ -0,0 +1,468 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.*;
+
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.metron.common.Constants;
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.converter.EnrichmentHelper;
+import org.apache.metron.integration.utils.TestUtils;
+import org.apache.metron.test.utils.UnitTestHelper;
+import org.apache.metron.integration.components.FluxTopologyComponent;
+import org.apache.metron.integration.components.KafkaWithZKComponent;
+import org.apache.metron.integration.mock.MockGeoAdapter;
+import org.apache.metron.test.mock.MockHTable;
+import org.apache.metron.enrichment.lookup.LookupKV;
+
+import org.apache.metron.integration.utils.SampleUtil;
+import org.apache.metron.common.utils.JSONUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.Stack;
+
+public abstract class EnrichmentIntegrationTest extends BaseIntegrationTest {
+  private static final String SRC_IP = "ip_src_addr";
+  private static final String DST_IP = "ip_dst_addr";
+  private static final String MALICIOUS_IP_TYPE = "malicious_ip";
+  private static final String PLAYFUL_CLASSIFICATION_TYPE = "playful_classification";
+  private static final Map<String, String> PLAYFUL_ENRICHMENT = new HashMap<String, String>() {{
+    put("orientation", "north");
+  }};
+  private String fluxPath = "../metron-enrichment/src/main/flux/enrichment/test.yaml";
+  protected String hdfsDir = "target/enrichmentIntegrationTest/hdfs";
+  private String sampleParsedPath = TestConstants.SAMPLE_DATA_PARSED_PATH + "YafExampleParsed";
+  private String sampleIndexedPath = TestConstants.SAMPLE_DATA_INDEXED_PATH + "YafIndexed";
+
+
+  /**
+   * TableProvider used by the topology under test; delegates to the shared
+   * MockHTable provider so lookups resolve to in-memory tables.
+   */
+  public static class Provider implements TableProvider, Serializable {
+    MockHTable.Provider  provider = new MockHTable.Provider();
+    @Override
+    public HTableInterface getTable(Configuration config, String tableName) throws IOException {
+      return provider.getTable(config, tableName);
+    }
+  }
+
+  public static void cleanHdfsDir(String hdfsDirStr) {
+    File hdfsDir = new File(hdfsDirStr);
+    Stack<File> fs = new Stack<>();
+    if(hdfsDir.exists()) {
+      fs.push(hdfsDir);
+      while(!fs.empty()) {
+        File f = fs.pop();
+        if (f.isDirectory()) {
+          for(File child : f.listFiles()) {
+            fs.push(child);
+          }
+        }
+        else {
+          if (f.getName().startsWith("enrichment") || f.getName().endsWith(".json")) {
+            f.delete();
+          }
+        }
+      }
+    }
+  }
+
+  public static List<Map<String, Object> > readDocsFromDisk(String hdfsDirStr) throws IOException {
+    List<Map<String, Object>> ret = new ArrayList<>();
+    File hdfsDir = new File(hdfsDirStr);
+    Stack<File> fs = new Stack<>();
+    if(hdfsDir.exists()) {
+      fs.push(hdfsDir);
+      while(!fs.empty()) {
+        File f = fs.pop();
+        if(f.isDirectory()) {
+          for (File child : f.listFiles()) {
+            fs.push(child);
+          }
+        }
+        else {
+          System.out.println("Processed " + f);
+          if (f.getName().startsWith("enrichment") || f.getName().endsWith(".json")) {
+            List<byte[]> data = TestUtils.readSampleData(f.getPath());
+            Iterables.addAll(ret, Iterables.transform(data, new Function<byte[], Map<String, Object>>() {
+              @Nullable
+              @Override
+              public Map<String, Object> apply(@Nullable byte[] bytes) {
+                String s = new String(bytes);
+                try {
+                  return JSONUtils.INSTANCE.load(s, new TypeReference<Map<String, Object>>() {
+                  });
+                } catch (IOException e) {
+                  throw new RuntimeException(e);
+                }
+              }
+            }));
+          }
+        }
+      }
+    }
+    return ret;
+  }
+
+
+  /**
+   * End-to-end enrichment test: seeds mock HBase tables with threat-intel and
+   * enrichment data, starts Kafka + a search component + the enrichment flux
+   * topology, feeds sample parsed messages through the enrichment topic, then
+   * validates the documents returned by the processor and those written to the
+   * local "HDFS" directory.
+   */
+  @Test
+  public void test() throws Exception {
+    cleanHdfsDir(hdfsDir);
+    final Configurations configurations = SampleUtil.getSampleConfigs();
+    final String dateFormat = "yyyy.MM.dd.HH";
+    final List<byte[]> inputMessages = TestUtils.readSampleData(sampleParsedPath);
+    final String cf = "cf";
+    final String trackerHBaseTableName = "tracker";
+    final String threatIntelTableName = "threat_intel";
+    final String enrichmentsTableName = "enrichments";
+    // Topology configuration consumed by the flux definition.
+    final Properties topologyProperties = new Properties() {{
+      setProperty("org.apache.metron.enrichment.host.known_hosts", "[{\"ip\":\"10.1.128.236\", \"local\":\"YES\", \"type\":\"webserver\", \"asset_value\" : \"important\"},\n" +
+              "{\"ip\":\"10.1.128.237\", \"local\":\"UNKNOWN\", \"type\":\"unknown\", \"asset_value\" : \"important\"},\n" +
+              "{\"ip\":\"10.60.10.254\", \"local\":\"YES\", \"type\":\"printer\", \"asset_value\" : \"important\"},\n" +
+              "{\"ip\":\"10.0.2.15\", \"local\":\"YES\", \"type\":\"printer\", \"asset_value\" : \"important\"}]");
+      setProperty("hbase.provider.impl","" + Provider.class.getName());
+      setProperty("threat.intel.tracker.table", trackerHBaseTableName);
+      setProperty("threat.intel.tracker.cf", cf);
+      setProperty("threat.intel.simple.hbase.table", threatIntelTableName);
+      setProperty("threat.intel.simple.hbase.cf", cf);
+      setProperty("enrichment.simple.hbase.table", enrichmentsTableName);
+      setProperty("enrichment.simple.hbase.cf", cf);
+      setProperty("es.clustername", "metron");
+      setProperty("es.port", "9300");
+      setProperty("es.ip", "localhost");
+      setProperty("index.date.format", dateFormat);
+      setProperty("index.hdfs.output", hdfsDir);
+    }};
+    setAdditionalProperties(topologyProperties);
+    final KafkaWithZKComponent kafkaComponent = getKafkaComponent(topologyProperties, new ArrayList<KafkaWithZKComponent.Topic>() {{
+      add(new KafkaWithZKComponent.Topic(Constants.ENRICHMENT_TOPIC, 1));
+    }});
+
+    //create MockHBaseTables and seed them: 10.0.2.3 is both a malicious IP
+    //(threat intel) and carries the "playful_classification" enrichment.
+    final MockHTable trackerTable = (MockHTable)MockHTable.Provider.addToCache(trackerHBaseTableName, cf);
+    final MockHTable threatIntelTable = (MockHTable)MockHTable.Provider.addToCache(threatIntelTableName, cf);
+    EnrichmentHelper.INSTANCE.load(threatIntelTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>(){{
+      add(new LookupKV<>(new EnrichmentKey(MALICIOUS_IP_TYPE, "10.0.2.3"), new EnrichmentValue(new HashMap<String, String>())));
+    }});
+    final MockHTable enrichmentTable = (MockHTable)MockHTable.Provider.addToCache(enrichmentsTableName, cf);
+    EnrichmentHelper.INSTANCE.load(enrichmentTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>(){{
+      add(new LookupKV<>(new EnrichmentKey(PLAYFUL_CLASSIFICATION_TYPE, "10.0.2.3")
+                        , new EnrichmentValue(PLAYFUL_ENRICHMENT )
+                        )
+         );
+    }});
+    // Topology under test, built from the enrichment flux definition.
+    FluxTopologyComponent fluxComponent = new FluxTopologyComponent.Builder()
+            .withTopologyLocation(new File(fluxPath))
+            .withTopologyName("test")
+            .withTopologyProperties(topologyProperties)
+            .build();
+
+    InMemoryComponent searchComponent = getSearchComponent(topologyProperties);
+
+    UnitTestHelper.verboseLogging();
+    ComponentRunner runner = new ComponentRunner.Builder()
+            .withComponent("kafka", kafkaComponent)
+            .withComponent("search", searchComponent)
+            .withComponent("storm", fluxComponent)
+            .withMillisecondsBetweenAttempts(10000)
+            .withNumRetries(10)
+            .build();
+    runner.start();
+
+    try {
+      fluxComponent.submitTopology();
+
+      kafkaComponent.writeMessages(Constants.ENRICHMENT_TOPIC, inputMessages);
+      List<Map<String, Object>> docs = runner.process(getProcessor(inputMessages));
+      Assert.assertEquals(inputMessages.size(), docs.size());
+      List<Map<String, Object>> cleanedDocs = cleanDocs(docs);
+      validateAll(cleanedDocs);
+
+
+      // All docs should also have landed on disk, under a single "yaf" directory.
+      List<Map<String, Object>> docsFromDisk = readDocsFromDisk(hdfsDir);
+      Assert.assertEquals(docsFromDisk.size(), docs.size()) ;
+      Assert.assertEquals(new File(hdfsDir).list().length, 1);
+      Assert.assertEquals(new File(hdfsDir).list()[0], "yaf");
+      validateAll(docsFromDisk);
+    }
+    finally {
+      cleanHdfsDir(hdfsDir);
+      runner.stop();
+    }
+  }
+
+  /**
+   * Returns a copy of each document with every field name passed through
+   * {@code cleanField}; values are left untouched.
+   */
+  public List<Map<String, Object>> cleanDocs(List<Map<String, Object>> docs) {
+    List<Map<String, Object>> cleaned = new ArrayList<>();
+    for(Map<String, Object> doc: docs) {
+      Map<String, Object> fields = new HashMap<>();
+      for(Map.Entry<String, Object> kv: doc.entrySet()) {
+        fields.put(cleanField(kv.getKey()), kv.getValue());
+      }
+      cleaned.add(fields);
+    }
+    return cleaned;
+  }
+
+  /**
+   * Runs the full suite of per-document validations over every indexed doc.
+   */
+  public static void validateAll(List<Map<String, Object>> docs) {
+    for (Map<String, Object> doc : docs) {
+      baseValidation(doc);
+      hostEnrichmentValidation(doc);
+      geoEnrichmentValidation(doc);
+      threatIntelValidation(doc);
+      simpleEnrichmentValidation(doc);
+    }
+  }
+
+  /**
+   * Checks invariants common to every document: only the expected threat-intel
+   * and enrichment sources appear, no field value is empty, and both source
+   * and destination IPs are present.
+   */
+  public static void baseValidation(Map<String, Object> jsonDoc) {
+    assertEnrichmentsExists("threatintels.", setOf("hbaseThreatIntel"), jsonDoc.keySet());
+    assertEnrichmentsExists("enrichments.", setOf("geo", "host", "hbaseEnrichment" ), jsonDoc.keySet());
+    for(Map.Entry<String, Object> kv : jsonDoc.entrySet()) {
+      //ensure no values are empty.
+      Assert.assertTrue(kv.getValue().toString().length() > 0);
+    }
+    //ensure we always have a source ip and destination ip
+    Assert.assertNotNull(jsonDoc.get(SRC_IP));
+    Assert.assertNotNull(jsonDoc.get(DST_IP));
+  }
+
+  /** Pairs an indexed document with the field key being evaluated. */
+  private static class EvaluationPayload {
+    Map<String, Object> indexedDoc;
+    String key;
+    public EvaluationPayload(Map<String, Object> indexedDoc, String key) {
+      this.indexedDoc = indexedDoc;
+      this.key = key;
+    }
+  }
+
+  /**
+   * Predicates over the host-enrichment fields of an indexed document; each
+   * checks "enrichments.host.&lt;key&gt;.known_info.*" for one expected value.
+   * NOTE(review): each predicate calls equals() on the looked-up value, so a
+   * missing field fails with a NullPointerException rather than returning
+   * false — confirm that is the intended failure mode.
+   */
+  private static enum HostEnrichments implements Predicate<EvaluationPayload>{
+    LOCAL_LOCATION(new Predicate<EvaluationPayload>() {
+
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.local").equals("YES");
+      }
+    })
+    ,UNKNOWN_LOCATION(new Predicate<EvaluationPayload>() {
+
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.local").equals("UNKNOWN");
+      }
+    })
+    ,IMPORTANT(new Predicate<EvaluationPayload>() {
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.asset_value").equals("important");
+      }
+    })
+    ,PRINTER_TYPE(new Predicate<EvaluationPayload>() {
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.type").equals("printer");
+      }
+    })
+    ,WEBSERVER_TYPE(new Predicate<EvaluationPayload>() {
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.type").equals("webserver");
+      }
+    })
+    ,UNKNOWN_TYPE(new Predicate<EvaluationPayload>() {
+      @Override
+      public boolean apply(@Nullable EvaluationPayload evaluationPayload) {
+        return evaluationPayload.indexedDoc.get("enrichments.host." + evaluationPayload.key + ".known_info.type").equals("unknown");
+      }
+    })
+    ;
+
+    Predicate<EvaluationPayload> _predicate;
+    HostEnrichments(Predicate<EvaluationPayload> predicate) {
+      this._predicate = predicate;
+    }
+
+    public boolean apply(EvaluationPayload payload) {
+      return _predicate.apply(payload);
+    }
+
+  }
+
+  /**
+   * Asserts that every document key under {@code topLevel} (e.g. "enrichments.")
+   * was produced by one of the expected enrichment names; the second dotted
+   * segment of the key identifies the enrichment that produced it.
+   */
+  private static void assertEnrichmentsExists(String topLevel, Set<String> expectedEnrichments, Set<String> keys) {
+    for(String key : keys) {
+      if(key.startsWith(topLevel)) {
+        String secondLevel = Iterables.get(Splitter.on(".").split(key), 1);
+        // Fixed message: the old text ("...but it was not there") described the
+        // opposite condition of what this assertion checks.
+        String message = "Found an enrichment/threat intel (" + secondLevel + ") that I didn't expect (expected enrichments :"
+                       + Joiner.on(",").join(expectedEnrichments) + ").  If you've created a new"
+                       + " enrichment, then please add a validation method to this unit test.  Otherwise, it's a solid error"
+                       + " and should be investigated.";
+        Assert.assertTrue( message, expectedEnrichments.contains(secondLevel));
+      }
+    }
+  }
+  /**
+   * Validates the hbase enrichment for documents that involve IP 10.0.2.3: the
+   * playful-classification orientation must be attached to whichever side matched.
+   */
+  private static void simpleEnrichmentValidation(Map<String, Object> indexedDoc) {
+    // Literal-first equals: a document missing SRC_IP/DST_IP yields false instead of an NPE.
+    if("10.0.2.3".equals(indexedDoc.get(SRC_IP))
+            || "10.0.2.3".equals(indexedDoc.get(DST_IP))
+            ) {
+      Assert.assertTrue(keyPatternExists("enrichments.hbaseEnrichment", indexedDoc));
+      if("10.0.2.3".equals(indexedDoc.get(SRC_IP))) {
+        Assert.assertEquals(PLAYFUL_ENRICHMENT.get("orientation")
+                , indexedDoc.get("enrichments.hbaseEnrichment." + SRC_IP + "." + PLAYFUL_CLASSIFICATION_TYPE+ ".orientation")
+        );
+      }
+      else if("10.0.2.3".equals(indexedDoc.get(DST_IP))) {
+        Assert.assertEquals( PLAYFUL_ENRICHMENT.get("orientation")
+                , indexedDoc.get("enrichments.hbaseEnrichment." + DST_IP + "." + PLAYFUL_CLASSIFICATION_TYPE + ".orientation")
+        );
+      }
+    }
+
+  }
+  /**
+   * Validates threat intel handling: documents touching 10.0.2.3 must carry threat
+   * intel keys and be tagged is_alert=true; every other document must carry neither.
+   */
+  private static void threatIntelValidation(Map<String, Object> indexedDoc) {
+    // Literal-first equals avoids an NPE when the doc lacks SRC_IP/DST_IP entirely.
+    if("10.0.2.3".equals(indexedDoc.get(SRC_IP))
+    || "10.0.2.3".equals(indexedDoc.get(DST_IP))
+            ) {
+      //if we have any threat intel messages, we want to tag is_alert to true
+      Assert.assertTrue(keyPatternExists("threatintels.", indexedDoc));
+      // JUnit convention: expected value first, actual second.
+      Assert.assertEquals("true", indexedDoc.get("is_alert"));
+    }
+    else {
+      //For YAF this is the case, but if we do snort later on, this will be invalid.
+      Assert.assertNull(indexedDoc.get("is_alert"));
+      Assert.assertFalse(keyPatternExists("threatintels.", indexedDoc));
+    }
+    //ip threat intels
+    if(keyPatternExists("threatintels.hbaseThreatIntel.", indexedDoc)) {
+      if("10.0.2.3".equals(indexedDoc.get(SRC_IP))) {
+        Assert.assertEquals("alert", indexedDoc.get("threatintels.hbaseThreatIntel." + SRC_IP + "." + MALICIOUS_IP_TYPE));
+      }
+      else if("10.0.2.3".equals(indexedDoc.get(DST_IP))) {
+        Assert.assertEquals("alert", indexedDoc.get("threatintels.hbaseThreatIntel." + DST_IP + "." + MALICIOUS_IP_TYPE));
+      }
+      else {
+        Assert.fail("There was a threat intels that I did not expect: " + indexedDoc);
+      }
+    }
+
+  }
+
+  /**
+   * Every message must carry the mock geo enrichment on both the source and
+   * destination IP, because the mock geo adapter enriches unconditionally.
+   */
+  private static void geoEnrichmentValidation(Map<String, Object> indexedDoc) {
+    //should have geo enrichment on every message due to mock geo adapter
+    for(String field : new String[] { SRC_IP, DST_IP }) {
+      String prefix = "enrichments.geo." + field + ".";
+      // JUnit convention: expected value first, actual second.
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_LOCATION_POINT, indexedDoc.get(prefix + "location_point"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_LONGITUDE, indexedDoc.get(prefix + "longitude"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_CITY, indexedDoc.get(prefix + "city"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_LATITUDE, indexedDoc.get(prefix + "latitude"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_COUNTRY, indexedDoc.get(prefix + "country"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_DMACODE, indexedDoc.get(prefix + "dmaCode"));
+      Assert.assertEquals(MockGeoAdapter.DEFAULT_POSTAL_CODE, indexedDoc.get(prefix + "postalCode"));
+    }
+  }
+
+  /**
+   * Asserts that any document whose source or destination IP is a known host carries
+   * the expected host enrichments, and that no host enrichment appears otherwise.
+   */
+  private static void hostEnrichmentValidation(Map<String, Object> indexedDoc) {
+    boolean enriched = false;
+    //important local printers
+    enriched |= validateHostType(indexedDoc, setOf("10.0.2.15", "10.60.10.254"), HostEnrichments.PRINTER_TYPE);
+    //important local webservers
+    enriched |= validateHostType(indexedDoc, setOf("10.1.128.236"), HostEnrichments.WEBSERVER_TYPE);
+    if(!enriched) {
+      Assert.assertFalse(keyPatternExists("enrichments.host", indexedDoc));
+    }
+  }
+
+  /**
+   * If the document's SRC_IP or DST_IP is in the supplied set, asserts that side is
+   * enriched as a local, important host of the given type.
+   * @return true when at least one side matched (and was validated).
+   */
+  private static boolean validateHostType(Map<String, Object> indexedDoc, Set<String> ips, HostEnrichments type) {
+    boolean matched = false;
+    for(String field : new String[] { SRC_IP, DST_IP }) {
+      if (ips.contains(indexedDoc.get(field))) {
+        Assert.assertTrue(Predicates.and(HostEnrichments.LOCAL_LOCATION
+                ,HostEnrichments.IMPORTANT
+                ,type
+                ).apply(new EvaluationPayload(indexedDoc, field))
+        );
+        matched = true;
+      }
+    }
+    return matched;
+  }
+
+
+  /** Returns true when at least one key of the indexed document begins with the given prefix. */
+  private static boolean keyPatternExists(String pattern, Map<String, Object> indexedObj) {
+    for(Map.Entry<String, Object> entry : indexedObj.entrySet()) {
+      if(entry.getKey().startsWith(pattern)) {
+        return true;
+      }
+    }
+    return false;
+  }
+  /** Convenience helper: builds a mutable HashSet containing the given items. */
+  private static Set<String> setOf(String... items) {
+    Set<String> ret = new HashSet<>();
+    for(int i = 0; i < items.length; ++i) {
+      ret.add(items[i]);
+    }
+    return ret;
+  }
+
+  abstract public InMemoryComponent getSearchComponent(Properties topologyProperties) throws Exception;
+  abstract public Processor<List<Map<String, Object>>> getProcessor(List<byte[]> inputMessages);
+  abstract public void setAdditionalProperties(Properties topologyProperties);
+  abstract public String cleanField(String field);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/InMemoryComponent.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/InMemoryComponent.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/InMemoryComponent.java
new file mode 100644
index 0000000..8a9ee96
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/InMemoryComponent.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+/**
+ * A component (e.g. a broker or topology) that can be run in-memory as part of an
+ * integration test.
+ */
+public interface InMemoryComponent {
+    // Interface members are implicitly public; the redundant modifier is dropped.
+    /** Starts the component; throws when it cannot be brought up. */
+    void start() throws UnableToStartException;
+    /** Stops the component and releases its resources. */
+    void stop();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/Processor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/Processor.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/Processor.java
new file mode 100644
index 0000000..bbcfb73
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/Processor.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+/**
+ * A processing step driven by the integration test runner: process() reports whether
+ * the expected state has been reached, after which getResult() supplies the value of
+ * type T.  NOTE(review): polling semantics inferred from the ReadinessState return
+ * type — confirm against ComponentRunner's usage.
+ */
+public interface Processor<T> {
+    ReadinessState process(ComponentRunner runner);
+    T getResult();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ReadinessState.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ReadinessState.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ReadinessState.java
new file mode 100644
index 0000000..5cdfbb4
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/ReadinessState.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+/**
+ * Readiness signal returned by Processor.process(): READY when processing can
+ * conclude, NOT_READY when the runner should keep waiting/polling.
+ * NOTE(review): exact runner behavior on NOT_READY is defined by ComponentRunner —
+ * confirm there.
+ */
+public enum ReadinessState {
+    READY, NOT_READY;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/UnableToStartException.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/UnableToStartException.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/UnableToStartException.java
new file mode 100644
index 0000000..0fcda14
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/UnableToStartException.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration;
+
+/**
+ * Thrown when an InMemoryComponent cannot be started.
+ */
+public class UnableToStartException extends Exception {
+    // Exception is Serializable; declare an explicit serialVersionUID rather than
+    // relying on the compiler-generated one.
+    private static final long serialVersionUID = 1L;
+
+    public UnableToStartException(String message) {
+        super(message);
+    }
+    public UnableToStartException(String message, Throwable t) {
+        super(message, t);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/FluxTopologyComponent.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/FluxTopologyComponent.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/FluxTopologyComponent.java
new file mode 100644
index 0000000..3bb0c56
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/FluxTopologyComponent.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.components;
+
+import backtype.storm.Config;
+import backtype.storm.LocalCluster;
+import backtype.storm.generated.StormTopology;
+import org.apache.metron.integration.InMemoryComponent;
+import org.apache.metron.integration.UnableToStartException;
+import org.apache.storm.flux.FluxBuilder;
+import org.apache.storm.flux.model.ExecutionContext;
+import org.apache.storm.flux.model.TopologyDef;
+import org.apache.storm.flux.parser.FluxParser;
+import org.apache.thrift7.TException;
+import org.junit.Assert;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Properties;
+
+/**
+ * In-memory component that builds a Storm topology from a Flux YAML definition and
+ * submits it to a local Storm cluster for integration testing.
+ */
+public class FluxTopologyComponent implements InMemoryComponent {
+    LocalCluster stormCluster;
+    String topologyName;
+    File topologyLocation;
+    Properties topologyProperties;
+
+    /** Fluent builder for FluxTopologyComponent. */
+    public static class Builder {
+        String topologyName;
+        File topologyLocation;
+        Properties topologyProperties;
+        public Builder withTopologyName(String name) {
+            this.topologyName = name;
+            return this;
+        }
+        public Builder withTopologyLocation(File location) {
+            this.topologyLocation = location;
+            return this;
+        }
+        public Builder withTopologyProperties(Properties properties) {
+            this.topologyProperties = properties;
+            return this;
+        }
+
+        public FluxTopologyComponent build() {
+            return new FluxTopologyComponent(topologyName, topologyLocation, topologyProperties);
+        }
+    }
+
+    public FluxTopologyComponent(String topologyName, File topologyLocation, Properties topologyProperties) {
+        this.topologyName = topologyName;
+        this.topologyLocation = topologyLocation;
+        this.topologyProperties = topologyProperties;
+    }
+
+    public LocalCluster getStormCluster() {
+        return stormCluster;
+    }
+
+    public String getTopologyName() {
+        return topologyName;
+    }
+
+    public File getTopologyLocation() {
+        return topologyLocation;
+    }
+
+    public Properties getTopologyProperties() {
+        return topologyProperties;
+    }
+
+    /** Spins up the local Storm cluster (topology submission happens separately). */
+    public void start() throws UnableToStartException{
+        try {
+            stormCluster = new LocalCluster();
+        } catch (Exception e) {
+            throw new UnableToStartException("Unable to start flux topology: " + getTopologyLocation(), e);
+        }
+    }
+
+    public void stop() {
+        // Guard: stop() may be called even when start() failed or was never invoked.
+        if (stormCluster != null) {
+            stormCluster.shutdown();
+        }
+    }
+
+    /** Builds the topology from the configured Flux YAML and submits it to the local cluster. */
+    public void submitTopology() throws NoSuchMethodException, IOException, InstantiationException, TException, IllegalAccessException, InvocationTargetException, ClassNotFoundException {
+        startTopology(getTopologyName(), getTopologyLocation(), getTopologyProperties());
+    }
+    private void startTopology(String topologyName, File topologyLoc, Properties properties) throws IOException, ClassNotFoundException, NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException, TException {
+        TopologyDef topologyDef = loadYaml(topologyName, topologyLoc, properties);
+        Config conf = FluxBuilder.buildConfig(topologyDef);
+        ExecutionContext context = new ExecutionContext(topologyDef, conf);
+        StormTopology topology = FluxBuilder.buildTopology(context);
+        Assert.assertNotNull(topology);
+        topology.validate();
+        stormCluster.submitTopology(topologyName, conf, topology);
+    }
+
+    /**
+     * Writes the supplied properties to a temp file and parses the Flux YAML with
+     * property substitution enabled.
+     *
+     * Original code returned from inside the finally block, which silently swallowed
+     * any exception thrown by the FileWriter constructor or properties.store() and
+     * could hand a null TopologyDef to the caller.  Write, close, then parse instead
+     * so failures propagate.
+     */
+    private static TopologyDef loadYaml(String topologyName, File yamlFile, Properties properties) throws IOException {
+        File tmpFile = File.createTempFile(topologyName, "props");
+        tmpFile.deleteOnExit();
+        FileWriter propWriter = new FileWriter(tmpFile);
+        try {
+            properties.store(propWriter, topologyName + " properties");
+        }
+        finally {
+            propWriter.close();
+        }
+        return FluxParser.parseFile(yamlFile.getAbsolutePath(), false, true, tmpFile.getAbsolutePath(), false);
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/KafkaWithZKComponent.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/KafkaWithZKComponent.java b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/KafkaWithZKComponent.java
new file mode 100644
index 0000000..fb7bcde
--- /dev/null
+++ b/metron-platform/metron-integration-test/src/main/java/org/apache/metron/integration/components/KafkaWithZKComponent.java
@@ -0,0 +1,225 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.integration.components;
+
+
+import com.google.common.base.Function;
+import kafka.admin.AdminUtils;
+import kafka.api.FetchRequest;
+import kafka.api.FetchRequestBuilder;
+import kafka.consumer.ConsumerConfig;
+import kafka.consumer.ConsumerIterator;
+import kafka.consumer.KafkaStream;
+import kafka.javaapi.FetchResponse;
+import kafka.javaapi.consumer.ConsumerConnector;
+import kafka.javaapi.consumer.SimpleConsumer;
+import kafka.message.MessageAndOffset;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import kafka.server.KafkaConfig;
+import kafka.server.KafkaServer;
+import kafka.utils.*;
+import kafka.zk.EmbeddedZookeeper;
+import org.I0Itec.zkclient.ZkClient;
+import org.apache.metron.integration.InMemoryComponent;
+
+import java.nio.ByteBuffer;
+import java.util.*;
+
+
+/**
+ * In-memory Kafka broker (with an embedded or externally supplied Zookeeper) for
+ * integration testing: topic creation, producing, and consuming helpers included.
+ */
+public class KafkaWithZKComponent implements InMemoryComponent {
+
+
+  /** A topic to create on startup, with its partition count. */
+  public static class Topic {
+    public int numPartitions;
+    public String name;
+
+    public Topic(String name, int numPartitions) {
+      this.numPartitions = numPartitions;
+      this.name = name;
+    }
+  }
+  private transient KafkaServer kafkaServer;
+  private transient EmbeddedZookeeper zkServer;
+  private transient ZkClient zkClient;
+  private transient ConsumerConnector consumer;
+  private String zookeeperConnectString;
+  private int brokerPort = 6667;
+  private List<Topic> topics = Collections.emptyList();
+  private Function<KafkaWithZKComponent, Void> postStartCallback;
+
+  /** Optional hook invoked after the broker is up and topics are created. */
+  public KafkaWithZKComponent withPostStartCallback(Function<KafkaWithZKComponent, Void> f) {
+    postStartCallback = f;
+    return this;
+  }
+
+  /** Reuse an already-running Zookeeper instead of starting an embedded one. */
+  public KafkaWithZKComponent withExistingZookeeper(String zookeeperConnectString) {
+    this.zookeeperConnectString = zookeeperConnectString;
+    return this;
+  }
+
+  /** Sets the broker port; a non-positive value selects a free port automatically. */
+  public KafkaWithZKComponent withBrokerPort(int brokerPort) {
+    if(brokerPort <= 0)
+    {
+      brokerPort = TestUtils.choosePort();
+    }
+    this.brokerPort = brokerPort;
+    return this;
+  }
+
+  public KafkaWithZKComponent withTopics(List<Topic> topics) {
+    this.topics = topics;
+    return this;
+  }
+
+  public List<Topic> getTopics() {
+    return topics;
+  }
+
+  public int getBrokerPort() {
+    return brokerPort;
+  }
+
+
+  public String getBrokerList()  {
+    return "localhost:" + brokerPort;
+  }
+
+  public KafkaProducer<String, byte[]> createProducer()
+  {
+    return createProducer(new HashMap<String, Object>());
+  }
+
+  /** Creates a byte[] producer pointed at this broker; caller-supplied properties win. */
+  public KafkaProducer<String, byte[]> createProducer(Map<String, Object> properties)
+  {
+    Map<String, Object> producerConfig = new HashMap<>();
+    producerConfig.put("bootstrap.servers", getBrokerList());
+    producerConfig.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
+    producerConfig.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
+    producerConfig.put("request.required.acks", "-1");
+    producerConfig.put("fetch.message.max.bytes", ""+ 1024*1024*10);
+    producerConfig.put("replica.fetch.max.bytes", "" + 1024*1024*10);
+    producerConfig.put("message.max.bytes", "" + 1024*1024*10);
+    producerConfig.put("message.send.max.retries", "10");
+    producerConfig.putAll(properties);
+    return new KafkaProducer<>(producerConfig);
+  }
+
+  @Override
+  public void start() {
+    // setup Zookeeper
+    if(zookeeperConnectString == null) {
+      String zkConnect = TestZKUtils.zookeeperConnect();
+      zkServer = new EmbeddedZookeeper(zkConnect);
+      zookeeperConnectString = zkServer.connectString();
+    }
+    zkClient = new ZkClient(zookeeperConnectString, 30000, 30000, ZKStringSerializer$.MODULE$);
+
+    // setup Broker
+    Properties props = TestUtils.createBrokerConfig(0, brokerPort, true);
+    KafkaConfig config = new KafkaConfig(props);
+    Time mock = new MockTime();
+    kafkaServer = TestUtils.createServer(config, mock);
+    for(Topic topic : getTopics()) {
+      try {
+        createTopic(topic.name, topic.numPartitions, true);
+      } catch (InterruptedException e) {
+        throw new RuntimeException("Unable to create topic", e);
+      }
+    }
+    // The callback is optional; the original unconditionally dereferenced it and
+    // NPE'd when withPostStartCallback() was never called.
+    if(postStartCallback != null) {
+      postStartCallback.apply(this);
+    }
+  }
+
+  public String getZookeeperConnect() {
+    return zookeeperConnectString;
+  }
+
+  @Override
+  public void stop() {
+    // Guard each resource: stop() may run after a partial or failed start().
+    if(kafkaServer != null) {
+      kafkaServer.shutdown();
+    }
+    if(zkClient != null) {
+      zkClient.close();
+    }
+    if(zkServer != null) {
+      zkServer.shutdown();
+    }
+
+  }
+
+  /** Fetches all currently-available messages from partition 0 of the topic. */
+  public List<byte[]> readMessages(String topic) {
+    // Use the configured broker port (the original hard-coded 6667, breaking any
+    // component built with withBrokerPort), and close the consumer when done.
+    SimpleConsumer consumer = new SimpleConsumer("localhost", brokerPort, 100000, 64 * 1024, "consumer");
+    try {
+      FetchRequest req = new FetchRequestBuilder()
+              .clientId("consumer")
+              .addFetch(topic, 0, 0, 100000)
+              .build();
+      FetchResponse fetchResponse = consumer.fetch(req);
+      Iterator<MessageAndOffset> results = fetchResponse.messageSet(topic, 0).iterator();
+      List<byte[]> messages = new ArrayList<>();
+      while(results.hasNext()) {
+        ByteBuffer payload = results.next().message().payload();
+        byte[] bytes = new byte[payload.limit()];
+        payload.get(bytes);
+        messages.add(bytes);
+      }
+      return messages;
+    }
+    finally {
+      consumer.close();
+    }
+  }
+
+  public ConsumerIterator<byte[], byte[]> getStreamIterator(String topic) {
+    return getStreamIterator(topic, "group0", "consumer0");
+  }
+  public ConsumerIterator<byte[], byte[]> getStreamIterator(String topic, String group, String consumerName) {
+    // setup simple consumer
+    // Use the resolved connect string: zkServer is null when an existing zookeeper
+    // was supplied via withExistingZookeeper, so zkServer.connectString() would NPE.
+    Properties consumerProperties = TestUtils.createConsumerProperties(getZookeeperConnect(), group, consumerName, -1);
+    consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
+    Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
+    topicCountMap.put(topic, 1);
+    Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
+    KafkaStream<byte[], byte[]> stream = consumerMap.get(topic).get(0);
+    ConsumerIterator<byte[], byte[]> iterator = stream.iterator();
+    return iterator;
+  }
+
+  public void shutdownConsumer() {
+    consumer.shutdown();
+  }
+
+  public void createTopic(String name) throws InterruptedException {
+    createTopic(name, 1, true);
+  }
+
+  /** Blocks until metadata for every partition of the topic has propagated to the broker. */
+  public void waitUntilMetadataIsPropagated(String topic, int numPartitions) {
+    List<KafkaServer> servers = new ArrayList<>();
+    servers.add(kafkaServer);
+    for(int part = 0;part < numPartitions;++part) {
+      TestUtils.waitUntilMetadataIsPropagated(scala.collection.JavaConversions.asScalaBuffer(servers), topic, part, 5000);
+    }
+  }
+
+  public void createTopic(String name, int numPartitions, boolean waitUntilMetadataIsPropagated) throws InterruptedException {
+    AdminUtils.createTopic(zkClient, name, numPartitions, 1, new Properties());
+    if(waitUntilMetadataIsPropagated) {
+      waitUntilMetadataIsPropagated(name, numPartitions);
+    }
+  }
+
+  /** Produces each message to the topic and flushes by closing the producer. */
+  public void writeMessages(String topic, Collection<byte[]> messages) {
+    KafkaProducer<String, byte[]> kafkaProducer = createProducer();
+    for(byte[] message: messages) {
+      kafkaProducer.send(new ProducerRecord<String, byte[]>(topic, message));
+    }
+    kafkaProducer.close();
+  }
+}
\ No newline at end of file



[02/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/PersistentAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/PersistentAccessTracker.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/PersistentAccessTracker.java
deleted file mode 100644
index 46eb010..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/accesstracker/PersistentAccessTracker.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.accesstracker;
-
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.log4j.Logger;
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.*;
-import java.util.Map;
-import java.util.Timer;
-import java.util.TimerTask;
-
-public class PersistentAccessTracker implements AccessTracker {
-    private static final Logger LOG = Logger.getLogger(PersistentAccessTracker.class);
-    private static final long serialVersionUID = 1L;
-
-    public static class AccessTrackerKey {
-        String name;
-        String containerName;
-        long timestamp;
-        public AccessTrackerKey(String name, String containerName, long timestamp) {
-            this.name = name;
-            this.containerName = containerName;
-            this.timestamp = timestamp;
-        }
-
-        public byte[] toRowKey() {
-            ByteArrayOutputStream os = new ByteArrayOutputStream();
-            DataOutputStream dos = new DataOutputStream(os);
-            try {
-                dos.writeUTF(name);
-                dos.writeLong(timestamp);
-                dos.writeUTF(containerName);
-                dos.flush();
-            } catch (IOException e) {
-                throw new RuntimeException("Unable to write rowkey: " + this, e);
-            }
-
-            return os.toByteArray();
-        }
-
-        public static byte[] getTimestampScanKey(String name, long timestamp) {
-            ByteArrayOutputStream os = new ByteArrayOutputStream();
-            DataOutputStream dos = new DataOutputStream(os);
-            try {
-                dos.writeUTF(name);
-                dos.writeLong(timestamp);
-            } catch (IOException e) {
-                throw new RuntimeException("Unable to create scan key " , e);
-            }
-
-            return os.toByteArray();
-        }
-
-        public static AccessTrackerKey fromRowKey(byte[] rowKey) {
-            ByteArrayInputStream is = new ByteArrayInputStream(rowKey);
-            DataInputStream dis = new DataInputStream(is);
-            try {
-                String name = dis.readUTF();
-                long timestamp = dis.readLong();
-                String containerName = dis.readUTF();
-                return new AccessTrackerKey(name, containerName, timestamp);
-            } catch (IOException e) {
-                throw new RuntimeException("Unable to read rowkey: ", e);
-            }
-        }
-    }
-
-    private static class Persister extends TimerTask {
-        PersistentAccessTracker tracker;
-        public Persister(PersistentAccessTracker tracker) {
-            this.tracker = tracker;
-        }
-        /**
-         * The action to be performed by this timer task.
-         */
-        @Override
-        public void run() {
-            tracker.persist(false);
-        }
-    }
-
-    Object sync = new Object();
-    HTableInterface accessTrackerTable;
-    String accessTrackerColumnFamily;
-    AccessTracker underlyingTracker;
-    long timestamp = System.currentTimeMillis();
-    String name;
-    String containerName;
-    private Timer timer;
-    long maxMillisecondsBetweenPersists;
-
-    public PersistentAccessTracker( String name
-                                  , String containerName
-                                  , HTableInterface accessTrackerTable
-                                  , String columnFamily
-                                  , AccessTracker underlyingTracker
-                                  , long maxMillisecondsBetweenPersists
-                                  )
-    {
-        this.containerName = containerName;
-        this.accessTrackerTable = accessTrackerTable;
-        this.name = name;
-        this.accessTrackerColumnFamily = columnFamily;
-        this.underlyingTracker = underlyingTracker;
-        this.maxMillisecondsBetweenPersists = maxMillisecondsBetweenPersists;
-        timer = new Timer();
-        if(maxMillisecondsBetweenPersists > 0) {
-            timer.scheduleAtFixedRate(new Persister(this), maxMillisecondsBetweenPersists, maxMillisecondsBetweenPersists);
-        }
-    }
-
-    public void persist(boolean force) {
-        synchronized(sync) {
-            if(force || (System.currentTimeMillis() - timestamp) >= maxMillisecondsBetweenPersists) {
-                //persist
-                try {
-                    AccessTrackerUtil.INSTANCE.persistTracker(accessTrackerTable, accessTrackerColumnFamily, new AccessTrackerKey(name, containerName, timestamp), underlyingTracker);
-                    timestamp = System.currentTimeMillis();
-                    reset();
-                } catch (IOException e) {
-                    LOG.error("Unable to persist access tracker.", e);
-                }
-            }
-        }
-    }
-
-    @Override
-    public void logAccess(LookupKey key) {
-        synchronized (sync) {
-            underlyingTracker.logAccess(key);
-            if (isFull()) {
-                persist(true);
-            }
-        }
-    }
-
-    @Override
-    public void configure(Map<String, Object> config) {
-        underlyingTracker.configure(config);
-    }
-
-    @Override
-    public boolean hasSeen(LookupKey key) {
-        synchronized(sync) {
-            return underlyingTracker.hasSeen(key);
-        }
-    }
-
-    @Override
-    public String getName() {
-        return underlyingTracker.getName();
-    }
-
-    @Override
-    public AccessTracker union(AccessTracker tracker) {
-        PersistentAccessTracker t1 = (PersistentAccessTracker)tracker;
-        underlyingTracker = underlyingTracker.union(t1.underlyingTracker);
-        return this;
-    }
-
-    @Override
-    public void reset() {
-        synchronized(sync) {
-            underlyingTracker.reset();
-        }
-    }
-
-    @Override
-    public boolean isFull() {
-        synchronized (sync) {
-            return underlyingTracker.isFull();
-        }
-    }
-
-    @Override
-    public void cleanup() throws IOException {
-        synchronized(sync) {
-            try {
-                persist(true);
-            }
-            catch(Throwable t) {
-                LOG.error("Unable to persist underlying tracker", t);
-            }
-            underlyingTracker.cleanup();
-            accessTrackerTable.close();
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/handler/Handler.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/handler/Handler.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/handler/Handler.java
deleted file mode 100644
index 198f90e..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/reference/lookup/handler/Handler.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.reference.lookup.handler;
-
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.IOException;
-
-public interface Handler<CONTEXT_T, KEY_T extends LookupKey, RESULT_T> extends AutoCloseable{
-  boolean exists(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException;
-  RESULT_T get(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException;
-  Iterable<Boolean> exists(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException;
-  Iterable<RESULT_T> get(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterCallback.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterCallback.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterCallback.java
deleted file mode 100644
index 2c430d3..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterCallback.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.spout.pcap;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.log4j.Logger;
-import storm.kafka.Callback;
-import storm.kafka.EmitContext;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.util.List;
-
-public class HDFSWriterCallback implements Callback {
-  static final long serialVersionUID = 0xDEADBEEFL;
-  private static final Logger LOG = Logger.getLogger(HDFSWriterCallback.class);
-  public static final byte[] PCAP_GLOBAL_HEADER = new byte[] {
-          (byte) 0xd4, (byte) 0xc3, (byte) 0xb2, (byte) 0xa1, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00
-          ,0x00, 0x00, 0x00, 0x00, (byte) 0xff, (byte) 0xff, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
-  };
-
-  private static final List<Object> RET_TUPLE = ImmutableList.of((Object)Byte.valueOf((byte) 0x00), Byte.valueOf((byte)0x00));
-  private FileSystem fs;
-  private SequenceFile.Writer writer;
-  private HDFSWriterConfig config;
-  private long batchStartTime;
-  private long numWritten;
-  private EmitContext context;
-
-  public HDFSWriterCallback() {
-    //this.config = config;
-  }
-
-  public HDFSWriterCallback withConfig(HDFSWriterConfig config) {
-    LOG.info("Configured: " + config);
-    this.config = config;
-    return this;
-  }
-
-  @Override
-  public List<Object> apply(List<Object> tuple, EmitContext context) {
-
-    LongWritable ts = (LongWritable) tuple.get(0);
-    BytesWritable rawPacket = (BytesWritable)tuple.get(1);
-    try {
-      turnoverIfNecessary(ts.get());
-      writer.append(ts, headerize(rawPacket.getBytes()));
-      writer.hflush();
-    } catch (IOException e) {
-      LOG.error(e.getMessage(), e);
-      //drop?  not sure..
-    }
-    return RET_TUPLE;
-  }
-
-  private static BytesWritable headerize(byte[] packet) {
-    byte[] ret = new byte[packet.length + PCAP_GLOBAL_HEADER.length];
-    int offset = 0;
-    System.arraycopy(PCAP_GLOBAL_HEADER, 0, ret, offset, PCAP_GLOBAL_HEADER.length);
-    offset += PCAP_GLOBAL_HEADER.length;
-    System.arraycopy(packet, 0, ret, offset, packet.length);
-    return new BytesWritable(ret);
-  }
-
-
-  private synchronized void turnoverIfNecessary(long ts) throws IOException {
-    long duration = ts - batchStartTime;
-    if(batchStartTime == 0L || duration > config.getMaxTimeMS() || numWritten > config.getNumPackets()) {
-      //turnover
-      Path path = getPath(ts);
-      if(writer != null) {
-        writer.close();
-      }
-      writer = SequenceFile.createWriter(new Configuration()
-              , SequenceFile.Writer.file(path)
-              , SequenceFile.Writer.keyClass(LongWritable.class)
-              , SequenceFile.Writer.valueClass(BytesWritable.class)
-      );
-      //reset state
-      LOG.info("Turning over and writing to " + path);
-      batchStartTime = ts;
-      numWritten = 0;
-    }
-  }
-
-  private Path getPath(long ts) {
-    String fileName = Joiner.on("_").join("pcap"
-            , "" + ts
-            , context.get(EmitContext.Type.UUID)
-    );
-    return new Path(config.getOutputPath(), fileName);
-  }
-
-  @Override
-  public void initialize(EmitContext context) {
-    this.context = context;
-    try {
-      fs = FileSystem.get(new Configuration());
-    } catch (IOException e) {
-      throw new IllegalStateException("Unable to create filesystem", e);
-    }
-  }
-
-  /**
-   * Closes this resource, relinquishing any underlying resources.
-   * This method is invoked automatically on objects managed by the
-   * {@code try}-with-resources statement.
-   * <p/>
-   * <p>While this interface method is declared to throw {@code
-   * Exception}, implementers are <em>strongly</em> encouraged to
-   * declare concrete implementations of the {@code close} method to
-   * throw more specific exceptions, or to throw no exception at all
-   * if the close operation cannot fail.
-   * <p/>
-   * <p><em>Implementers of this interface are also strongly advised
-   * to not have the {@code close} method throw {@link
-   * InterruptedException}.</em>
-   * <p/>
-   * This exception interacts with a thread's interrupted status,
-   * and runtime misbehavior is likely to occur if an {@code
-   * InterruptedException} is {@linkplain Throwable#addSuppressed
-   * suppressed}.
-   * <p/>
-   * More generally, if it would cause problems for an
-   * exception to be suppressed, the {@code AutoCloseable.close}
-   * method should not throw it.
-   * <p/>
-   * <p>Note that unlike the {@link Closeable#close close}
-   * method of {@link Closeable}, this {@code close} method
-   * is <em>not</em> required to be idempotent.  In other words,
-   * calling this {@code close} method more than once may have some
-   * visible side effect, unlike {@code Closeable.close} which is
-   * required to have no effect if called more than once.
-   * <p/>
-   * However, implementers of this interface are strongly encouraged
-   * to make their {@code close} methods idempotent.
-   *
-   * @throws Exception if this resource cannot be closed
-   */
-  @Override
-  public void close() throws Exception {
-    if(writer != null) {
-      writer.close();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterConfig.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterConfig.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterConfig.java
deleted file mode 100644
index ccfc884..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/spout/pcap/HDFSWriterConfig.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.spout.pcap;
-
-import com.google.common.base.Splitter;
-import com.google.common.collect.Iterables;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-public class HDFSWriterConfig implements Serializable {
-  static final long serialVersionUID = 0xDEADBEEFL;
-  private long numPackets;
-  private long maxTimeMS;
-  private String outputPath;
-  private String zookeeperQuorum;
-
-  public HDFSWriterConfig withOutputPath(String path) {
-    outputPath = path;
-    return this;
-  }
-
-  public HDFSWriterConfig withNumPackets(long n) {
-    numPackets = n;
-    return this;
-  }
-
-  public HDFSWriterConfig withMaxTimeMS(long t) {
-    maxTimeMS = t;
-    return this;
-  }
-
-  public HDFSWriterConfig withZookeeperQuorum(String zookeeperQuorum) {
-    this.zookeeperQuorum = zookeeperQuorum;
-    return this;
-  }
-
-  public List<String> getZookeeperServers() {
-    List<String> out = new ArrayList<>();
-    if(zookeeperQuorum != null) {
-      for (String hostPort : Splitter.on(',').split(zookeeperQuorum)) {
-        Iterable<String> tokens = Splitter.on(':').split(hostPort);
-        String host = Iterables.getFirst(tokens, null);
-        if(host != null) {
-          out.add(host);
-        }
-      }
-    }
-    return out;
-  }
-
-  public Integer getZookeeperPort() {
-    if(zookeeperQuorum != null) {
-      String hostPort = Iterables.getFirst(Splitter.on(',').split(zookeeperQuorum), null);
-      String portStr = Iterables.getLast(Splitter.on(':').split(hostPort));
-      return Integer.parseInt(portStr);
-    }
-    return  null;
-  }
-
-  public String getOutputPath() {
-    return outputPath;
-  }
-
-  public long getNumPackets() {
-    return numPackets;
-  }
-
-  public long getMaxTimeMS() {
-    return maxTimeMS;
-  }
-
-  @Override
-  public String toString() {
-    return "HDFSWriterConfig{" +
-            "numPackets=" + numPackets +
-            ", maxTimeMS=" + maxTimeMS +
-            ", outputPath='" + outputPath + '\'' +
-            '}';
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractConfigTest.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractConfigTest.java
deleted file mode 100644
index dc3917d..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractConfigTest.java
+++ /dev/null
@@ -1,299 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.test;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Map;
-
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.github.fge.jsonschema.main.JsonValidator;
-import org.apache.metron.helpers.topology.SettingsLoader;
-
- /**
- * <ul>
- * <li>Title: </li>
- * <li>Description: The class <code>AbstractConfigTest</code> is
- * an abstract base class for implementing JUnit tests that need to use
- * config to connect to ZooKeeper and HBase. The <code>setup</code> method will attempt to
- * load a properties from a file, located in src/test/resources,
- * with the same name as the class.</li>
- * <li>Created: Oct 10, 2014</li>
- * </ul>
- * @version $Revision: 1.1 $
- */
-public class AbstractConfigTest  extends AbstractTestContext{
-         /**
-         * The configPath.
-         */
-        protected String configPath=null;   
-        
-        /**
-        * The configName.
-        */
-       protected String configName=null;           
-
-        /**
-         * The config.
-         */
-        private Configuration config=null;
-        
-         /**
-         * The settings.
-         */
-        Map<String, String> settings=null;       
-
-        /**
-         * The schemaJsonString.
-         */
-        private String schemaJsonString = null;
-        /**
-         * Any Object for mavenMode
-         * @parameter
-         *   expression="${mode}"
-         *   default-value="local"
-         */
-         private Object mode="local";        
-
-        /**
-         * Constructs a new <code>AbstractConfigTest</code> instance.
-         * @throws Exception 
-         */
-        public AbstractConfigTest() throws Exception {
-            super.setUp();
-        }
-
-        /**
-         * Constructs a new <code>AbstractTestContext</code> instance.
-         * @param name the name of the test case.
-         */
-        public AbstractConfigTest(String name) {
-            super(name);
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#setUp()
-         */
-        protected void setUp(String configName) throws Exception {
-            super.setUp();
-            this.setConfigPath("src/test/resources/config/"+getClass().getSimpleName()+".config");
-            try {
-                this.setConfig(new PropertiesConfiguration(this.getConfigPath()));
-               
-                Map configOptions= SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName+"=");
-                this.setSettings(SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName + "."));
-                this.getSettings().put(configName, (String) configOptions.get(configName));
-            } catch (ConfigurationException e) {
-                e.printStackTrace();
-                throw new Exception("Config not found !!"+e);
-            }
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#tearDown()
-         */
-        @Override
-        protected void tearDown() throws Exception {
-
-        }
-
-        
-         /**
-         * validateJsonData
-         * @param jsonSchema
-         * @param jsonData
-         * @return
-         * @throws Exception
-         */
-         
-        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
-            throws Exception {
-    
-            final JsonNode d = JsonLoader.fromString(jsonData);
-            final JsonNode s = JsonLoader.fromString(jsonSchema);
-    
-            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
-            JsonValidator v = factory.getValidator();
-    
-            ProcessingReport report = v.validate(s, d);
-            System.out.println(report);
-            
-            return report.toString().contains("success");
-        }
-        
-        protected String readSchemaFromFile(URL schema_url) throws Exception {
-            BufferedReader br = new BufferedReader(new FileReader(
-                    schema_url.getFile()));
-            String line;
-            StringBuilder sb = new StringBuilder();
-            while ((line = br.readLine()) != null) {
-                System.out.println(line);
-                sb.append(line);
-            }
-            br.close();
-
-            String schema_string = sb.toString().replaceAll("\n", "");
-            schema_string = schema_string.replaceAll(" ", "");
-
-            System.out.println("Read in schema: " + schema_string);
-
-            return schema_string;
-        }        
-  
-        protected String[] readTestDataFromFile(String test_data_url) throws Exception {
-            BufferedReader br = new BufferedReader(new FileReader(
-                    new File(test_data_url)));
-            ArrayList<String> inputDataLines = new ArrayList<String>();
-           
-            String line;
-            while ((line = br.readLine()) != null) {
-                System.out.println(line);
-                inputDataLines.add(line.toString().replaceAll("\n", ""));
-            }
-            br.close();
-            String[] inputData = new String[inputDataLines.size()];
-            inputData = inputDataLines.toArray(inputData);
-
-            return inputData;
-        }          
-       /**
-        * Skip Tests
-        */
-       public boolean skipTests(Object mode){
-           if(mode.toString().equals("local")){
-               return true;
-           }else {
-               return false;
-           }
-       }
-       
-       /**
-        * Returns the mode.
-        * @return the mode.
-        */
-       
-       public Object getMode() {
-           return mode;
-       }
-
-       /**
-        * Sets the mode.
-        * @param mode the mode.
-        */
-       
-       public void setMode(Object mode) {
-       
-           this.mode = mode;
-       }
-
-    
-         /**
-         * @param readSchemaFromFile
-         */
-        public void setSchemaJsonString(String schemaJsonString) {
-            this.schemaJsonString=schemaJsonString;
-        }
-
-    
-         /**
-         * @return
-         */
-        public String getSchemaJsonString() {
-           return this.schemaJsonString;
-        }
-        
-        /**
-        * Returns the configPath.
-        * @return the configPath.
-        */
-       public String getConfigPath() {
-           return configPath;
-       }
-    
-       /**
-        * Sets the configPath.
-        * @param configPath the configPath.
-        */
-       public void setConfigPath(String configPath) {
-           this.configPath = configPath;
-       }    
-       /**
-        * Returns the config.
-        * @return the config.
-        */
-       
-       public Configuration getConfig() {
-           return config;
-       }
-    
-       /**
-        * Sets the config.
-        * @param config the config.
-        */
-       
-       public void setConfig(Configuration config) {
-       
-           this.config = config;
-       }  
-       /**
-        * Returns the settings.
-        * @return the settings.
-        */
-       
-       public Map<String, String> getSettings() {
-           return settings;
-       }
-
-       /**
-        * Sets the settings.
-        * @param settings the settings.
-        */
-       
-       public void setSettings(Map<String, String> settings) {
-           this.settings = settings;
-       }   
-       /**
-       * Returns the configName.
-       * @return the configName.
-       */
-      public String getConfigName() {
-          return configName;
-      }
-
-      /**
-       * Sets the configName.
-       * @param configName the configName.
-       */
-      public void setConfigName(String configName) {  
-          this.configName = configName;
-      }       
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractSchemaTest.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractSchemaTest.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractSchemaTest.java
deleted file mode 100644
index 3c5f597..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractSchemaTest.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.test;
-import java.io.BufferedReader;
-import java.io.FileReader;
-import java.net.URL;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.github.fge.jackson.JsonLoader;
-import com.github.fge.jsonschema.core.report.ProcessingReport;
-import com.github.fge.jsonschema.main.JsonSchemaFactory;
-import com.github.fge.jsonschema.main.JsonValidator;
-
- /**
- * <ul>
- * <li>Title: </li>
- * <li>Description: The class <code>AbstractSchemaTest</code> is
- * an abstract base class for implementing JUnit tests that need to load a
- * Json Schema. The <code>setup</code> method will attempt to
- * load a properties from a file, located in src/test/resources,
- * with the same name as the class.</li>
- * <li>Created: Aug 7, 2014</li>
- * </ul>
- * @version $Revision: 1.1 $
- */
-public class AbstractSchemaTest  extends AbstractConfigTest{
-        
-        
-         /**
-         * The schemaJsonString.
-         */
-        private String schemaJsonString = null;
-        /**
-         * Any Object for mavenMode
-         * @parameter
-         *   expression="${mode}"
-         *   default-value="local"
-         */
-         private Object mode="local";        
-
-        /**
-         * Constructs a new <code>AbstractTestContext</code> instance.
-         * @throws Exception 
-         */
-        public AbstractSchemaTest() throws Exception {
-            super.setUp();
-        }
-
-        /**
-         * Constructs a new <code>AbstractTestContext</code> instance.
-         * @param name the name of the test case.
-         */
-        public AbstractSchemaTest(String name) {
-            super(name);
-            try{
-                if(System.getProperty("mode")!=null){
-                    setMode(System.getProperty("mode") );                
-                }else
-                {
-                    setMode("local");
-                }
-            }catch(Exception ex){
-                setMode("local");
-            }            
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#setUp()
-         */
-        @Override
-        protected void setUp() throws Exception {
-            super.setUp();
-            
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#tearDown()
-         */
-        @Override
-        protected void tearDown() throws Exception {
-
-        }
-
-        
-         /**
-         * validateJsonData
-         * @param jsonSchema
-         * @param jsonData
-         * @return
-         * @throws Exception
-         */
-         
-        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
-            throws Exception {
-    
-            final JsonNode d = JsonLoader.fromString(jsonData);
-            final JsonNode s = JsonLoader.fromString(jsonSchema);
-    
-            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
-            JsonValidator v = factory.getValidator();
-    
-            ProcessingReport report = v.validate(s, d);
-            System.out.println(report);
-            
-            return report.toString().contains("success");
-        }
-        
-        protected String readSchemaFromFile(URL schema_url) throws Exception {
-            BufferedReader br = new BufferedReader(new FileReader(
-                    schema_url.getFile()));
-            String line;
-            StringBuilder sb = new StringBuilder();
-            while ((line = br.readLine()) != null) {
-                System.out.println(line);
-                sb.append(line);
-            }
-            br.close();
-
-            String schema_string = sb.toString().replaceAll("\n", "");
-            schema_string = schema_string.replaceAll(" ", "");
-
-            System.out.println("Read in schema: " + schema_string);
-
-            return schema_string;
-
-        }        
-        
-       /**
-        * Skip Tests
-        */
-       public boolean skipTests(Object mode){
-           if(mode.toString().equals("local")){
-               return true;
-           }else {
-               return false;
-           }
-       }
-       
-       /**
-        * Returns the mode.
-        * @return the mode.
-        */
-       
-       public Object getMode() {
-           return mode;
-       }
-
-       /**
-        * Sets the mode.
-        * @param mode the mode.
-        */
-       
-       public void setMode(Object mode) {
-       
-           this.mode = mode;
-       }
-
-    
-     /**
-     
-     * @param readSchemaFromFile
-     */
-     
-    public void setSchemaJsonString(String schemaJsonString) {
-        this.schemaJsonString=schemaJsonString;
-    }
-
-    
-     /**
-     
-     * @return
-     */
-     
-    public String getSchemaJsonString() {
-       return this.schemaJsonString;
-    }
-
-     protected void assertNotNull(Object o) throws Exception {}
-     
-}
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractTestContext.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractTestContext.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractTestContext.java
deleted file mode 100644
index 2395d04..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/test/AbstractTestContext.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.test;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Properties;
-
- /**
- * <ul>
- * <li>Title: </li>
- * <li>Description: The class <code>AbstractTestContext</code> is
- * an abstract base class for implementing JUnit tests that need to load a
- * test properties. The <code>setup</code> method will attempt to
- * load a properties from a file, located in src/test/resources,
- * with the same name as the class.</li>
- * <li>Created: Aug 7, 2014</li>
- * </ul>
- * @version $Revision: 1.1 $
- */
-public class AbstractTestContext {
-         /**
-         * The testProps.
-         */
-        protected File testPropFile=null;
-
-        /**
-         * The properties loaded for test.
-         */
-        protected Properties testProperties=new Properties();
-        
-        /**
-         * Any Object for mavenMode
-         * @parameter
-         *   expression="${mode}"
-         *   default-value="global"
-         */
-         private Object mode="local";        
-
-        /**
-         * Constructs a new <code>AbstractTestContext</code> instance.
-         */
-        public AbstractTestContext() {
-            super();
-        }
-
-        /**
-         * Constructs a new <code>AbstractTestContext</code> instance.
-         * @param name the name of the test case.
-         */
-        public AbstractTestContext(String name) {
-            try{
-                if(System.getProperty("mode")!=null){
-                    setMode(System.getProperty("mode") );                
-                }else
-                {
-                    setMode("local");
-                }
-            }catch(Exception ex){
-                setMode("local");
-            }            
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#setUp()
-         */
-        protected void setUp() throws Exception {
-            InputStream input=null;
-            File directory = new File("src/test/resources");
-            if (!directory.isDirectory()) {
-                return;
-            }
-            File file = new File(directory, getClass().getSimpleName() + ".properties");
-            if (!file.canRead()) {
-                return;
-            }
-            setTestPropFile(file);
-            try{
-                input=new FileInputStream(file);
-                testProperties.load(input);
-            }catch(IOException ex){
-                ex.printStackTrace();
-                throw new Exception("failed to load properties");
-            }
-            
-            
-        }
-
-        /*
-         * (non-Javadoc)
-         * @see junit.framework.TestCase#tearDown()
-         */
-        protected void tearDown() throws Exception {
-
-        }
-
-        /**
-         * Returns the testProperties.
-         * @return the testProperties.
-         */
-        
-        public Properties getTestProperties() {
-            return testProperties;
-        }
-
-        /**
-         * Sets the testProperties.
-         * @param testProperties the testProperties.
-         */
-        
-        public void setTestProperties(Properties testProperties) {
-        
-            this.testProperties = testProperties;
-        }    
-        /**
-        * Returns the testPropFile.
-        * @return the testPropFile.
-        */
-       
-       public File getTestPropFile() {
-           return testPropFile;
-       }
-
-       /**
-        * Sets the testPropFile.
-        * @param testPropFile the testPropFile.
-        */
-       
-       public void setTestPropFile(File testPropFile) {
-       
-           this.testPropFile = testPropFile;
-       }     
-       
-       /**
-        * Skip Tests
-        */
-       public boolean skipTests(Object mode){
-           if(mode.toString().equals("local")){
-               return true;
-           }else {
-               return false;
-           }
-       }
-       
-       /**
-        * Returns the mode.
-        * @return the mode.
-        */
-       
-       public Object getMode() {
-           return mode;
-       }
-
-       /**
-        * Sets the mode.
-        * @param mode the mode.
-        */
-       
-       public void setMode(Object mode) {
-       
-           this.mode = mode;
-       }
-     
-    }
-
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/tldextractor/BasicTldExtractor.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/tldextractor/BasicTldExtractor.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/tldextractor/BasicTldExtractor.java
deleted file mode 100644
index 24c3c25..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/tldextractor/BasicTldExtractor.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.tldextractor;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class BasicTldExtractor implements Serializable {
-	private static final long serialVersionUID = -7440226111118873815L;
-	private StringBuilder sb = new StringBuilder();
-
-    private Pattern pattern;
-    
-    /**
-    * The inputFile.
-    */
-   private String inputFile ="effective_tld_names.dat";
-   
-   public BasicTldExtractor(String filePath) {
-       this.inputFile=filePath;
-       this.init();
-   }
-   
-	public BasicTldExtractor() {
-      this.init();
-	}
-
-	private void init(){
-	       try {
-	            ArrayList<String> terms = new ArrayList<String>();
-
-	            
-	            BufferedReader br = new BufferedReader(new InputStreamReader(
-	                    getClass().getClassLoader().getResourceAsStream(inputFile)));
-	            String s = null;
-	            while ((s = br.readLine()) != null) {
-	                s = s.trim();
-	                if (s.length() == 0 || s.startsWith("//") || s.startsWith("!"))
-	                    continue;
-	                terms.add(s);
-	            }
-	            Collections.sort(terms, new StringLengthComparator());
-	            for (String t : terms)
-	                add(t);
-	            compile();
-	            br.close();
-	        } catch (IOException e) {
-	            throw new IllegalStateException(e);
-	        }
-	}
-	protected void add(String s) {
-		s = s.replace(".", "\\.");
-		s = "\\." + s;
-		if (s.startsWith("*")) {
-			s = s.replace("*", ".+");
-			sb.append(s).append("|");
-		} else {
-			sb.append(s).append("|");
-		}
-	}
-
-	public void compile() {
-		if (sb.length() > 0)
-			sb.deleteCharAt(sb.length() - 1);
-		sb.insert(0, "[^.]+?(");
-		sb.append(")$");
-		pattern = Pattern.compile(sb.toString());
-		sb = null;
-	}
-
-	public String extract2LD(String host) {
-		Matcher m = pattern.matcher(host);
-		if (m.find()) {
-			return m.group(0);
-		}
-		return null;
-	}
-
-	public String extractTLD(String host) {
-		Matcher m = pattern.matcher(host);
-		if (m.find()) {
-			return m.group(1);
-		}
-		return null;
-	}
-
-	public static class StringLengthComparator implements Comparator<String> {
-		public int compare(String s1, String s2) {
-			if (s1.length() > s2.length())
-				return -1;
-			if (s1.length() < s2.length())
-				return 1;
-			return 0;
-		}
-	}
-    /**
-     * Returns the sb.
-     * @return the sb.
-     */
-    
-    public StringBuilder getSb() {
-        return sb;
-    }
-
-    /**
-     * Sets the sb.
-     * @param sb the sb.
-     */
-    
-    public void setSb(StringBuilder sb) {
-    
-        this.sb = sb;
-    }
-    /**
-     * Returns the inputFile.
-     * @return the inputFile.
-     */
-    
-    public String getInputFile() {
-        return inputFile;
-    }
-
-    /**
-     * Sets the inputFile.
-     * @param inputFile the inputFile.
-     */
-    
-    public void setInputFile(String inputFile) {
-    
-        this.inputFile = inputFile;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/topology/TopologyUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/topology/TopologyUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/topology/TopologyUtils.java
deleted file mode 100644
index 78371d8..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/topology/TopologyUtils.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.topology;
-
-import org.apache.metron.Constants;
-import org.json.simple.JSONObject;
-
-public class TopologyUtils {
-
-  public static String getSensorType(JSONObject message) {
-    return (String) message.get(Constants.SENSOR_TYPE);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigUtils.java
deleted file mode 100644
index 7f5afe9..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigUtils.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.utils;
-
-import java.lang.reflect.InvocationTargetException;
-
-public class ConfigUtils<T> {
-
-  public static <T> T createInstance(String className, T defaultClass) {
-    T instance;
-    if(className == null || className.length() == 0 || className.charAt(0) == '$') {
-      return defaultClass;
-    }
-    else {
-      try {
-        Class<? extends T> clazz = (Class<? extends T>) Class.forName(className);
-        instance = clazz.getConstructor().newInstance();
-      } catch (InstantiationException e) {
-        throw new IllegalStateException("Unable to instantiate connector.", e);
-      } catch (IllegalAccessException e) {
-        throw new IllegalStateException("Unable to instantiate connector: illegal access", e);
-      } catch (InvocationTargetException e) {
-        throw new IllegalStateException("Unable to instantiate connector", e);
-      } catch (NoSuchMethodException e) {
-        throw new IllegalStateException("Unable to instantiate connector: no such method", e);
-      } catch (ClassNotFoundException e) {
-        throw new IllegalStateException("Unable to instantiate connector: class not found", e);
-      }
-    }
-    return instance;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigurationsUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigurationsUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigurationsUtils.java
deleted file mode 100644
index 62259df..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/ConfigurationsUtils.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.utils;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.cli.PosixParser;
-import org.apache.commons.io.FilenameUtils;
-import org.apache.curator.RetryPolicy;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.metron.Constants;
-import org.apache.metron.domain.Configurations;
-import org.apache.metron.domain.SensorEnrichmentConfig;
-import org.apache.zookeeper.KeeperException;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class ConfigurationsUtils {
-
-  public static CuratorFramework getClient(String zookeeperUrl) {
-    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
-    return CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
-  }
-
-  public static void writeGlobalConfigToZookeeper(Map<String, Object> globalConfig, String zookeeperUrl) throws Exception {
-    writeGlobalConfigToZookeeper(JSONUtils.INSTANCE.toJSON(globalConfig), zookeeperUrl);
-  }
-
-  public static void writeGlobalConfigToZookeeper(byte[] globalConfig, String zookeeperUrl) throws Exception {
-    CuratorFramework client = getClient(zookeeperUrl);
-    client.start();
-    try {
-      writeGlobalConfigToZookeeper(globalConfig, client);
-    }
-    finally {
-      client.close();
-    }
-  }
-
-  public static void writeGlobalConfigToZookeeper(byte[] globalConfig, CuratorFramework client) throws Exception {
-    writeToZookeeper(Constants.ZOOKEEPER_GLOBAL_ROOT, globalConfig, client);
-  }
-
-  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, SensorEnrichmentConfig sensorEnrichmentConfig, String zookeeperUrl) throws Exception {
-    writeSensorEnrichmentConfigToZookeeper(sensorType, JSONUtils.INSTANCE.toJSON(sensorEnrichmentConfig), zookeeperUrl);
-  }
-
-  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, byte[] configData, String zookeeperUrl) throws Exception {
-    CuratorFramework client = getClient(zookeeperUrl);
-    client.start();
-    try {
-      writeSensorEnrichmentConfigToZookeeper(sensorType, configData, client);
-    }
-    finally {
-      client.close();
-    }
-  }
-
-  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, byte[] configData, CuratorFramework client) throws Exception {
-    writeToZookeeper(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + sensorType, configData, client);
-  }
-
-  public static void writeConfigToZookeeper(String name, Map<String, Object> config, String zookeeperUrl) throws Exception {
-    writeConfigToZookeeper(name, JSONUtils.INSTANCE.toJSON(config), zookeeperUrl);
-  }
-
-  public static void writeConfigToZookeeper(String name, byte[] config, String zookeeperUrl) throws Exception {
-    CuratorFramework client = getClient(zookeeperUrl);
-    client.start();
-    try {
-      writeToZookeeper(Constants.ZOOKEEPER_TOPOLOGY_ROOT + "/" + name, config, client);
-    }
-    finally {
-      client.close();
-    }
-  }
-
-  public static void writeToZookeeper(String path, byte[] configData, CuratorFramework client) throws Exception {
-    try {
-      client.setData().forPath(path, configData);
-    } catch (KeeperException.NoNodeException e) {
-      client.create().creatingParentsIfNeeded().forPath(path, configData);
-    }
-  }
-
-  public static void updateConfigsFromZookeeper(Configurations configurations, CuratorFramework client) throws Exception {
-    configurations.updateGlobalConfig(readGlobalConfigBytesFromZookeeper(client));
-    List<String> sensorTypes = client.getChildren().forPath(Constants.ZOOKEEPER_SENSOR_ROOT);
-    for(String sensorType: sensorTypes) {
-      configurations.updateSensorEnrichmentConfig(sensorType, readSensorEnrichmentConfigBytesFromZookeeper(sensorType, client));
-    }
-  }
-
-  public static byte[] readGlobalConfigBytesFromZookeeper(CuratorFramework client) throws Exception {
-    return readFromZookeeper(Constants.ZOOKEEPER_GLOBAL_ROOT, client);
-  }
-
-  public static byte[] readSensorEnrichmentConfigBytesFromZookeeper(String sensorType, CuratorFramework client) throws Exception {
-    return readFromZookeeper(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + sensorType, client);
-  }
-
-  public static byte[] readConfigBytesFromZookeeper(String name, CuratorFramework client) throws Exception {
-    return readFromZookeeper(Constants.ZOOKEEPER_TOPOLOGY_ROOT + "/" + name, client);
-  }
-
-  public static byte[] readFromZookeeper(String path, CuratorFramework client) throws Exception {
-    return client.getData().forPath(path);
-  }
-
-  public static void uploadConfigsToZookeeper(String rootFilePath, String zookeeperUrl) throws Exception {
-    ConfigurationsUtils.writeGlobalConfigToZookeeper(readGlobalConfigFromFile(rootFilePath), zookeeperUrl);
-    Map<String, byte[]> sensorEnrichmentConfigs = readSensorEnrichmentConfigsFromFile(rootFilePath);
-    for(String sensorType: sensorEnrichmentConfigs.keySet()) {
-      ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensorType, sensorEnrichmentConfigs.get(sensorType), zookeeperUrl);
-    }
-  }
-
-  public static byte[] readGlobalConfigFromFile(String rootFilePath) throws IOException {
-    return Files.readAllBytes(Paths.get(rootFilePath, Constants.GLOBAL_CONFIG_NAME + ".json"));
-  }
-
-  public static Map<String, byte[]> readSensorEnrichmentConfigsFromFile(String rootPath) throws IOException {
-    Map<String, byte[]> sensorEnrichmentConfigs = new HashMap<>();
-    for(File file: new File(rootPath, Constants.SENSORS_CONFIG_NAME).listFiles()) {
-      sensorEnrichmentConfigs.put(FilenameUtils.removeExtension(file.getName()), Files.readAllBytes(file.toPath()));
-    }
-    return sensorEnrichmentConfigs;
-  }
-
-  public static void dumpConfigs(String zookeeperUrl) throws Exception {
-    CuratorFramework client = getClient(zookeeperUrl);
-    client.start();
-    //Output global configs
-    {
-      System.out.println("Global config");
-      byte[] globalConfigData = client.getData().forPath(Constants.ZOOKEEPER_GLOBAL_ROOT);
-      System.out.println(new String(globalConfigData));
-    }
-    //Output sensor specific configs
-    {
-      List<String> children = client.getChildren().forPath(Constants.ZOOKEEPER_SENSOR_ROOT);
-      for (String child : children) {
-        byte[] data = client.getData().forPath(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + child);
-        System.out.println("Config for source " + child);
-        System.out.println(new String(data));
-        System.out.println();
-      }
-    }
-    client.close();
-  }
-
-  public static void main(String[] args) {
-
-    Options options = new Options();
-    {
-      Option o = new Option("h", "help", false, "This screen");
-      o.setRequired(false);
-      options.addOption(o);
-    }
-    {
-      Option o = new Option("p", "config_files", true, "Path to the source config files.  Must be named like \"$source\".json");
-      o.setArgName("DIR_NAME");
-      o.setRequired(false);
-      options.addOption(o);
-    }
-    {
-      Option o = new Option("z", "zk", true, "Zookeeper Quroum URL (zk1:2181,zk2:2181,...");
-      o.setArgName("ZK_QUORUM");
-      o.setRequired(true);
-      options.addOption(o);
-    }
-
-    try {
-      CommandLineParser parser = new PosixParser();
-      CommandLine cmd = null;
-      try {
-        cmd = parser.parse(options, args);
-      } catch (ParseException pe) {
-        pe.printStackTrace();
-        final HelpFormatter usageFormatter = new HelpFormatter();
-        usageFormatter.printHelp("ConfigurationsUtils", null, options, null, true);
-        System.exit(-1);
-      }
-      if (cmd.hasOption("h")) {
-        final HelpFormatter usageFormatter = new HelpFormatter();
-        usageFormatter.printHelp("ConfigurationsUtils", null, options, null, true);
-        System.exit(0);
-      }
-
-      String zkQuorum = cmd.getOptionValue("z");
-      if (cmd.hasOption("p")) {
-        String sourcePath = cmd.getOptionValue("p");
-        uploadConfigsToZookeeper(sourcePath, zkQuorum);
-      }
-
-      ConfigurationsUtils.dumpConfigs(zkQuorum);
-
-    } catch (Exception e) {
-      e.printStackTrace();
-      System.exit(-1);
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/JSONUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/JSONUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/JSONUtils.java
deleted file mode 100644
index 34f98bb..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/utils/JSONUtils.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.utils;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-import java.io.*;
-
-public enum JSONUtils {
-  INSTANCE;
-  private static ThreadLocal<ObjectMapper> _mapper = new ThreadLocal<ObjectMapper>() {
-    /**
-     * Returns the current thread's "initial value" for this
-     * thread-local variable.  This method will be invoked the first
-     * time a thread accesses the variable with the {@link #get}
-     * method, unless the thread previously invoked the {@link #set}
-     * method, in which case the {@code initialValue} method will not
-     * be invoked for the thread.  Normally, this method is invoked at
-     * most once per thread, but it may be invoked again in case of
-     * subsequent invocations of {@link #remove} followed by {@link #get}.
-     * <p>
-     * <p>This implementation simply returns {@code null}; if the
-     * programmer desires thread-local variables to have an initial
-     * value other than {@code null}, {@code ThreadLocal} must be
-     * subclassed, and this method overridden.  Typically, an
-     * anonymous inner class will be used.
-     *
-     * @return the initial value for this thread-local
-     */
-    @Override
-    protected ObjectMapper initialValue() {
-      return new ObjectMapper();
-    }
-  };
-
-  public <T> T load(InputStream is, TypeReference<T> ref) throws IOException {
-    return _mapper.get().readValue(is, ref);
-  }
-  public <T> T load(String is, TypeReference<T> ref) throws IOException {
-    return _mapper.get().readValue(is, ref);
-  }
-  public <T> T load(File f, TypeReference<T> ref) throws IOException {
-    return _mapper.get().readValue(new BufferedInputStream(new FileInputStream(f)), ref);
-  }
-  public <T> T load(InputStream is, Class<T> clazz) throws IOException {
-    return _mapper.get().readValue(is, clazz);
-  }
-
-  public <T> T load(File f, Class<T> clazz) throws IOException {
-    return _mapper.get().readValue(new BufferedInputStream(new FileInputStream(f)), clazz);
-  }
-  public <T> T load(String is, Class<T> clazz) throws IOException {
-    return _mapper.get().readValue(is, clazz);
-  }
-
-  public String toJSON(Object o, boolean pretty) throws JsonProcessingException {
-    if(pretty) {
-      return _mapper.get().writerWithDefaultPrettyPrinter().writeValueAsString(o);
-    }
-    else {
-      return _mapper.get().writeValueAsString(o);
-    }
-  }
-
-  public byte[] toJSON(Object config) throws JsonProcessingException {
-    return _mapper.get().writeValueAsBytes(config);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/HBaseWriter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/HBaseWriter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/HBaseWriter.java
deleted file mode 100644
index 291b849..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/HBaseWriter.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.writer;
-
-import backtype.storm.tuple.Tuple;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.metron.domain.Configurations;
-import org.apache.metron.hbase.HTableProvider;
-import org.apache.metron.hbase.TableProvider;
-import org.apache.metron.utils.ConfigUtils;
-import org.apache.metron.writer.interfaces.MessageWriter;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.Map;
-
-public abstract class HBaseWriter implements MessageWriter<JSONObject>, Serializable {
-
-  private String tableName;
-  private String connectorImpl;
-  private TableProvider provider;
-  private HTableInterface table;
-
-  public HBaseWriter(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public HBaseWriter withProviderImpl(String connectorImpl) {
-    this.connectorImpl = connectorImpl;
-    return this;
-  }
-
-  @Override
-  public void init() {
-    final Configuration config = HBaseConfiguration.create();
-    try {
-      provider = ConfigUtils.createInstance(connectorImpl, new HTableProvider());
-      table = provider.getTable(config, tableName);
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-  }
-
-  @Override
-  public void write(String sourceType, Configurations configurations, Tuple tuple, JSONObject message) throws Exception {
-    Put put = new Put(getKey(tuple, message));
-    Map<String, byte[]> values = getValues(tuple, message);
-    for(String column: values.keySet()) {
-      String[] columnParts = column.split(":");
-      long timestamp = getTimestamp(tuple, message);
-      if (timestamp > -1) {
-        put.addColumn(Bytes.toBytes(columnParts[0]), Bytes.toBytes(columnParts[1]), timestamp, values.get(column));
-      } else {
-        put.addColumn(Bytes.toBytes(columnParts[0]), Bytes.toBytes(columnParts[1]), values.get(column));
-      }
-    }
-    table.put(put);
-  }
-
-  @Override
-  public void close() throws Exception {
-    table.close();
-  }
-
-  public abstract byte[] getKey(Tuple tuple, JSONObject message);
-  public abstract long getTimestamp(Tuple tuple, JSONObject message);
-  public abstract Map<String, byte[]> getValues(Tuple tuple, JSONObject message);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/PcapWriter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/PcapWriter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/PcapWriter.java
deleted file mode 100644
index 3320bda..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/PcapWriter.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.writer;
-
-import backtype.storm.tuple.Tuple;
-import org.apache.metron.pcap.PcapUtils;
-import org.json.simple.JSONObject;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class PcapWriter extends HBaseWriter {
-
-  private String column;
-
-  public PcapWriter(String tableName, String column) {
-    super(tableName);
-    this.column = column;
-  }
-
-  @Override
-  public byte[] getKey(Tuple tuple, JSONObject message) {
-    String key = PcapUtils.getSessionKey(message);
-    return key.getBytes();
-  }
-
-  @Override
-  public long getTimestamp(Tuple tuple, JSONObject message) {
-    return (long) message.get("ts_micro");
-  }
-
-  @Override
-  public Map<String, byte[]> getValues(Tuple tuple, JSONObject message) {
-    Map<String, byte[]> values = new HashMap<>();
-    values.put(column, tuple.getBinary(0));
-    return values;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/BulkMessageWriter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/BulkMessageWriter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/BulkMessageWriter.java
deleted file mode 100644
index c3a930c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/BulkMessageWriter.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.writer.interfaces;
-
-import backtype.storm.tuple.Tuple;
-import org.apache.metron.domain.Configurations;
-
-import java.util.List;
-import java.util.Map;
-
-public interface BulkMessageWriter<T> extends AutoCloseable {
-
-  void init(Map stormConf, Configurations configuration) throws Exception;
-  void write(String sensorType, Configurations configurations, List<Tuple> tuples, List<T> messages) throws Exception;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/MessageWriter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/MessageWriter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/MessageWriter.java
deleted file mode 100644
index 25c8a5a..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/writer/interfaces/MessageWriter.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.writer.interfaces;
-
-import backtype.storm.tuple.Tuple;
-import org.apache.metron.domain.Configurations;
-
-public interface MessageWriter<T> extends AutoCloseable {
-
-  void init();
-  void write(String sensorType, Configurations configurations, Tuple tuple, T message) throws Exception;
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/storm/kafka/Callback.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/storm/kafka/Callback.java b/metron-streaming/Metron-Common/src/main/java/storm/kafka/Callback.java
deleted file mode 100644
index ff05c29..0000000
--- a/metron-streaming/Metron-Common/src/main/java/storm/kafka/Callback.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package storm.kafka;
-
-import java.io.Serializable;
-import java.util.List;
-
-public interface Callback extends AutoCloseable, Serializable {
-  List<Object> apply(List<Object> tuple, EmitContext context);
-  void initialize(EmitContext context);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackCollector.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackCollector.java b/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackCollector.java
deleted file mode 100644
index 485da5a..0000000
--- a/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackCollector.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package storm.kafka;
-
-import backtype.storm.spout.ISpoutOutputCollector;
-import backtype.storm.spout.SpoutOutputCollector;
-
-import java.io.Serializable;
-import java.util.List;
-
-public class CallbackCollector extends SpoutOutputCollector implements Serializable {
-  static final long serialVersionUID = 0xDEADBEEFL;
-  Callback _callback;
-  SpoutOutputCollector _delegate;
-  EmitContext _context;
-  public CallbackCollector(Callback callback, SpoutOutputCollector collector, EmitContext context) {
-    super(collector);
-    this._callback = callback;
-    this._delegate = collector;
-    this._context = context;
-  }
-
-
-  /**
-   * Emits a new tuple to the specified output stream with the given message ID.
-   * When Storm detects that this tuple has been fully processed, or has failed
-   * to be fully processed, the spout will receive an ack or fail callback respectively
-   * with the messageId as long as the messageId was not null. If the messageId was null,
-   * Storm will not track the tuple and no callback will be received. The emitted values must be
-   * immutable.
-   *
-   * @param streamId
-   * @param tuple
-   * @param messageId
-   * @return the list of task ids that this tuple was sent to
-   */
-  @Override
-  public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId)
-            .with(EmitContext.Type.STREAM_ID, streamId)
-    );
-    return _delegate.emit(streamId, t, messageId);
-  }
-
-  /**
-   * Emits a new tuple to the default output stream with the given message ID.
-   * When Storm detects that this tuple has been fully processed, or has failed
-   * to be fully processed, the spout will receive an ack or fail callback respectively
-   * with the messageId as long as the messageId was not null. If the messageId was null,
-   * Storm will not track the tuple and no callback will be received. The emitted values must be
-   * immutable.
-   *
-   * @param tuple
-   * @param messageId
-   * @return the list of task ids that this tuple was sent to
-   */
-  @Override
-  public List<Integer> emit(List<Object> tuple, Object messageId) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId));
-    return super.emit(t, messageId);
-  }
-
-  /**
-   * Emits a tuple to the default output stream with a null message id. Storm will
-   * not track this message so ack and fail will never be called for this tuple. The
-   * emitted values must be immutable.
-   *
-   * @param tuple
-   */
-  @Override
-  public List<Integer> emit(List<Object> tuple) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext());
-    return super.emit(t);
-  }
-
-  /**
-   * Emits a tuple to the specified output stream with a null message id. Storm will
-   * not track this message so ack and fail will never be called for this tuple. The
-   * emitted values must be immutable.
-   *
-   * @param streamId
-   * @param tuple
-   */
-  @Override
-  public List<Integer> emit(String streamId, List<Object> tuple) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId));
-    return super.emit(streamId, t);
-  }
-
-  /**
-   * Emits a tuple to the specified task on the specified output stream. This output
-   * stream must have been declared as a direct stream, and the specified task must
-   * use a direct grouping on this stream to receive the message. The emitted values must be
-   * immutable.
-   *
-   * @param taskId
-   * @param streamId
-   * @param tuple
-   * @param messageId
-   */
-  @Override
-  public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId)
-            .with(EmitContext.Type.MESSAGE_ID, messageId)
-            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
-    );
-    super.emitDirect(taskId, streamId, t, messageId);
-  }
-
-  /**
-   * Emits a tuple to the specified task on the default output stream. This output
-   * stream must have been declared as a direct stream, and the specified task must
-   * use a direct grouping on this stream to receive the message. The emitted values must be
-   * immutable.
-   *
-   * @param taskId
-   * @param tuple
-   * @param messageId
-   */
-  @Override
-  public void emitDirect(int taskId, List<Object> tuple, Object messageId) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId)
-            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
-    );
-    super.emitDirect(taskId, t, messageId);
-  }
-
-  /**
-   * Emits a tuple to the specified task on the specified output stream. This output
-   * stream must have been declared as a direct stream, and the specified task must
-   * use a direct grouping on this stream to receive the message. The emitted values must be
-   * immutable.
-   * <p/>
-   * <p> Because no message id is specified, Storm will not track this message
-   * so ack and fail will never be called for this tuple.</p>
-   *
-   * @param taskId
-   * @param streamId
-   * @param tuple
-   */
-  @Override
-  public void emitDirect(int taskId, String streamId, List<Object> tuple) {
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId)
-            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
-    );
-    super.emitDirect(taskId, streamId, t);
-  }
-
-  /**
-   * Emits a tuple to the specified task on the default output stream. This output
-   * stream must have been declared as a direct stream, and the specified task must
-   * use a direct grouping on this stream to receive the message. The emitted values must be
-   * immutable.
-   * <p/>
-   * <p> Because no message id is specified, Storm will not track this message
-   * so ack and fail will never be called for this tuple.</p>
-   *
-   * @param taskId
-   * @param tuple
-   */
-  @Override
-  public void emitDirect(int taskId, List<Object> tuple) {
-
-    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.TASK_ID, new Integer(taskId)));
-    super.emitDirect(taskId, t);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackKafkaSpout.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackKafkaSpout.java b/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackKafkaSpout.java
deleted file mode 100644
index 431bdf9..0000000
--- a/metron-streaming/Metron-Common/src/main/java/storm/kafka/CallbackKafkaSpout.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package storm.kafka;
-
-import backtype.storm.Config;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import storm.kafka.*;
-
-import java.util.*;
-
-public class CallbackKafkaSpout extends KafkaSpout {
-  static final long serialVersionUID = 0xDEADBEEFL;
-  Class<? extends Callback> callbackClazz;
-  Callback _callback;
-  EmitContext _context;
-  public CallbackKafkaSpout(SpoutConfig spoutConfig, String callbackClass) {
-    this(spoutConfig, toCallbackClass(callbackClass));
-  }
-
-  public CallbackKafkaSpout(SpoutConfig spoutConf, Class<? extends Callback> callback) {
-    super(spoutConf);
-    callbackClazz = callback;
-  }
-
-  public void initialize() {
-    _callback = createCallback(callbackClazz);
-    _context = new EmitContext().with(EmitContext.Type.SPOUT_CONFIG, _spoutConfig)
-            .with(EmitContext.Type.UUID, _uuid);
-    _callback.initialize(_context);
-  }
-
-
-  private static Class<? extends Callback> toCallbackClass(String callbackClass)  {
-    try{
-      return (Class<? extends Callback>) Callback.class.forName(callbackClass);
-    }
-    catch (ClassNotFoundException e) {
-      throw new RuntimeException(callbackClass + " not found", e);
-    }
-  }
-
-  protected Callback createCallback(Class<? extends Callback> callbackClass)  {
-    try {
-      return callbackClass.newInstance();
-    } catch (InstantiationException e) {
-      throw new RuntimeException("Unable to instantiate callback", e);
-    } catch (IllegalAccessException e) {
-      throw new RuntimeException("Illegal access", e);
-    }
-  }
-
-  @Override
-  public void open(Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
-    if(_callback == null) {
-      initialize();
-    }
-    super.open( conf, context
-            , new CallbackCollector(_callback, collector
-                    ,_context.cloneContext().with(EmitContext.Type.OPEN_CONFIG, conf)
-                    .with(EmitContext.Type.TOPOLOGY_CONTEXT, context)
-            )
-    );
-  }
-
-  @Override
-  public void close() {
-    super.close();
-    if(_callback != null) {
-      try {
-        _callback.close();
-      } catch (Exception e) {
-        throw new IllegalStateException("Unable to close callback", e);
-      }
-    }
-  }
-}



[42/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/config/sensors/pcap.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/config/sensors/pcap.json b/metron-deployment/roles/metron_streaming/files/config/sensors/pcap.json
new file mode 100644
index 0000000..7792165
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/config/sensors/pcap.json
@@ -0,0 +1,19 @@
+{
+  "index": "pcap",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_src_addr", "ip_dst_addr"],
+    "host": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_dst_addr" : [ "malicious_ip" ]
+    ,"ip_src_addr" : [ "malicious_ip" ]
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/config/sensors/snort.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/config/sensors/snort.json b/metron-deployment/roles/metron_streaming/files/config/sensors/snort.json
new file mode 100644
index 0000000..c5b6dcc
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/config/sensors/snort.json
@@ -0,0 +1,18 @@
+{
+  "index": "snort",
+  "batchSize": 1,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+ "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_src_addr" : ["malicious_ip"],
+    "ip_dst_addr" : ["malicious_ip"]
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/config/sensors/yaf.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/config/sensors/yaf.json b/metron-deployment/roles/metron_streaming/files/config/sensors/yaf.json
new file mode 100644
index 0000000..2b46c9a
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/config/sensors/yaf.json
@@ -0,0 +1,19 @@
+{
+  "index": "yaf",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_src_addr" : ["malicious_ip"],
+    "ip_dst_addr" : ["malicious_ip"]
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/extractor.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/extractor.json b/metron-deployment/roles/metron_streaming/files/extractor.json
new file mode 100644
index 0000000..545202a
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/extractor.json
@@ -0,0 +1,12 @@
+{
+  "config": {
+    "columns": {
+      "ip": 0
+    },
+    "indicator_column": "ip",
+    "type" : "malicious_ip",
+    "separator": ","
+  },
+  "extractor": "CSV"
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/yaf_index.template
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/yaf_index.template b/metron-deployment/roles/metron_streaming/files/yaf_index.template
new file mode 100644
index 0000000..c8c1702
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/yaf_index.template
@@ -0,0 +1,36 @@
+{
+    "template" : "yaf_index*",
+    "mappings" : {
+        "yaf_doc" : {
+            "properties": {
+                "message": {
+                    "properties": {
+                        "@timestamp":{"type":"date","format":"dateOptionalTime"},
+                        "end-time":{"type":"string"},
+                        "duration":{"type":"string"},
+                        "rtt":{"type":"string"},
+                        "proto":{"type":"string"},
+                        "sip":{"type":"string"},
+                        "sp":{"type":"string"},
+                        "dip":{"type":"string"},
+                        "dp":{"type":"string"},
+                        "iflags":{"type":"string"},
+                        "uflags":{"type":"string"},
+                        "riflags":{"type":"string"},
+                        "ruflags":{"type":"string"},
+                        "isn":{"type":"string"},
+                        "risn":{"type":"string"},
+                        "tag":{"type":"string"},
+                        "rtag":{"type":"string"},
+                        "pkt":{"type":"string"},
+                        "oct":{"type":"string"},
+                        "rpkt":{"type":"string"},
+                        "roct":{"type":"string"},
+                        "app":{"type":"string"},
+                        "end-reason":{"type":"string"}
+                    }
+                }
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/meta/main.yml b/metron-deployment/roles/metron_streaming/meta/main.yml
new file mode 100644
index 0000000..6820b2c
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/meta/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/es_purge.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/es_purge.yml b/metron-deployment/roles/metron_streaming/tasks/es_purge.yml
new file mode 100644
index 0000000..22616ca
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/es_purge.yml
@@ -0,0 +1,42 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create Empty Log Files for ES Purge
+  file:
+    path: "{{ item }}"
+    state: touch
+    owner: hdfs
+    group: hdfs
+    mode: 0644
+  with_items:
+    - /var/log/bro-purge/cron-es-bro-purge.log
+    - /var/log/yaf-purge/cron-es-yaf-purge.log
+    - /var/log/snort-purge/cron-es-snort-purge.log
+
+
+- name: Purge Elasticsearch Indices every 30 days.
+  cron:
+    name: "{{ item.name }}"
+    job: "{{ item.job }}"
+    special_time: daily
+    user: hdfs
+  with_items:
+    - { name: "bro_es_purge", job:  "{{ es_bro_purge_cronjob }}" }
+    - { name: "yaf_es_purge", job: "{{ es_yaf_purge_cronjob }}" }
+    - { name: "snort_es_purge", job: "{{ es_snort_purge_cronjob }}" }
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/grok_upload.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/grok_upload.yml b/metron-deployment/roles/metron_streaming/tasks/grok_upload.yml
new file mode 100644
index 0000000..d857bf5
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/grok_upload.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create HDFS directory for grok patterns
+  command: hdfs dfs -mkdir -p {{ metron_hdfs_output_dir }}/patterns
+  become: yes
+  become_user: hdfs
+
+- name: Assign hdfs user as owner of {{ metron_hdfs_output_dir }}/patterns HDFS directory
+  command: hdfs dfs -chown -R hdfs:hadoop {{ metron_hdfs_output_dir }}/patterns
+  become: yes
+  become_user: hdfs
+
+- name: Assign permissions of HDFS {{ metron_hdfs_output_dir }}/patterns directory
+  command: hdfs dfs -chmod -R 775 {{ metron_hdfs_output_dir }}/patterns
+  become: yes
+  become_user: hdfs
+
+- name: Upload Grok Patterns to hdfs://{{ metron_hdfs_output_dir }}
+  command: hdfs dfs -put -f {{ metron_directory }}/patterns  {{ metron_hdfs_output_dir }}
+  become: yes
+  become_user: hdfs
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml b/metron-deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
new file mode 100644
index 0000000..252e671
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/hdfs_filesystem.yml
@@ -0,0 +1,41 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create root user HDFS directory
+  command: hdfs dfs -mkdir -p /user/root
+  become: yes
+  become_user: hdfs
+
+- name: Assign root as owner of /user/root HDFS directory
+  command: hdfs dfs -chown root:root /user/root
+  become: yes
+  become_user: hdfs
+
+- name: Create Metron HDFS output directory
+  command: hdfs dfs -mkdir -p {{ metron_hdfs_output_dir }}
+  become: yes
+  become_user: hdfs
+
+- name: Assign hdfs as owner of HDFS output directory
+  command: hdfs dfs -chown hdfs:hadoop {{ metron_hdfs_output_dir }}
+  become: yes
+  become_user: hdfs
+
+- name: Assign permissions of HDFS output directory
+  command: hdfs dfs -chmod 775 {{ metron_hdfs_output_dir }}
+  become: yes
+  become_user: hdfs
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/hdfs_purge.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/hdfs_purge.yml b/metron-deployment/roles/metron_streaming/tasks/hdfs_purge.yml
new file mode 100644
index 0000000..33442e4
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/hdfs_purge.yml
@@ -0,0 +1,52 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create Log Directories for HDFS Purge
+  file:
+      path: "{{ item }}"
+      state: directory
+      mode: 0755
+      owner: hdfs
+      group: hdfs
+  with_items:
+    - /var/log/bro-purge
+    - /var/log/yaf-purge
+    - /var/log/snort-purge
+
+- name: Create Empty Log Files for HDFS Purge
+  file:
+    path: "{{ item }}"
+    state: touch
+    owner: hdfs
+    group: hdfs
+    mode: 0644
+  with_items:
+    - /var/log/bro-purge/cron-hdfs-bro-purge.log
+    - /var/log/yaf-purge/cron-hdfs-yaf-purge.log
+    - /var/log/snort-purge/cron-hdfs-snort-purge.log
+
+- name: Purge HDFS Sensor Data every 30 days.
+  cron:
+    name: "{{ item.name }}"
+    job: "{{ item.job }}"
+    special_time: daily
+    user: hdfs
+  with_items:
+    - { name: "bro_hdfs_purge", job:  "{{ hdfs_bro_purge_cronjob }}" }
+    - { name: "yaf_hdfs_purge", job: "{{ hdfs_yaf_purge_cronjob }}" }
+    - { name: "snort_hdfs_purge", job: "{{ hdfs_snort_purge_cronjob }}" }
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/main.yml b/metron-deployment/roles/metron_streaming/tasks/main.yml
new file mode 100644
index 0000000..2f22dba
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/main.yml
@@ -0,0 +1,155 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create Metron streaming directories
+  file: path="{{ metron_directory }}/{{ item.name }}"  state=directory mode=0755
+  with_items:
+      - { name: 'lib'}
+      - { name: 'scripts'}
+      - { name: 'config'}
+
+
+- name: Copy Metron Solr bundle
+  copy:
+    src: "{{ metron_solr_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Copy Metron Elasticsearch bundle
+  copy:
+    src: "{{ metron_elasticsearch_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Copy Metron Enrichment bundle
+  copy:
+    src: "{{ metron_enrichment_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Copy Metron Parsers bundle
+  copy:
+    src: "{{ metron_parsers_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Copy Metron DataLoads bundle
+  copy:
+    src: "{{ metron_data_management_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Copy Metron Common bundle
+  copy:
+    src: "{{ metron_common_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
+- name: Unbundle Metron bundles
+  shell: cd {{ metron_directory }} && tar xzvf metron-solr*.tar.gz && tar xzvf metron-elasticsearch*.tar.gz && tar xzvf metron-enrichment*.tar.gz && tar xzvf metron-parsers*.tar.gz && tar xzvf metron-data-management*.tar.gz && tar xzvf metron-common*.tar.gz && rm *.tar.gz
+
+- name: Add *-site.xml files to topology jars
+  shell: cd {{ item.config_path }} && jar -uf {{ metron_directory }}/lib/{{ item.jar_name }} {{ item.file_name }}
+  with_items:
+      - { config_path: "{{ hbase_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "hbase-site.xml" }
+      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "core-site.xml" }
+      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_solr_jar_name }}", file_name: "hdfs-site.xml" }
+      - { config_path: "{{ hbase_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "hbase-site.xml" }
+      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "core-site.xml" }
+      - { config_path: "{{ hdfs_config_path }}", jar_name: "{{ metron_elasticsearch_jar_name }}", file_name: "hdfs-site.xml" }
+
+- name: Get default mysql password
+  include_vars: "../roles/mysql_server/defaults/main.yml"
+  when: mysql_root_password is undefined
+
+- include: hdfs_filesystem.yml
+  run_once: true
+
+- include: grok_upload.yml
+  run_once: true
+
+- name: Configure Metron Parser Topologies
+  lineinfile:
+    dest: "{{ metron_parsers_properties_config_path }}"
+    regexp: "{{ item.regexp }}"
+    line: "{{ item.line }}"
+  with_items:
+    - { regexp: "kafka.zk=", line: "kafka.zk={{ zookeeper_url }}" }
+    - { regexp: "kafka.broker=", line: "kafka.broker={{ kafka_broker_url }}" }
+
+- name: Configure Metron Solr topology
+  lineinfile: >
+    dest={{ metron_solr_properties_config_path }}
+    regexp="{{ item.regexp }}"
+    line="{{ item.line }}"
+  with_items:
+    - { regexp: "kafka.zk=", line: "kafka.zk={{ zookeeper_url }}" }
+    - { regexp: "kafka.broker=", line: "kafka.broker={{ kafka_broker_url }}" }
+    - { regexp: "es.ip=", line: "es.ip={{ groups.search[0] }}" }
+    - { regexp: "es.port=", line: "es.port={{ elasticsearch_transport_port }}" }
+    - { regexp: "es.clustername=", line: "es.clustername={{ elasticsearch_cluster_name }}" }
+    - { regexp: "bolt.hdfs.file.system.url=", line: "bolt.hdfs.file.system.url={{ hdfs_url }}" }
+    - { regexp: "spout.kafka.topic.pcap=", line: "spout.kafka.topic.pcap={{ pycapa_topic }}" }
+    - { regexp: "spout.kafka.topic.bro=", line: "spout.kafka.topic.bro={{ bro_topic }}" }
+    - { regexp: "bolt.hbase.table.name=", line: "bolt.hbase.table.name={{ pcap_hbase_table }}" }
+    - { regexp: "threat.intel.tracker.table=", line: "threat.intel.tracker.table={{ tracker_hbase_table }}" }
+    - { regexp: "threat.intel.tracker.cf=", line: "threat.intel.tracker.cf=t" }
+    - { regexp: "threat.intel.simple.hbase.table=", line: "threat.intel.simple.hbase.table={{ threatintel_hbase_table }}" }
+    - { regexp: "threat.intel.simple.hbase.cf=", line: "threat.intel.simple.hbase.cf=t" }
+    - { regexp: "enrichment.simple.hbase.table=", line: "enrichment.simple.hbase.table={{ enrichment_hbase_table }}" }
+    - { regexp: "enrichment.simple.hbase.cf=", line: "enrichment.simple.hbase.cf=t" }
+    - { regexp: "mysql.ip=", line: "mysql.ip={{ groups.mysql[0] }}" }
+    - { regexp: "mysql.password=", line: "mysql.password={{ mysql_root_password }}" }
+    - { regexp: "index.hdfs.output=", line: "index.hdfs.output={{ metron_hdfs_output_dir }}/enrichment/indexed" }
+    - { regexp: "bolt.hdfs.rotation.policy=", line: "bolt.hdfs.rotation.policy={{ metron_hdfs_rotation_policy }}" }
+    - { regexp: "bolt.hdfs.rotation.policy.count=", line: "bolt.hdfs.rotation.policy.count={{ metron_hdfs_rotation_policy_count}}" }
+    - { regexp: "bolt.hdfs.rotation.policy.units=", line: "bolt.hdfs.rotation.policy.units={{ metron_hdfs_rotation_policy_units }}" }
+
+- name: Configure Metron Elasticsearch topology
+  lineinfile: >
+    dest={{ metron_elasticsearch_properties_config_path }}
+    regexp="{{ item.regexp }}"
+    line="{{ item.line }}"
+  with_items:
+    - { regexp: "kafka.zk=", line: "kafka.zk={{ zookeeper_url }}" }
+    - { regexp: "kafka.broker=", line: "kafka.broker={{ kafka_broker_url }}" }
+    - { regexp: "es.ip=", line: "es.ip={{ groups.search[0] }}" }
+    - { regexp: "es.port=", line: "es.port={{ elasticsearch_transport_port }}" }
+    - { regexp: "es.clustername=", line: "es.clustername={{ elasticsearch_cluster_name }}" }
+    - { regexp: "bolt.hdfs.file.system.url=", line: "bolt.hdfs.file.system.url={{ hdfs_url }}" }
+    - { regexp: "spout.kafka.topic.pcap=", line: "spout.kafka.topic.pcap={{ pycapa_topic }}" }
+    - { regexp: "spout.kafka.topic.bro=", line: "spout.kafka.topic.bro={{ bro_topic }}" }
+    - { regexp: "bolt.hbase.table.name=", line: "bolt.hbase.table.name={{ pcap_hbase_table }}" }
+    - { regexp: "threat.intel.tracker.table=", line: "threat.intel.tracker.table={{ tracker_hbase_table }}" }
+    - { regexp: "threat.intel.tracker.cf=", line: "threat.intel.tracker.cf=t" }
+    - { regexp: "threat.intel.simple.hbase.table=", line: "threat.intel.simple.hbase.table={{ threatintel_hbase_table }}" }
+    - { regexp: "threat.intel.simple.hbase.cf=", line: "threat.intel.simple.hbase.cf=t" }
+    - { regexp: "enrichment.simple.hbase.table=", line: "enrichment.simple.hbase.table={{ enrichment_hbase_table }}" }
+    - { regexp: "enrichment.simple.hbase.cf=", line: "enrichment.simple.hbase.cf=t" }
+    - { regexp: "mysql.ip=", line: "mysql.ip={{ groups.mysql[0] }}" }
+    - { regexp: "mysql.password=", line: "mysql.password={{ mysql_root_password }}" }
+    - { regexp: "index.hdfs.output=", line: "index.hdfs.output={{ metron_hdfs_output_dir }}/enrichment/indexed" }
+    - { regexp: "bolt.hdfs.rotation.policy=", line: "bolt.hdfs.rotation.policy={{ metron_hdfs_rotation_policy }}" }
+    - { regexp: "bolt.hdfs.rotation.policy.count=", line: "bolt.hdfs.rotation.policy.count={{ metron_hdfs_rotation_policy_count}}" }
+    - { regexp: "bolt.hdfs.rotation.policy.units=", line: "bolt.hdfs.rotation.policy.units={{ metron_hdfs_rotation_policy_units }}" }
+
+- include: source_config.yml
+  run_once: true
+
+- include: threat_intel.yml
+  run_once: true
+  when: threat_intel_bulk_load == True
+
+- include: metron_topology.yml
+
+- include: hdfs_purge.yml
+
+- include: es_purge.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/metron_topology.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/metron_topology.yml b/metron-deployment/roles/metron_streaming/tasks/metron_topology.yml
new file mode 100644
index 0000000..3d64f2b
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/metron_topology.yml
@@ -0,0 +1,33 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Submit Metron Parser topologies
+  command: storm jar {{ metron_directory }}/lib/{{ metron_parsers_jar_name }} org.apache.storm.flux.Flux  --filter {{ metron_parsers_properties_config_path }} --remote {{ item }}
+  with_items:
+      - "{{ storm_parser_topologies }}"
+
+- name: Submit Solr Metron Enrichment topology
+  command: storm jar {{ metron_directory }}/lib/{{ metron_solr_jar_name }} org.apache.storm.flux.Flux  --filter {{ metron_solr_properties_config_path }} --remote {{ item }}
+  with_items:
+      - "{{ storm_enrichment_topology }}"
+  when: install_solr | default(False) == True
+
+- name: Submit Elasticsearch Metron Enrichment topology
+  command: storm jar {{ metron_directory }}/lib/{{ metron_elasticsearch_jar_name }} org.apache.storm.flux.Flux  --filter {{ metron_elasticsearch_properties_config_path }} --remote {{ item }}
+  with_items:
+      - "{{ storm_enrichment_topology }}"
+  when: install_elasticsearch | default(False) == True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/source_config.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/source_config.yml b/metron-deployment/roles/metron_streaming/tasks/source_config.yml
new file mode 100644
index 0000000..897d0f1
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/source_config.yml
@@ -0,0 +1,51 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create Source Config Directory
+  file:
+    path: "{{ zookeeper_config_path }}"
+    state: directory
+
+- name: Copy Elasticsearch Global Config File
+  template:
+    src: "templates/config/elasticsearch.global.json"
+    dest: "{{ zookeeper_global_config_path }}"
+    mode: 0644
+  when: install_elasticsearch | default(False) == True
+
+- name: Copy Solr Global Config File
+  template:
+    src: "../roles/metron_streaming/templates/config/solr.global.json"
+    dest: "{{ zookeeper_global_config_path }}"
+    mode: 0644
+  when: install_solr | default(False) == True
+
+- name: Copy Sensor Config Files
+  copy:
+    src: "{{ item }}"
+    dest: "{{ zookeeper_config_path }}"
+    mode: 0644
+  with_items:
+    - ../roles/metron_streaming/files/config/
+
+- name: Load Config
+  shell: "{{ metron_directory }}/scripts/zk_load_configs.sh -p {{ zookeeper_config_path }} -z {{ zookeeper_url }} && touch {{ zookeeper_config_path }}/configured"
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/tasks/threat_intel.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/threat_intel.yml b/metron-deployment/roles/metron_streaming/tasks/threat_intel.yml
new file mode 100644
index 0000000..f1b7534
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/tasks/threat_intel.yml
@@ -0,0 +1,46 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+
+- name: Create Bulk load working Directory
+  file:
+    path: "{{ threat_intel_work_dir }}"
+    state: directory
+
+- name: Copy extractor.json to {{ inventory_hostname }}
+  copy:
+    src: ../roles/metron_streaming/files/extractor.json
+    dest: "{{  threat_intel_work_dir }}"
+    mode: 0644
+
+- name: Copy Bulk Load CSV File
+  template:
+    src: "{{ threat_intel_csv_filepath }}"
+    dest: "{{ threat_intel_work_dir }}/{{ threat_intel_csv_filename }}"
+    mode: 0644
+
+- name: Copy Bulk Load CSV File to HDFS
+  command: "hdfs dfs -put -f {{ threat_intel_work_dir }}/{{ threat_intel_csv_filename }} ."
+
+- name: Run Threat Intel Bulk Load
+  shell: "{{ threat_intel_bin }} -f t --table {{threatintel_hbase_table}} -e {{ threat_intel_work_dir }}/extractor.json  -i /user/root && touch {{ threat_intel_work_dir }}/loaded"
+  args:
+    creates: "{{ threat_intel_work_dir }}/loaded"
+
+- name: Clean up HDFS File
+  command: "hdfs dfs -rm {{ threat_intel_csv_filename }}"
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/templates/config/elasticsearch.global.json b/metron-deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
new file mode 100644
index 0000000..8177102
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/templates/config/elasticsearch.global.json
@@ -0,0 +1,6 @@
+{
+  "es.clustername": "{{ elasticsearch_cluster_name }}",
+  "es.ip": "{{ groups.search[0] }}",
+  "es.port": "{{ elasticsearch_transport_port }}",
+  "es.date.format": "yyyy.MM.dd.HH"
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/templates/config/solr.global.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/templates/config/solr.global.json b/metron-deployment/roles/metron_streaming/templates/config/solr.global.json
new file mode 100644
index 0000000..5cb7a4d
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/templates/config/solr.global.json
@@ -0,0 +1,6 @@
+{
+  "solr.zookeeper": "{{ zookeeper_url }}",
+  "solr.collection": "{{ solr_collection_name }}",
+  "solr.numShards": {{ solr_number_shards }},
+  "solr.replicationFactor": {{ solr_replication_factor }}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/templates/threat_ip.csv
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/templates/threat_ip.csv b/metron-deployment/roles/metron_streaming/templates/threat_ip.csv
new file mode 100644
index 0000000..3ac38f3
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/templates/threat_ip.csv
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#Add single column of ip address to alert
+#Public lists are available on the internet
+# example: 
+23.113.113.105
+24.107.205.249
+24.108.62.255
+24.224.153.71
+27.4.1.212
+27.131.149.102
+31.24.30.31
+31.131.251.33
+31.186.99.250
+31.192.209.119
+31.192.209.150
+31.200.244.17
+37.34.52.185
+37.58.112.101
+37.99.146.27
+37.128.132.96
+37.140.195.177
+37.140.199.100

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_ui/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_ui/defaults/main.yml b/metron-deployment/roles/metron_ui/defaults/main.yml
new file mode 100644
index 0000000..23aed40
--- /dev/null
+++ b/metron-deployment/roles/metron_ui/defaults/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+metron_version: 0.1BETA
+metron_directory: /usr/metron/{{ metron_version }}
+metron_ui_directory: "{{ metron_directory }}/metron-ui"
+metron_temp_archive: /tmp/metron-ui.tar.gz

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_ui/tasks/copy-source.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_ui/tasks/copy-source.yml b/metron-deployment/roles/metron_ui/tasks/copy-source.yml
new file mode 100644
index 0000000..703b7f9
--- /dev/null
+++ b/metron-deployment/roles/metron_ui/tasks/copy-source.yml
@@ -0,0 +1,46 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Verify {{ metron_ui_directory }} exists
+  file:
+    path: "{{ metron_ui_directory }}"
+    state: directory
+    mode: 0755
+    owner: root
+    group: root
+
+- name: Archive metron-ui on localhost
+  shell: tar --exclude='./node_modules' -czf {{ metron_temp_archive }} .
+  args:
+    chdir: "{{ playbook_dir }}/../../metron-ui"
+    creates: "{{ metron_temp_archive }}"
+    warn: false    #Warns to use unarchive - unarchive does not archive
+  become: false
+  delegate_to: localhost
+  run_once: true
+
+- name: Extract metron-ui tarball
+  unarchive:
+    src: "{{ metron_temp_archive }}"
+    dest: "{{ metron_ui_directory }}"
+    creates: "{{ metron_ui_directory}}/config"
+
+- name: Delete {{ metron_temp_archive }}
+  local_action: file path="{{ metron_temp_archive }}" state=absent
+  become: false
+  run_once: true
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_ui/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_ui/tasks/main.yml b/metron-deployment/roles/metron_ui/tasks/main.yml
new file mode 100644
index 0000000..fd3422b
--- /dev/null
+++ b/metron-deployment/roles/metron_ui/tasks/main.yml
@@ -0,0 +1,56 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install Metron UI dependencies
+  yum:
+    pkg: "{{ item }}"
+    state: installed
+  with_items:
+      - libpcap-devel
+      - wireshark
+      - nodejs
+      - npm
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- include: copy-source.yml
+
+- name: Configure Metron UI
+  lineinfile:
+    dest="{{ metron_ui_directory }}/config.json"
+    regexp="{{ item.regexp }}"
+    line="{{ item.line }}"
+    state=present
+  with_items:
+    - { regexp: '"elasticsearch":', line: '"elasticsearch": { "url": "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}" },' }
+    - { regexp: '"pcap":', line: '  "pcap": { "url": "http://{{ groups.web[0] }}:{{ pcapservice_port }}/pcapGetter","mock": false }' }
+
+- name: Install Node dependencies
+  npm:
+    name: pm2
+    path: "{{ metron_ui_directory }}"
+    global: true
+
+- name: Install Metron UI
+  npm:
+    path: "{{ metron_ui_directory }}"
+    production: no
+
+- name: Start Metron UI
+  shell: "pm2 start {{ metron_ui_directory }}/lib/metron-ui.js --name metron"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_client/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_client/tasks/main.yml b/metron-deployment/roles/mysql_client/tasks/main.yml
new file mode 100644
index 0000000..8c54c23
--- /dev/null
+++ b/metron-deployment/roles/mysql_client/tasks/main.yml
@@ -0,0 +1,35 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+
+- name: Get default MySQL password
+  include_vars: "../roles/mysql_server/defaults/main.yml"
+  when: mysql_root_password is undefined
+
+- name: Allow remote login to mysql
+  template:
+    src: "../roles/mysql_client/templates/db_config.sql"
+    dest: "/tmp/{{ansible_fqdn}}.sql"
+  delegate_to: "{{ groups.mysql[0] }}"
+
+- name: Import DB_Config
+  mysql_db:
+    name: "all"
+    state: "import"
+    target: "/tmp/{{ansible_fqdn}}.sql"
+  ignore_errors: True
+  delegate_to: "{{ groups.mysql[0] }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_client/templates/db_config.sql
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_client/templates/db_config.sql b/metron-deployment/roles/mysql_client/templates/db_config.sql
new file mode 100644
index 0000000..c407a13
--- /dev/null
+++ b/metron-deployment/roles/mysql_client/templates/db_config.sql
@@ -0,0 +1,21 @@
+/*
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+
+CREATE USER 'root'@'{{ ansible_fqdn }}' IDENTIFIED BY '{{ mysql_root_password }}';
+SET PASSWORD FOR 'root'@'{{ ansible_fqdn }}' = PASSWORD('{{ mysql_root_password }}');
+GRANT ALL PRIVILEGES ON *.* to 'root'@'{{ ansible_fqdn }}' WITH GRANT OPTION;
+FLUSH PRIVILEGES;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_server/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_server/defaults/main.yml b/metron-deployment/roles/mysql_server/defaults/main.yml
new file mode 100644
index 0000000..0acbd17
--- /dev/null
+++ b/metron-deployment/roles/mysql_server/defaults/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+mysql_rpm_version: mysql57-community-release-el6-7.noarch
+mysql_yum_repo_url: https://dev.mysql.com/get/{{ mysql_rpm_version }}.rpm
+mysql_root_password: P@ssw0rd

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_server/files/geoip_ddl.sql
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_server/files/geoip_ddl.sql b/metron-deployment/roles/mysql_server/files/geoip_ddl.sql
new file mode 100644
index 0000000..02616c6
--- /dev/null
+++ b/metron-deployment/roles/mysql_server/files/geoip_ddl.sql
@@ -0,0 +1,49 @@
+/*
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+CREATE DATABASE IF NOT EXISTS GEO;
+
+USE GEO;
+
+DROP TABLE IF EXISTS `blocks`;
+CREATE TABLE  `blocks` ( `startIPNum` int(10) unsigned NOT NULL,`endIPNum` int(10) unsigned NOT NULL,`locID`
+int(10) unsigned NOT NULL, PRIMARY KEY  (`startIPNum`,`endIPNum`) )
+ENGINE=MyISAM DEFAULT CHARSET=latin1 PACK_KEYS=1 DELAY_KEY_WRITE=1;
+
+DROP TABLE IF EXISTS `location`;
+CREATE TABLE  `location` (`locID` int(10) unsigned NOT NULL,`country` char(2) default NULL,`region` char(2)
+ default NULL,`city` varchar(45) default NULL,`postalCode` char(7) default NULL,`latitude` double default
+NULL,`longitude` double default NULL,`dmaCode` char(3) default NULL,`areaCode` char(3) default NULL,PRIMARY KEY
+  (`locID`),KEY `Index_Country` (`country`) ) ENGINE=MyISAM DEFAULT CHARSET=latin1 ROW_FORMAT=FIXED;
+
+load data infile '/var/lib/mysql-files/GeoLiteCity-Blocks.csv'  into table `blocks`  fields terminated by ',' optionally enclosed by '"'  lines terminated by '\n' ignore 2 lines;
+load data infile '/var/lib/mysql-files/GeoLiteCity-Location.csv'  into table `location`  fields terminated by ',' optionally enclosed by '"'  lines terminated by '\n' ignore 2 lines;
+
+
+DELIMITER $$
+DROP FUNCTION IF EXISTS `IPTOLOCID` $$
+CREATE FUNCTION `IPTOLOCID`( ip VARCHAR(15)) RETURNS int(10) unsigned
+  BEGIN
+    DECLARE ipn INTEGER UNSIGNED;
+    DECLARE locID_var INTEGER;
+    IF ip LIKE '192.168.%' OR ip LIKE '10.%' THEN RETURN 0;
+    END IF;
+    SET ipn = INET_ATON(ip);
+    SELECT locID INTO locID_var FROM `blocks` INNER JOIN (SELECT MAX(startIPNum) AS start FROM `blocks` WHERE startIPNum <= ipn) AS s ON (startIPNum = s.start) WHERE endIPNum >= ipn;
+    RETURN locID_var;
+  END
+$$
+DELIMITER ;

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_server/handlers/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_server/handlers/main.yml b/metron-deployment/roles/mysql_server/handlers/main.yml
new file mode 100644
index 0000000..112c5ca
--- /dev/null
+++ b/metron-deployment/roles/mysql_server/handlers/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: restart elasticsearch
+  service: name=elasticsearch state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_server/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_server/tasks/main.yml b/metron-deployment/roles/mysql_server/tasks/main.yml
new file mode 100644
index 0000000..a484ed0
--- /dev/null
+++ b/metron-deployment/roles/mysql_server/tasks/main.yml
@@ -0,0 +1,93 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create temporary directories
+  file:
+    path: "/tmp/geoip"
+    state: directory
+    mode: 0755
+
+- name: Install Mysql Community Release Repo Def
+  get_url:
+    dest: /tmp/{{ mysql_rpm_version }}.rpm
+    url: "{{ mysql_yum_repo_url }}"
+
+- name: Install Mysql Community Release Repo
+  yum:
+    pkg: /tmp/{{ mysql_rpm_version }}.rpm
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Install MySQL
+  yum:
+    name: "{{ item }}"
+    state: latest
+  with_items:
+    - "mysql-community-server"
+    - "MySQL-python"
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Start MySQL
+  service:
+    name: mysqld
+    state: started
+    enabled: yes
+
+- name: Retrieve temporary root password
+  shell: "grep 'temporary password' /var/log/mysqld.log | sed 's/.*root@localhost: //'"
+  args:
+    creates: ~/.my.cnf
+  register: temp_root_password
+
+- name: Update mysql root password
+  command: "mysqladmin --user=root --password='{{ temp_root_password.stdout }}' password '{{ mysql_root_password }}'"
+  ignore_errors: yes
+  args:
+    creates: ~/.my.cnf
+
+- name: Create .my.cnf
+  template:
+    src: "../roles/mysql_server/templates/.my.cnf"
+    dest: ~/.my.cnf
+
+
+- name: Download GeoIP databases
+  unarchive:
+    src:  http://geolite.maxmind.com/download/geoip/database/GeoLiteCity_CSV/GeoLiteCity-latest.tar.xz
+    dest: /tmp/geoip
+    copy: no
+    creates: /tmp/geoip/*/GeoLiteCity-Blocks.csv
+
+- name: Copy to MySQL import directory
+  shell: "cp /tmp/geoip/*/*.csv /var/lib/mysql-files/"
+
+- name: Copy DDL
+  copy:
+    src: geoip_ddl.sql
+    dest: /tmp/geoip_ddl.sql
+
+- name: Import GeoIP DDL
+  mysql_db:
+    name: all
+    state: import
+    target: /tmp/geoip_ddl.sql

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/mysql_server/templates/.my.cnf
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/mysql_server/templates/.my.cnf b/metron-deployment/roles/mysql_server/templates/.my.cnf
new file mode 100644
index 0000000..d5c0825
--- /dev/null
+++ b/metron-deployment/roles/mysql_server/templates/.my.cnf
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+[client]
+user=root
+password={{ mysql_root_password }}
+host=localhost
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ntp/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ntp/tasks/main.yml b/metron-deployment/roles/ntp/tasks/main.yml
new file mode 100644
index 0000000..7b1b9a8
--- /dev/null
+++ b/metron-deployment/roles/ntp/tasks/main.yml
@@ -0,0 +1,31 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install ntp
+  yum:
+    name: ntp
+    state: present
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Ensure ntp is running and enabled
+  service:
+    name: ntpd
+    state: started
+    enabled: yes

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/defaults/main.yml b/metron-deployment/roles/packet-capture/defaults/main.yml
new file mode 100644
index 0000000..3e6358c
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/defaults/main.yml
@@ -0,0 +1,32 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# dpdk
+dpdk_home: "/usr/local/dpdk"
+dpdk_version: "2.2.0"
+dpdk_sdk: "/root/dpdk-{{ dpdk_version }}"
+dpdk_target: "x86_64-native-linuxapp-gcc"
+num_huge_pages: 512
+extra_cflags: -g
+
+# pcapture
+pcapture_work_dir: /root/packet-capture
+pcapture_prefix: /usr/local/bin
+pcapture_ld_library_path: /usr/local/lib
+pcapture_portmask: 0x01
+pcapture_kafka_config: /etc/pcapture.conf
+pcapture_bin: pcapture

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/meta/main.yml b/metron-deployment/roles/packet-capture/meta/main.yml
new file mode 100644
index 0000000..d253e88
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/meta/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - librdkafka

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/debug.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/debug.yml b/metron-deployment/roles/packet-capture/tasks/debug.yml
new file mode 100644
index 0000000..06f1526
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/debug.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - name: Install debug utilities
+    yum: name=yum-utils
+    tags:
+      - debug
+
+  - name: Install debug symbols
+    shell: debuginfo-install -y glibc glib2 zlib
+    tags:
+      - debug

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/dependencies.yml b/metron-deployment/roles/packet-capture/tasks/dependencies.yml
new file mode 100644
index 0000000..4d6edc4
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/dependencies.yml
@@ -0,0 +1,38 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - name: Install dependencies
+    yum: name={{ item }}
+    with_items:
+      - "@Development tools"
+      - pciutils
+      - net-tools
+      - glib2
+      - glib2-devel
+      - git
+
+  #
+  # install prerequisite packages and the latest kernel headers.  need to
+  # ensure that the kernel headers match the current running kernel version.
+  # if this is not the case, the DPDK build process will fail
+  #
+  - name: Install latest kernel headers and source
+    yum: name={{ item }} state=latest
+    with_items:
+      - kernel
+      - kernel-devel
+      - kernel-headers

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/dpdk.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/dpdk.yml b/metron-deployment/roles/packet-capture/tasks/dpdk.yml
new file mode 100644
index 0000000..3780be7
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/dpdk.yml
@@ -0,0 +1,59 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - name: "Download DPDK version {{ dpdk_version }}"
+    unarchive:
+      src: "http://dpdk.org/browse/dpdk/snapshot/dpdk-{{ dpdk_version }}.tar.gz"
+      dest: "/root"
+      creates: "{{ dpdk_sdk }}"
+      copy: no
+
+  - name: "Configure DPDK for the target environment: {{ dpdk_target }}"
+    shell: "make config T={{ dpdk_target }} DESTDIR={{ dpdk_home }}"
+    args:
+      chdir: "{{ dpdk_sdk }}"
+      creates: "{{ dpdk_home }}"
+
+  - name: "Turn on debug flags"
+    lineinfile:
+      dest: "{{ dpdk_sdk }}/config/common_linuxapp"
+      regexp: 'DEBUG=n'
+      line: 'DEBUG=y'
+    tags:
+      - debug
+
+  - name: "Build DPDK for the target environment: {{ dpdk_target }}"
+    shell: "make install T={{ dpdk_target }} DESTDIR={{ dpdk_home }} EXTRA_CFLAGS={{ extra_cflags }}"
+    args:
+      chdir: "{{ dpdk_sdk }}"
+      creates: "{{ dpdk_home }}"
+
+  - name: Load kernel modules to enable userspace IO
+    shell: "{{ item }}"
+    with_items:
+      - modprobe uio_pci_generic
+      - modprobe vfio-pci
+
+  - name: Bind the device to the loaded kernel module(s)
+    shell: "{{ dpdk_home }}/sbin/dpdk_nic_bind --force --bind=uio_pci_generic {{ item }}"
+    with_items: "{{ dpdk_device }}"
+
+  - name: Set useful environment variables
+    lineinfile: "dest=/root/.bash_profile line={{ item }}"
+    with_items:
+      - "export RTE_SDK={{ dpdk_sdk }}"
+      - "export RTE_TARGET={{ dpdk_target }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/kernel.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/kernel.yml b/metron-deployment/roles/packet-capture/tasks/kernel.yml
new file mode 100644
index 0000000..cd4abe6
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/kernel.yml
@@ -0,0 +1,51 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+#
+# DPDK requires specific kernel boot parameters.  set the params and reboot
+# the host, if the actual params differ from what is expected.
+#
+---
+  - set_fact:
+      expected_kernel_params: "default_hugepagesz=1G hugepagesz=1G hugepages={{ num_huge_pages }} iommu=pt intel_iommu=on"
+
+  - name: Check kernel boot parameters
+    shell: "cat /proc/cmdline"
+    register: actual_kernel_params
+
+  - name: Alter kernel boot parameters
+    lineinfile:
+      dest: /etc/default/grub
+      regexp:  '^(GRUB_CMDLINE_LINUX=\"[^\"]+)\"$'
+      line: '\1 {{ expected_kernel_params }}"'
+      backrefs: yes
+    when: not expected_kernel_params in actual_kernel_params.stdout
+
+  - name: Update grub with kernel boot parameters
+    shell: /sbin/grub2-mkconfig -o /boot/grub2/grub.cfg
+    when: not expected_kernel_params in actual_kernel_params.stdout
+
+  - name: Restart for modified kernel params
+    command: shutdown -r now "modified kernel params"
+    async: 0
+    poll: 0
+    ignore_errors: true
+    when: not expected_kernel_params in actual_kernel_params.stdout
+    
+  - name: Wait for reboot of '{{ inventory_hostname }}'
+    local_action: wait_for host={{ inventory_hostname }} state=started port=22 timeout=300 delay=10
+    become: false
+    when: not expected_kernel_params in actual_kernel_params.stdout

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/main.yml b/metron-deployment/roles/packet-capture/tasks/main.yml
new file mode 100644
index 0000000..f096178
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - include: dependencies.yml
+  - include: kernel.yml
+  - include: dpdk.yml
+  - include: pcapture.yml
+  - include: debug.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/tasks/pcapture.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/tasks/pcapture.yml b/metron-deployment/roles/packet-capture/tasks/pcapture.yml
new file mode 100644
index 0000000..d00d379
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/tasks/pcapture.yml
@@ -0,0 +1,49 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Distribute pcapture
+  copy: src=../../../metron-sensors/packet-capture dest={{ pcapture_work_dir | dirname }} mode=0755
+
+- name: Build pcapture
+  shell: "{{ item }}"
+  args:
+    chdir: "{{ pcapture_work_dir }}"
+  with_items:
+    - make
+  environment:
+    RTE_SDK: "{{ dpdk_sdk }}"
+    RTE_TARGET: "{{ dpdk_target }}"
+    LD_LIBRARY_PATH: "{{ pcapture_ld_library_path }}"
+
+- name: Install pcapture
+  shell: "cp {{ pcapture_work_dir }}/src/build/app/{{ pcapture_bin }} {{ pcapture_prefix }}"
+  args:
+    chdir: "{{ pcapture_work_dir }}"
+    creates: "{{ pcapture_prefix }}/{{ pcapture_bin }}"
+
+- name: Deploy configuration
+  template: src=pcapture.conf dest={{ pcapture_kafka_config }} mode=0755
+
+- name: Deploy service
+  template: src=pcapture dest=/etc/init.d/ mode=0755
+
+- name: Register the service with systemd
+  shell: systemctl enable pcapture
+  when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "7"
+
+- name: Run pcapture
+  service: name=pcapture state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/templates/pcapture
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/templates/pcapture b/metron-deployment/roles/packet-capture/templates/pcapture
new file mode 100644
index 0000000..8c2221a
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/templates/pcapture
@@ -0,0 +1,93 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# pcapture daemon
+# chkconfig: 345 20 80
+# description: Packet capture probe
+# processname: pcapture
+#
+
+export RTE_SDK="{{ dpdk_sdk }}"
+export RTE_TARGET="{{ dpdk_target }}"
+export LD_LIBRARY_PATH="{{ pcapture_ld_library_path }}"
+
+DAEMON_PATH="{{ dpdk_sdk }}"
+DAEMON="{{ pcapture_prefix }}/{{ pcapture_bin }}"
+DAEMONOPTS+=" -- "
+DAEMONOPTS+="-p {{ pcapture_portmask }} "
+DAEMONOPTS+="-t {{ pcapture_topic }} "
+DAEMONOPTS+="-c {{ pcapture_kafka_config }} "
+
+NAME="pcapture"
+DESC="Metron network packet capture probe"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+DAEMONLOG=/var/log/$NAME.log
+NOW=`date`
+
+case "$1" in
+  start)
+    printf "%-50s" "Starting $NAME..."
+    echo "$NOW:  Starting $NAME..." >> $DAEMONLOG
+    cd $DAEMON_PATH
+    PID=`$DAEMON $DAEMONOPTS >> $DAEMONLOG 2>&1 & echo $!`
+    if [ -z $PID ]; then
+        printf "%s\n" "Fail"
+    else
+        echo $PID > $PIDFILE
+        printf "%s\n" "Ok"
+    fi
+  ;;
+
+  status)
+    printf "%-50s" "Checking $NAME..."
+    if [ -f $PIDFILE ]; then
+      PID=`cat $PIDFILE`
+      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
+        printf "%s\n" "Process dead but pidfile exists"
+      else
+        echo "Running"
+      fi
+    else
+      printf "%s\n" "Service not running"
+    fi
+  ;;
+
+  stop)
+    printf "%-50s" "Stopping $NAME"
+    PID=`cat $PIDFILE`
+    cd $DAEMON_PATH
+    if [ -f $PIDFILE ]; then
+        echo "$NOW:  Stopping $NAME with pid=$PID" >> $DAEMONLOG
+        kill -HUP $PID
+        printf "%s\n" "Ok"
+        rm -f $PIDFILE
+    else
+        printf "%s\n" "pidfile not found"
+    fi
+  ;;
+
+  restart)
+    $0 stop
+    $0 start
+  ;;
+
+  *)
+    echo "Usage: $0 {status|start|stop|restart}"
+    exit 1
+esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/packet-capture/templates/pcapture.conf
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/packet-capture/templates/pcapture.conf b/metron-deployment/roles/packet-capture/templates/pcapture.conf
new file mode 100644
index 0000000..e404476
--- /dev/null
+++ b/metron-deployment/roles/packet-capture/templates/pcapture.conf
@@ -0,0 +1,67 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#
+# kafka global settings
+#
+[kafka-global]
+
+# initial list of kafka brokers
+metadata.broker.list = {{ kafka_broker_url }}
+
+# identifies the client to kafka
+client.id = metron-packet-capture
+
+# max number of messages allowed on the producer queue
+queue.buffering.max.messages = 1000
+
+# maximum time, in milliseconds, for buffering data on the producer queue
+queue.buffering.max.ms = 3000
+
+# compression codec = none, gzip or snappy
+compression.codec = snappy
+
+# maximum number of messages batched in one MessageSet (increase for better compression)
+batch.num.messages = 10
+
+# max times to retry sending a failed message set
+message.send.max.retries = 5
+
+# backoff time before retrying a message send
+retry.backoff.ms = 250
+
+# how often statistics are emitted; 0 = never
+statistics.interval.ms = 0
+
+# only provide delivery reports for failed messages
+delivery.report.only.error = false
+
+#
+# kafka topic settings
+#
+[kafka-topic]
+
+# broker acks { 1 = leader ack, 0 = no acks, -1 = in sync replica ack }
+request.required.acks = 1
+
+# local message timeout. This value is only enforced locally and limits the time a
+# produced message waits for successful delivery. A time of 0 is infinite.
+message.timeout.ms = 10000
+
+# report offset of produced message back to application. The application must
+# use the dr_msg_cb to retrieve the offset from rd_kafka_message_t.offset
+produce.offset.report = false

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/README.md b/metron-deployment/roles/pcap_replay/README.md
new file mode 100644
index 0000000..8bc92c9
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/README.md
@@ -0,0 +1,44 @@
+Pcap Replay
+===========
+
+This project enables packet capture data to be replayed through a network interface to simulate live network traffic.  This can be used to support functional, performance, and load testing of Apache Metron.
+
+Getting Started
+---------------
+
+To replay packet capture data, simply start the `pcap-replay` SysV service.  To do this run the following command.
+
+```
+service pcap-replay start
+```
+
+All additional options accepted by `tcpreplay` can be passed to the service script to modify how the network data is replayed.  For example, this makes it simple to control the amount and rate of data replayed during functional, performance and load testing.
+
+Example: Replay data at a rate of 10 mbps.
+
+```
+service pcap-replay start --mbps 10
+```
+
+Example: Replay data at a rate of 10 packets per second.
+
+```
+service pcap-replay start --pps 10
+```
+
+All nodes on the same subnet with their network interface set to promiscuous mode will then be able to capture the network traffic being replayed.  To validate, simply run something like the following.
+
+```
+tcpdump -i eth1
+```
+
+Data
+----
+
+An example packet capture file has been installed at `/opt/pcap-replay/example.pcap`.  By default, the network traffic contained within this file is continually replayed.
+
+To replay your own packet capture data, simply add any number of files containing `libpcap` formatted packet capture data to `/opt/pcap-replay`.  The files must end with the `.pcap` extension.  To pick up newly installed files, simply restart the service.
+
+```
+service pcap-replay restart
+```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/defaults/main.yml b/metron-deployment/roles/pcap_replay/defaults/main.yml
new file mode 100644
index 0000000..b1fae1e
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/defaults/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+pcap_replay_interface: eth0
+pcap_path: /opt/pcap-replay
+tcpreplay_version: 4.1.1
+tcpreplay_prefix: /opt

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/meta/main.yml b/metron-deployment/roles/pcap_replay/meta/main.yml
new file mode 100644
index 0000000..0c47853
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/meta/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - libselinux-python
+  - build-tools
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/tasks/main.yml b/metron-deployment/roles/pcap_replay/tasks/main.yml
new file mode 100644
index 0000000..06919ed
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/tasks/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: tcpreplay.yml
+- include: service.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/tasks/service.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/tasks/service.yml b/metron-deployment/roles/pcap_replay/tasks/service.yml
new file mode 100644
index 0000000..9e13e7f
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/tasks/service.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create pcap directory
+  file: path={{ pcap_path }} state=directory mode=0755
+
+- name: Install init.d service script
+  template: src=pcap-replay dest=/etc/init.d/pcap-replay mode=0755

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/tasks/tcpreplay.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/tasks/tcpreplay.yml b/metron-deployment/roles/pcap_replay/tasks/tcpreplay.yml
new file mode 100644
index 0000000..e24dcf1
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/tasks/tcpreplay.yml
@@ -0,0 +1,38 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download tcpreplay
+  get_url:
+    url: "https://github.com/appneta/tcpreplay/releases/download/v{{ tcpreplay_version }}/tcpreplay-{{ tcpreplay_version }}.tar.gz"
+    dest: "/tmp/tcpreplay-{{ tcpreplay_version }}.tar.gz"
+
+- name: Extract tcpreplay tarball
+  unarchive:
+    src: "/tmp/tcpreplay-{{ tcpreplay_version }}.tar.gz"
+    dest: /opt
+    copy: no
+    creates: "/opt/tcpreplay-{{ tcpreplay_version }}"
+
+- name: Compile and install tcpreplay
+  shell: "{{ item }}"
+  args:
+    chdir: "/opt/tcpreplay-{{ tcpreplay_version }}"
+    creates: "{{ tcpreplay_prefix }}/bin/tcpreplay"
+  with_items:
+    - "./configure --prefix={{ tcpreplay_prefix }}"
+    - make
+    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pcap_replay/templates/pcap-replay
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pcap_replay/templates/pcap-replay b/metron-deployment/roles/pcap_replay/templates/pcap-replay
new file mode 100644
index 0000000..b9ae0c3
--- /dev/null
+++ b/metron-deployment/roles/pcap_replay/templates/pcap-replay
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# pcap replay daemon
+# chkconfig: 345 20 80
+# description: Replays packet capture data stored in libpcap format
+# processname: pcap-replay
+#
+
+DAEMON_PATH="{{ pcap_path }}"
+PCAPIN=`ls $DAEMON_PATH/*.pcap 2> /dev/null`
+IFACE="{{ pcap_replay_interface }}"
+EXTRA_ARGS="${@:2}"
+DAEMON="{{ tcpreplay_prefix }}/bin/tcpreplay"
+DAEMONOPTS="--intf1=$IFACE --loop=0 $EXTRA_ARGS $PCAPIN"
+
+NAME=pcap-replay
+DESC="Replay packet capture data"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+
+case "$1" in
+  start)
+    printf "%-50s" "Starting $NAME..."
+
+    # ensure that a pcap file exists to replay
+    if [ -z "$PCAPIN" ]; then
+      printf "%s: %s\n" "Fail: No pcap files found at " $DAEMON_PATH
+    else
+      # kick-off the daemon
+      cd $DAEMON_PATH
+      PID=`$DAEMON $DAEMONOPTS > /dev/null 2>&1 & echo $!`
+      if [ -z $PID ]; then
+          printf "%s\n" "Fail"
+      else
+          echo $PID > $PIDFILE
+          printf "%s\n" "Ok"
+      fi
+    fi
+  ;;
+
+  status)
+    printf "%-50s" "Checking $NAME..."
+    if [ -f $PIDFILE ]; then
+      PID=`cat $PIDFILE`
+      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
+        printf "%s\n" "Process dead but pidfile exists"
+      else
+        echo "Running"
+      fi
+    else
+      printf "%s\n" "Service not running"
+    fi
+  ;;
+
+  stop)
+    printf "%-50s" "Stopping $NAME"
+    PID=`cat $PIDFILE`
+    cd $DAEMON_PATH
+    if [ -f $PIDFILE ]; then
+        kill -HUP $PID
+        printf "%s\n" "Ok"
+        rm -f $PIDFILE
+    else
+        printf "%s\n" "pidfile not found"
+    fi
+  ;;
+
+  restart)
+    $0 stop
+    $0 start
+  ;;
+
+  *)
+    echo "Usage: $0 {status|start|stop|restart}"
+    exit 1
+esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/meta/main.yml b/metron-deployment/roles/pycapa/meta/main.yml
new file mode 100644
index 0000000..3aaa18d
--- /dev/null
+++ b/metron-deployment/roles/pycapa/meta/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+  - epel
+  - python-pip
+  - kafka-client



[51/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/0117987e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/0117987e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/0117987e

Branch: refs/heads/master
Commit: 0117987ea132ba3e44e495963a2acb482889ae1c
Parents: 86f6deb
Author: merrimanr <me...@gmail.com>
Authored: Tue Apr 26 09:44:57 2016 -0500
Committer: rmerriman <rm...@hortonworks.com>
Committed: Tue Apr 26 09:44:57 2016 -0500

----------------------------------------------------------------------
 .travis.yml                                     |     2 +-
 deployment/.gitignore                           |     2 -
 deployment/README.md                            |    97 -
 deployment/amazon-ec2/.gitignore                |     4 -
 deployment/amazon-ec2/README.md                 |   211 -
 deployment/amazon-ec2/ansible.cfg               |    28 -
 deployment/amazon-ec2/conf/defaults.yml         |    80 -
 deployment/amazon-ec2/conf/ec2.ini              |   105 -
 deployment/amazon-ec2/playbook.yml              |    80 -
 deployment/amazon-ec2/tasks/check-hosts.yml     |    20 -
 deployment/amazon-ec2/tasks/check-volume.yml    |    26 -
 deployment/amazon-ec2/tasks/create-hosts.yml    |    54 -
 deployment/amazon-ec2/tasks/create-keypair.yml  |    29 -
 .../create-open-inbound-security-group.yml      |    26 -
 .../create-open-outbound-security-group.yml     |    26 -
 .../amazon-ec2/tasks/create-security-group.yml  |    28 -
 deployment/amazon-ec2/tasks/create-vpc.yml      |    50 -
 deployment/amazon-ec2/tasks/expand-volume.yml   |    30 -
 deployment/amazon-ec2/tasks/mount-volume.yml    |    32 -
 .../amazon-ec2/tasks/provisioning-report.yml    |    35 -
 deployment/ansible.cfg                          |    23 -
 .../extra_modules/ambari_cluster_state.py       |   392 -
 .../inventory/metron_example/group_vars/all     |    77 -
 deployment/inventory/metron_example/hosts       |    63 -
 .../inventory/multinode-vagrant/group_vars/all  |    75 -
 deployment/inventory/multinode-vagrant/hosts    |    59 -
 .../inventory/singlenode-vagrant/group_vars/all |    87 -
 deployment/inventory/singlenode-vagrant/hosts   |    48 -
 deployment/playbooks/ambari_install.yml         |    55 -
 deployment/playbooks/metron_full_install.yml    |    23 -
 deployment/playbooks/metron_install.yml         |    96 -
 .../roles/ambari_common/defaults/main.yml       |    19 -
 deployment/roles/ambari_common/meta/main.yml    |    22 -
 deployment/roles/ambari_common/tasks/main.yml   |    52 -
 .../ambari_common/tasks/passwd_less_ssh.yml     |    32 -
 .../templates/metron-hadoop-logrotate.yml       |   135 -
 deployment/roles/ambari_common/vars/main.yml    |    21 -
 .../roles/ambari_config/defaults/main.yml       |    30 -
 deployment/roles/ambari_config/meta/main.yml    |    21 -
 deployment/roles/ambari_config/tasks/main.yml   |    42 -
 .../ambari_config/tasks/start_services.yml      |    48 -
 .../vars/multi_vagrant_cluster.yml              |    99 -
 .../roles/ambari_config/vars/single_node_vm.yml |    85 -
 .../roles/ambari_config/vars/small_cluster.yml  |    88 -
 .../roles/ambari_gather_facts/meta/main.yml     |    21 -
 .../roles/ambari_gather_facts/tasks/main.yml    |   151 -
 .../roles/ambari_master/defaults/main.yml       |    19 -
 deployment/roles/ambari_master/tasks/main.yml   |    51 -
 deployment/roles/ambari_slave/files/hostname.sh |    19 -
 deployment/roles/ambari_slave/tasks/main.yml    |    51 -
 deployment/roles/ambari_slave/vars/main.yml     |    24 -
 deployment/roles/bro/meta/main.yml              |    23 -
 deployment/roles/bro/tasks/bro-plugin-kafka.yml |    41 -
 deployment/roles/bro/tasks/bro.yml              |    44 -
 deployment/roles/bro/tasks/dependencies.yml     |    37 -
 deployment/roles/bro/tasks/librdkafka.yml       |    39 -
 deployment/roles/bro/tasks/main.yml             |    22 -
 deployment/roles/bro/tasks/start-bro.yml        |    31 -
 deployment/roles/bro/vars/main.yml              |    26 -
 deployment/roles/build-tools/meta/main.yml      |    19 -
 deployment/roles/build-tools/tasks/main.yml     |    34 -
 .../roles/elasticsearch/defaults/main.yml       |    22 -
 .../elasticsearch/files/elasticsearch.repo      |    23 -
 .../elasticsearch/files/yaf_index.template      |    82 -
 deployment/roles/elasticsearch/meta/main.yml    |    24 -
 .../elasticsearch/tasks/configure_index.yml     |    44 -
 deployment/roles/elasticsearch/tasks/main.yml   |    73 -
 .../metron-elasticsearch-logrotate.yml          |    26 -
 deployment/roles/epel/tasks/main.yml            |    30 -
 deployment/roles/flume/meta/main.yml            |    20 -
 deployment/roles/flume/tasks/main.yml           |    52 -
 deployment/roles/flume/vars/main.yml            |    18 -
 deployment/roles/hadoop_setup/defaults/main.yml |    25 -
 deployment/roles/hadoop_setup/meta/main.yml     |    20 -
 deployment/roles/hadoop_setup/tasks/main.yml    |    37 -
 deployment/roles/hadoop_setup/vars/main.yml     |    18 -
 deployment/roles/httplib2/tasks/main.yml        |    20 -
 deployment/roles/java_jdk/defaults/main.yml     |    18 -
 deployment/roles/java_jdk/tasks/main.yml        |    34 -
 deployment/roles/kafka-broker/defaults/main.yml |    18 -
 deployment/roles/kafka-broker/meta/main.yml     |    18 -
 deployment/roles/kafka-broker/tasks/main.yml    |    41 -
 deployment/roles/kafka-broker/vars/main.yml     |    18 -
 deployment/roles/kafka-client/tasks/main.yml    |    30 -
 deployment/roles/librdkafka/defaults/main.yml   |    20 -
 .../roles/librdkafka/tasks/dependencies.yml     |    37 -
 .../roles/librdkafka/tasks/librdkafka.yml       |    39 -
 deployment/roles/librdkafka/tasks/main.yml      |    19 -
 .../roles/libselinux-python/tasks/main.yml      |    25 -
 .../roles/metron_common/defaults/main.yml       |    19 -
 deployment/roles/metron_common/meta/main.yml    |    22 -
 deployment/roles/metron_common/tasks/main.yml   |    35 -
 .../roles/metron_pcapservice/defaults/main.yml  |    24 -
 .../roles/metron_pcapservice/meta/main.yml      |    19 -
 .../metron_pcapservice/tasks/config-hbase.yml   |    26 -
 .../roles/metron_pcapservice/tasks/main.yml     |    25 -
 .../metron_pcapservice/tasks/pcapservice.yml    |    38 -
 .../metron_pcapservice/templates/pcapservice    |    84 -
 .../roles/metron_streaming/defaults/main.yml    |    75 -
 .../files/config/sensors/bro.json               |    19 -
 .../files/config/sensors/pcap.json              |    19 -
 .../files/config/sensors/snort.json             |    18 -
 .../files/config/sensors/yaf.json               |    19 -
 .../roles/metron_streaming/files/extractor.json |    12 -
 .../metron_streaming/files/yaf_index.template   |    36 -
 .../roles/metron_streaming/handlers/main.yml    |    19 -
 deployment/roles/metron_streaming/meta/main.yml |    20 -
 .../roles/metron_streaming/tasks/es_purge.yml   |    42 -
 .../metron_streaming/tasks/grok_upload.yml      |    37 -
 .../metron_streaming/tasks/hdfs_filesystem.yml  |    41 -
 .../roles/metron_streaming/tasks/hdfs_purge.yml |    52 -
 .../roles/metron_streaming/tasks/main.yml       |   136 -
 .../metron_streaming/tasks/metron_topology.yml  |    29 -
 .../metron_streaming/tasks/source_config.yml    |    48 -
 .../metron_streaming/tasks/threat_intel.yml     |    46 -
 .../templates/config/elasticsearch.global.json  |     6 -
 .../templates/config/solr.global.json           |     6 -
 .../metron_streaming/templates/threat_ip.csv    |    37 -
 deployment/roles/metron_ui/defaults/main.yml    |    21 -
 .../roles/metron_ui/tasks/copy-source.yml       |    46 -
 deployment/roles/metron_ui/tasks/main.yml       |    56 -
 deployment/roles/mysql_client/tasks/main.yml    |    35 -
 .../roles/mysql_client/templates/db_config.sql  |    21 -
 deployment/roles/mysql_server/defaults/main.yml |    20 -
 .../roles/mysql_server/files/geoip_ddl.sql      |    49 -
 deployment/roles/mysql_server/handlers/main.yml |    19 -
 deployment/roles/mysql_server/tasks/main.yml    |    93 -
 deployment/roles/mysql_server/templates/.my.cnf |    20 -
 deployment/roles/ntp/tasks/main.yml             |    31 -
 .../roles/packet-capture/defaults/main.yml      |    32 -
 deployment/roles/packet-capture/meta/main.yml   |    19 -
 deployment/roles/packet-capture/tasks/debug.yml |    26 -
 .../roles/packet-capture/tasks/dependencies.yml |    38 -
 deployment/roles/packet-capture/tasks/dpdk.yml  |    59 -
 .../roles/packet-capture/tasks/kernel.yml       |    51 -
 deployment/roles/packet-capture/tasks/main.yml  |    22 -
 .../roles/packet-capture/tasks/pcapture.yml     |    49 -
 .../roles/packet-capture/templates/pcapture     |    93 -
 .../packet-capture/templates/pcapture.conf      |    67 -
 deployment/roles/pcap_replay/README.md          |    44 -
 deployment/roles/pcap_replay/defaults/main.yml  |    21 -
 deployment/roles/pcap_replay/meta/main.yml      |    21 -
 deployment/roles/pcap_replay/tasks/main.yml     |    19 -
 deployment/roles/pcap_replay/tasks/service.yml  |    22 -
 .../roles/pcap_replay/tasks/tcpreplay.yml       |    38 -
 .../roles/pcap_replay/templates/pcap-replay     |    92 -
 deployment/roles/pycapa/meta/main.yml           |    22 -
 deployment/roles/pycapa/tasks/dependencies.yml  |    34 -
 deployment/roles/pycapa/tasks/main.yml          |    19 -
 deployment/roles/pycapa/tasks/pycapa.yml        |    37 -
 deployment/roles/pycapa/templates/pycapa        |    84 -
 deployment/roles/pycapa/vars/main.yml           |    23 -
 deployment/roles/python-pip/tasks/main.yml      |    25 -
 deployment/roles/sensor-test-mode/README.md     |    27 -
 .../roles/sensor-test-mode/files/example.pcap   |   Bin 507865 -> 0 bytes
 deployment/roles/sensor-test-mode/meta/main.yml |    19 -
 .../roles/sensor-test-mode/tasks/main.yml       |    56 -
 deployment/roles/snort/defaults/main.yml        |    25 -
 deployment/roles/snort/files/flume-snort.conf   |    44 -
 deployment/roles/snort/files/snort.conf         |   726 -
 deployment/roles/snort/meta/main.yml            |    24 -
 deployment/roles/snort/tasks/daq.yml            |    36 -
 deployment/roles/snort/tasks/flume.yml          |    31 -
 deployment/roles/snort/tasks/main.yml           |    31 -
 deployment/roles/snort/tasks/snort.yml          |    85 -
 deployment/roles/solr/defaults/main.yml         |    29 -
 deployment/roles/solr/files/schema.xml          |   191 -
 deployment/roles/solr/meta/main.yml             |    21 -
 deployment/roles/solr/tasks/main.yml            |    74 -
 deployment/roles/solr/templates/solr.xml        |    52 -
 deployment/roles/solr/templates/solrconfig.xml  |   583 -
 .../roles/tap_interface/defaults/main.yml       |    19 -
 deployment/roles/tap_interface/tasks/main.yml   |    35 -
 deployment/roles/yaf/defaults/main.yml          |    30 -
 deployment/roles/yaf/meta/main.yml              |    23 -
 deployment/roles/yaf/tasks/fixbuf.yml           |    37 -
 deployment/roles/yaf/tasks/main.yml             |    19 -
 deployment/roles/yaf/tasks/yaf.yml              |    60 -
 deployment/roles/yaf/templates/start-yaf.sh     |    25 -
 deployment/roles/yaf/templates/yaf              |    83 -
 deployment/roles/yum-update/tasks/main.yml      |    26 -
 deployment/vagrant/multinode-vagrant/.gitignore |     1 -
 .../vagrant/multinode-vagrant/Vagrantfile       |    65 -
 .../vagrant/multinode-vagrant/ansible.cfg       |    22 -
 deployment/vagrant/packet-capture/Vagrantfile   |    69 -
 deployment/vagrant/packet-capture/ansible.cfg   |    22 -
 deployment/vagrant/packet-capture/playbook.yml  |    43 -
 .../vagrant/singlenode-vagrant/.gitignore       |     1 -
 .../vagrant/singlenode-vagrant/Vagrantfile      |    63 -
 .../vagrant/singlenode-vagrant/ansible.cfg      |    22 -
 metron-deployment/.gitignore                    |     2 +
 metron-deployment/README.md                     |    97 +
 metron-deployment/amazon-ec2/.gitignore         |     4 +
 metron-deployment/amazon-ec2/README.md          |   211 +
 metron-deployment/amazon-ec2/ansible.cfg        |    28 +
 metron-deployment/amazon-ec2/conf/defaults.yml  |    80 +
 metron-deployment/amazon-ec2/conf/ec2.ini       |   105 +
 metron-deployment/amazon-ec2/playbook.yml       |    80 +
 .../amazon-ec2/tasks/check-hosts.yml            |    20 +
 .../amazon-ec2/tasks/check-volume.yml           |    26 +
 .../amazon-ec2/tasks/create-hosts.yml           |    54 +
 .../amazon-ec2/tasks/create-keypair.yml         |    29 +
 .../create-open-inbound-security-group.yml      |    26 +
 .../create-open-outbound-security-group.yml     |    26 +
 .../amazon-ec2/tasks/create-security-group.yml  |    28 +
 .../amazon-ec2/tasks/create-vpc.yml             |    50 +
 .../amazon-ec2/tasks/expand-volume.yml          |    30 +
 .../amazon-ec2/tasks/mount-volume.yml           |    32 +
 .../amazon-ec2/tasks/provisioning-report.yml    |    35 +
 metron-deployment/ansible.cfg                   |    23 +
 .../extra_modules/ambari_cluster_state.py       |   392 +
 .../inventory/dev-vagrant/group_vars/all        |    86 +
 metron-deployment/inventory/dev-vagrant/hosts   |    48 +
 .../inventory/metron_example/group_vars/all     |    77 +
 .../inventory/metron_example/hosts              |    63 +
 .../inventory/multinode-vagrant/group_vars/all  |    75 +
 .../inventory/multinode-vagrant/hosts           |    59 +
 .../inventory/singlenode-vagrant/group_vars/all |    86 +
 .../inventory/singlenode-vagrant/hosts          |    48 +
 metron-deployment/playbooks/ambari_install.yml  |    55 +
 .../playbooks/metron_full_install.yml           |    23 +
 metron-deployment/playbooks/metron_install.yml  |    96 +
 .../roles/ambari_common/defaults/main.yml       |    19 +
 .../roles/ambari_common/meta/main.yml           |    22 +
 .../roles/ambari_common/tasks/main.yml          |    52 +
 .../ambari_common/tasks/passwd_less_ssh.yml     |    32 +
 .../templates/metron-hadoop-logrotate.yml       |   135 +
 .../roles/ambari_common/vars/main.yml           |    21 +
 .../roles/ambari_config/defaults/main.yml       |    30 +
 .../roles/ambari_config/meta/main.yml           |    21 +
 .../roles/ambari_config/tasks/main.yml          |    42 +
 .../ambari_config/tasks/start_services.yml      |    48 +
 .../vars/multi_vagrant_cluster.yml              |    99 +
 .../roles/ambari_config/vars/single_node_vm.yml |    85 +
 .../roles/ambari_config/vars/small_cluster.yml  |    88 +
 .../roles/ambari_gather_facts/meta/main.yml     |    21 +
 .../roles/ambari_gather_facts/tasks/main.yml    |   151 +
 .../roles/ambari_master/defaults/main.yml       |    19 +
 .../roles/ambari_master/tasks/main.yml          |    51 +
 .../roles/ambari_slave/files/hostname.sh        |    19 +
 .../roles/ambari_slave/tasks/main.yml           |    51 +
 .../roles/ambari_slave/vars/main.yml            |    24 +
 metron-deployment/roles/bro/meta/main.yml       |    23 +
 .../roles/bro/tasks/bro-plugin-kafka.yml        |    41 +
 metron-deployment/roles/bro/tasks/bro.yml       |    44 +
 .../roles/bro/tasks/dependencies.yml            |    37 +
 .../roles/bro/tasks/librdkafka.yml              |    39 +
 metron-deployment/roles/bro/tasks/main.yml      |    22 +
 metron-deployment/roles/bro/tasks/start-bro.yml |    31 +
 metron-deployment/roles/bro/vars/main.yml       |    26 +
 .../roles/build-tools/meta/main.yml             |    19 +
 .../roles/build-tools/tasks/main.yml            |    34 +
 .../roles/elasticsearch/defaults/main.yml       |    22 +
 .../elasticsearch/files/elasticsearch.repo      |    23 +
 .../elasticsearch/files/yaf_index.template      |    82 +
 .../roles/elasticsearch/meta/main.yml           |    24 +
 .../elasticsearch/tasks/configure_index.yml     |    44 +
 .../roles/elasticsearch/tasks/main.yml          |    73 +
 .../metron-elasticsearch-logrotate.yml          |    26 +
 metron-deployment/roles/epel/tasks/main.yml     |    30 +
 metron-deployment/roles/flume/meta/main.yml     |    20 +
 metron-deployment/roles/flume/tasks/main.yml    |    52 +
 metron-deployment/roles/flume/vars/main.yml     |    18 +
 .../roles/hadoop_setup/defaults/main.yml        |    25 +
 .../roles/hadoop_setup/meta/main.yml            |    20 +
 .../roles/hadoop_setup/tasks/main.yml           |    37 +
 .../roles/hadoop_setup/vars/main.yml            |    18 +
 metron-deployment/roles/httplib2/tasks/main.yml |    20 +
 .../roles/java_jdk/defaults/main.yml            |    18 +
 metron-deployment/roles/java_jdk/tasks/main.yml |    34 +
 .../roles/kafka-broker/defaults/main.yml        |    18 +
 .../roles/kafka-broker/meta/main.yml            |    18 +
 .../roles/kafka-broker/tasks/main.yml           |    41 +
 .../roles/kafka-broker/vars/main.yml            |    18 +
 .../roles/kafka-client/tasks/main.yml           |    30 +
 .../roles/librdkafka/defaults/main.yml          |    20 +
 .../roles/librdkafka/tasks/dependencies.yml     |    37 +
 .../roles/librdkafka/tasks/librdkafka.yml       |    39 +
 .../roles/librdkafka/tasks/main.yml             |    19 +
 .../roles/libselinux-python/tasks/main.yml      |    25 +
 .../roles/metron_common/defaults/main.yml       |    19 +
 .../roles/metron_common/meta/main.yml           |    22 +
 .../roles/metron_common/tasks/main.yml          |    35 +
 .../roles/metron_pcapservice/defaults/main.yml  |    24 +
 .../roles/metron_pcapservice/meta/main.yml      |    19 +
 .../metron_pcapservice/tasks/config-hbase.yml   |    26 +
 .../roles/metron_pcapservice/tasks/main.yml     |    25 +
 .../metron_pcapservice/tasks/pcapservice.yml    |    38 +
 .../metron_pcapservice/templates/pcapservice    |    84 +
 .../roles/metron_streaming/defaults/main.yml    |    81 +
 .../files/config/sensors/bro.json               |    19 +
 .../files/config/sensors/pcap.json              |    19 +
 .../files/config/sensors/snort.json             |    18 +
 .../files/config/sensors/yaf.json               |    19 +
 .../roles/metron_streaming/files/extractor.json |    12 +
 .../metron_streaming/files/yaf_index.template   |    36 +
 .../roles/metron_streaming/meta/main.yml        |    20 +
 .../roles/metron_streaming/tasks/es_purge.yml   |    42 +
 .../metron_streaming/tasks/grok_upload.yml      |    37 +
 .../metron_streaming/tasks/hdfs_filesystem.yml  |    41 +
 .../roles/metron_streaming/tasks/hdfs_purge.yml |    52 +
 .../roles/metron_streaming/tasks/main.yml       |   155 +
 .../metron_streaming/tasks/metron_topology.yml  |    33 +
 .../metron_streaming/tasks/source_config.yml    |    51 +
 .../metron_streaming/tasks/threat_intel.yml     |    46 +
 .../templates/config/elasticsearch.global.json  |     6 +
 .../templates/config/solr.global.json           |     6 +
 .../metron_streaming/templates/threat_ip.csv    |    37 +
 .../roles/metron_ui/defaults/main.yml           |    21 +
 .../roles/metron_ui/tasks/copy-source.yml       |    46 +
 .../roles/metron_ui/tasks/main.yml              |    56 +
 .../roles/mysql_client/tasks/main.yml           |    35 +
 .../roles/mysql_client/templates/db_config.sql  |    21 +
 .../roles/mysql_server/defaults/main.yml        |    20 +
 .../roles/mysql_server/files/geoip_ddl.sql      |    49 +
 .../roles/mysql_server/handlers/main.yml        |    19 +
 .../roles/mysql_server/tasks/main.yml           |    93 +
 .../roles/mysql_server/templates/.my.cnf        |    20 +
 metron-deployment/roles/ntp/tasks/main.yml      |    31 +
 .../roles/packet-capture/defaults/main.yml      |    32 +
 .../roles/packet-capture/meta/main.yml          |    19 +
 .../roles/packet-capture/tasks/debug.yml        |    26 +
 .../roles/packet-capture/tasks/dependencies.yml |    38 +
 .../roles/packet-capture/tasks/dpdk.yml         |    59 +
 .../roles/packet-capture/tasks/kernel.yml       |    51 +
 .../roles/packet-capture/tasks/main.yml         |    22 +
 .../roles/packet-capture/tasks/pcapture.yml     |    49 +
 .../roles/packet-capture/templates/pcapture     |    93 +
 .../packet-capture/templates/pcapture.conf      |    67 +
 metron-deployment/roles/pcap_replay/README.md   |    44 +
 .../roles/pcap_replay/defaults/main.yml         |    21 +
 .../roles/pcap_replay/meta/main.yml             |    21 +
 .../roles/pcap_replay/tasks/main.yml            |    19 +
 .../roles/pcap_replay/tasks/service.yml         |    22 +
 .../roles/pcap_replay/tasks/tcpreplay.yml       |    38 +
 .../roles/pcap_replay/templates/pcap-replay     |    92 +
 metron-deployment/roles/pycapa/meta/main.yml    |    22 +
 .../roles/pycapa/tasks/dependencies.yml         |    34 +
 metron-deployment/roles/pycapa/tasks/main.yml   |    19 +
 metron-deployment/roles/pycapa/tasks/pycapa.yml |    37 +
 metron-deployment/roles/pycapa/templates/pycapa |    84 +
 metron-deployment/roles/pycapa/vars/main.yml    |    23 +
 .../roles/python-pip/tasks/main.yml             |    25 +
 .../roles/sensor-test-mode/README.md            |    27 +
 .../roles/sensor-test-mode/files/example.pcap   |   Bin 0 -> 507865 bytes
 .../roles/sensor-test-mode/meta/main.yml        |    19 +
 .../roles/sensor-test-mode/tasks/main.yml       |    56 +
 metron-deployment/roles/snort/defaults/main.yml |    25 +
 .../roles/snort/files/flume-snort.conf          |    44 +
 metron-deployment/roles/snort/files/snort.conf  |   726 +
 metron-deployment/roles/snort/meta/main.yml     |    24 +
 metron-deployment/roles/snort/tasks/daq.yml     |    36 +
 metron-deployment/roles/snort/tasks/flume.yml   |    31 +
 metron-deployment/roles/snort/tasks/main.yml    |    31 +
 metron-deployment/roles/snort/tasks/snort.yml   |    85 +
 metron-deployment/roles/solr/defaults/main.yml  |    29 +
 metron-deployment/roles/solr/files/schema.xml   |   191 +
 metron-deployment/roles/solr/meta/main.yml      |    21 +
 metron-deployment/roles/solr/tasks/main.yml     |    74 +
 metron-deployment/roles/solr/templates/solr.xml |    52 +
 .../roles/solr/templates/solrconfig.xml         |   583 +
 .../roles/tap_interface/defaults/main.yml       |    19 +
 .../roles/tap_interface/tasks/main.yml          |    35 +
 metron-deployment/roles/yaf/defaults/main.yml   |    30 +
 metron-deployment/roles/yaf/meta/main.yml       |    23 +
 metron-deployment/roles/yaf/tasks/fixbuf.yml    |    37 +
 metron-deployment/roles/yaf/tasks/main.yml      |    19 +
 metron-deployment/roles/yaf/tasks/yaf.yml       |    60 +
 .../roles/yaf/templates/start-yaf.sh            |    25 +
 metron-deployment/roles/yaf/templates/yaf       |    83 +
 .../roles/yum-update/tasks/main.yml             |    26 +
 .../vagrant/multinode-vagrant/.gitignore        |     1 +
 .../vagrant/multinode-vagrant/Vagrantfile       |    65 +
 .../vagrant/multinode-vagrant/ansible.cfg       |    22 +
 .../vagrant/packet-capture/Vagrantfile          |    69 +
 .../vagrant/packet-capture/ansible.cfg          |    22 +
 .../vagrant/packet-capture/playbook.yml         |    43 +
 .../vagrant/singlenode-vagrant/.gitignore       |     1 +
 .../vagrant/singlenode-vagrant/Vagrantfile      |    63 +
 .../vagrant/singlenode-vagrant/ansible.cfg      |    22 +
 metron-platform/README.md                       |    30 +
 metron-platform/metron-api/README.txt           |    16 +
 metron-platform/metron-api/pom.xml              |   282 +
 .../OnlyDeleteExpiredFilesCompactionPolicy.java |    54 +
 .../apache/metron/api/ConfigurationManager.java |   136 +
 .../api/helper/service/PcapServiceCli.java      |   127 +
 .../pcapservice/CellTimestampComparator.java    |    40 +
 .../metron/pcapservice/ConfigurationUtil.java   |   286 +
 .../pcapservice/HBaseConfigConstants.java       |    57 +
 .../pcapservice/HBaseConfigurationUtil.java     |   179 +
 .../apache/metron/pcapservice/IPcapGetter.java  |   102 +
 .../apache/metron/pcapservice/IPcapScanner.java |    66 +
 .../metron/pcapservice/PcapGetterHBaseImpl.java |   826 +
 .../apache/metron/pcapservice/PcapHelper.java   |   222 +
 .../pcapservice/PcapReceiverImplRestEasy.java   |   267 +
 .../pcapservice/PcapScannerHBaseImpl.java       |   319 +
 .../metron/pcapservice/PcapsResponse.java       |   167 +
 .../metron/pcapservice/RestTestingUtil.java     |   329 +
 .../pcapservice/rest/JettyServiceRunner.java    |    43 +
 .../metron/pcapservice/rest/PcapService.java    |    51 +
 .../main/resources/config-definition-hbase.xml  |    50 +
 .../resources/hbase-config-default.properties   |    57 +
 .../CellTimestampComparatorTest.java            |   109 +
 .../pcapservice/ConfigurationUtilTest.java      |    67 +
 .../pcapservice/HBaseConfigurationUtilTest.java |    69 +
 .../pcapservice/HBaseIntegrationTest.java       |    88 +
 .../pcapservice/PcapGetterHBaseImplTest.java    |   553 +
 .../metron/pcapservice/PcapHelperTest.java      |   335 +
 .../pcapservice/PcapScannerHBaseImplTest.java   |   249 +
 .../src/test/resources/hbase-config.properties  |    57 +
 .../src/test/resources/test-tcp-packet.pcap     |   Bin 0 -> 144 bytes
 metron-platform/metron-common/.gitignore        |     1 +
 metron-platform/metron-common/pom.xml           |   300 +
 .../src/main/assembly/assembly.xml              |    33 +
 .../org/apache/metron/common/Constants.java     |    35 +
 .../metron/common/bolt/ConfiguredBolt.java      |   124 +
 .../metron/common/cli/ConfigurationsUtils.java  |   232 +
 .../common/configuration/Configuration.java     |    60 +
 .../common/configuration/Configurations.java    |   112 +
 .../common/configuration/EnrichmentConfig.java  |   201 +
 .../configuration/SensorEnrichmentConfig.java   |   129 +
 .../common/interfaces/BulkMessageWriter.java    |    31 +
 .../metron/common/interfaces/MessageWriter.java |    27 +
 .../apache/metron/common/utils/ErrorUtils.java  |    64 +
 .../apache/metron/common/utils/JSONUtils.java   |    86 +
 .../metron/common/utils/MessageUtils.java       |    28 +
 .../metron/common/utils/ReflectionUtils.java    |    48 +
 .../src/main/scripts/zk_load_configs.sh         |    33 +
 .../metron/common/bolt/ConfiguredBoltTest.java  |   162 +
 .../common/cli/ConfigurationsUtilsTest.java     |    95 +
 .../common/configuration/ConfigurationTest.java |    90 +
 .../configuration/ConfigurationsTest.java       |    40 +
 .../configuration/EnrichmentConfigTest.java     |   211 +
 .../SensorEnrichmentConfigTest.java             |    41 +
 .../config/BasicTldExtractorTest.config         |    20 +
 .../src/test/resources/config/global.json       |     3 +
 .../src/test/resources/config/sensors/bro.json  |    19 +
 .../src/test/resources/effective_tld_names.dat  |  9719 +++++++
 .../metron-data-management/README.md            |   252 +
 metron-platform/metron-data-management/pom.xml  |   327 +
 .../src/main/assembly/assembly.xml              |    42 +
 .../src/main/bash/Whois_CSV_to_JSON.py          |   208 +
 .../src/main/bash/flatfile_loader.sh            |    39 +
 .../main/bash/prune_elasticsearch_indices.sh    |    21 +
 .../src/main/bash/prune_hdfs_files.sh           |    21 +
 .../src/main/bash/threatintel_bulk_load.sh      |    38 +
 .../src/main/bash/threatintel_bulk_prune.sh     |    37 +
 .../src/main/bash/threatintel_taxii_load.sh     |    39 +
 .../metron/dataloads/bulk/DataPruner.java       |    66 +
 .../dataloads/bulk/ElasticsearchDataPruner.java |   135 +
 .../bulk/ElasticsearchDataPrunerRunner.java     |   190 +
 .../metron/dataloads/bulk/HDFSDataPruner.java   |   226 +
 .../dataloads/bulk/LeastRecentlyUsedPruner.java |   221 +
 .../dataloads/bulk/StartDateException.java      |    31 +
 .../dataloads/bulk/ThreatIntelBulkLoader.java   |   259 +
 .../metron/dataloads/cif/HBaseTableLoad.java    |   255 +
 .../metron/dataloads/extractor/Extractor.java   |    28 +
 .../dataloads/extractor/ExtractorCreator.java   |    24 +
 .../dataloads/extractor/ExtractorHandler.java   |    79 +
 .../metron/dataloads/extractor/Extractors.java  |    58 +
 .../dataloads/extractor/csv/CSVExtractor.java   |   139 +
 .../extractor/csv/LookupConverter.java          |    29 +
 .../extractor/csv/LookupConverters.java         |    68 +
 .../extractor/inputformat/Formats.java          |    55 +
 .../inputformat/InputFormatHandler.java         |    28 +
 .../extractor/inputformat/WholeFileFormat.java  |   109 +
 .../dataloads/extractor/stix/StixExtractor.java |   132 +
 .../stix/types/AbstractObjectTypeHandler.java   |    36 +
 .../extractor/stix/types/AddressHandler.java    |    94 +
 .../extractor/stix/types/DomainHandler.java     |    77 +
 .../extractor/stix/types/HostnameHandler.java   |    70 +
 .../extractor/stix/types/ObjectTypeHandler.java |    31 +
 .../stix/types/ObjectTypeHandlers.java          |    42 +
 .../dataloads/hbase/mr/BulkLoadMapper.java      |    75 +
 .../metron/dataloads/hbase/mr/PrunerMapper.java |    78 +
 .../SimpleEnrichmentFlatFileLoader.java         |   261 +
 .../dataloads/nonbulk/taxii/ConnectionType.java |    23 +
 .../dataloads/nonbulk/taxii/TableInfo.java      |    71 +
 .../nonbulk/taxii/TaxiiConnectionConfig.java    |   222 +
 .../dataloads/nonbulk/taxii/TaxiiHandler.java   |   406 +
 .../dataloads/nonbulk/taxii/TaxiiLoader.java    |   205 +
 .../ElasticsearchDataPrunerIntegrationTest.java |   153 +
 .../bulk/ElasticsearchDataPrunerRunnerTest.java |    72 +
 .../bulk/ElasticsearchDataPrunerTest.java       |   211 +
 .../dataloads/bulk/HDFSDataPrunerTest.java      |   178 +
 .../dataloads/extractor/ExtractorTest.java      |    83 +
 .../extractor/csv/CSVExtractorTest.java         |    73 +
 .../extractor/stix/StixExtractorTest.java       |   142 +
 .../hbase/HBaseEnrichmentConverterTest.java     |    74 +
 .../hbase/mr/BulkLoadMapperIntegrationTest.java |   105 +
 .../dataloads/hbase/mr/BulkLoadMapperTest.java  |    91 +
 .../metron/dataloads/hbase/mr/HBaseUtil.java    |    72 +
 .../LeastRecentlyUsedPrunerIntegrationTest.java |   138 +
 .../nonbulk/taxii/MockTaxiiService.java         |    94 +
 .../nonbulk/taxii/TaxiiIntegrationTest.java     |   119 +
 .../src/test/resources/log4j.properties         |    24 +
 .../resources/taxii-messages/message.discovery  |    21 +
 .../test/resources/taxii-messages/messages.poll |  2914 ++
 metron-platform/metron-elasticsearch/pom.xml    |   250 +
 .../src/main/assembly/assembly.xml              |    52 +
 .../src/main/config/elasticsearch.properties    |   109 +
 .../writer/ElasticsearchWriter.java             |    94 +
 .../scripts/start_elasticsearch_topology.sh     |    22 +
 .../ElasticsearchEnrichmentIntegrationTest.java |    89 +
 .../components/ElasticSearchComponent.java      |   186 +
 .../src/test/resources/log4j.properties         |    24 +
 .../src/test/resources/log4j2.xml               |    31 +
 metron-platform/metron-enrichment/README.md     |   125 +
 metron-platform/metron-enrichment/pom.xml       |   255 +
 .../src/main/assembly/assembly.xml              |    44 +
 .../src/main/flux/enrichment/remote.yaml        |   413 +
 .../src/main/flux/enrichment/test.yaml          |   394 +
 .../adapters/cif/AbstractCIFAdapter.java        |    47 +
 .../adapters/cif/CIFHbaseAdapter.java           |   138 +
 .../enrichment/adapters/geo/GeoAdapter.java     |    76 +
 .../adapters/host/AbstractHostAdapter.java      |    47 +
 .../adapters/host/HostFromJSONListAdapter.java  |    75 +
 .../host/HostFromPropertiesFileAdapter.java     |    65 +
 .../adapters/jdbc/BaseJdbcConfig.java           |    70 +
 .../enrichment/adapters/jdbc/JdbcAdapter.java   |    83 +
 .../enrichment/adapters/jdbc/JdbcConfig.java    |    26 +
 .../enrichment/adapters/jdbc/MySqlConfig.java   |    39 +
 .../simplehbase/SimpleHBaseAdapter.java         |   119 +
 .../adapters/simplehbase/SimpleHBaseConfig.java |    55 +
 .../threatintel/ThreatIntelAdapter.java         |   134 +
 .../adapters/threatintel/ThreatIntelConfig.java |   107 +
 .../enrichment/bolt/BulkMessageWriterBolt.java  |   104 +
 .../apache/metron/enrichment/bolt/CacheKey.java |    73 +
 .../enrichment/bolt/EnrichmentJoinBolt.java     |    98 +
 .../enrichment/bolt/EnrichmentSplitterBolt.java |   142 +
 .../enrichment/bolt/GenericEnrichmentBolt.java  |   225 +
 .../metron/enrichment/bolt/HBaseBolt.java       |   184 +
 .../apache/metron/enrichment/bolt/JoinBolt.java |   130 +
 .../metron/enrichment/bolt/SplitBolt.java       |    99 +
 .../enrichment/bolt/ThreatIntelJoinBolt.java    |    60 +
 .../bolt/ThreatIntelSplitterBolt.java           |    40 +
 .../enrichment/cli/LatencySummarizer.java       |   189 +
 .../enrichment/configuration/Enrichment.java    |    62 +
 .../enrichment/converter/AbstractConverter.java |    96 +
 .../converter/EnrichmentConverter.java          |    37 +
 .../enrichment/converter/EnrichmentHelper.java  |    36 +
 .../enrichment/converter/EnrichmentKey.java     |   116 +
 .../enrichment/converter/EnrichmentValue.java   |   106 +
 .../enrichment/converter/HbaseConverter.java    |    40 +
 .../interfaces/EnrichmentAdapter.java           |    29 +
 .../enrichment/lookup/EnrichmentLookup.java     |   104 +
 .../apache/metron/enrichment/lookup/Lookup.java |    96 +
 .../metron/enrichment/lookup/LookupKV.java      |    65 +
 .../metron/enrichment/lookup/LookupKey.java     |    23 +
 .../metron/enrichment/lookup/LookupValue.java   |    28 +
 .../lookup/accesstracker/AccessTracker.java     |    35 +
 .../lookup/accesstracker/AccessTrackerUtil.java |    83 +
 .../accesstracker/BloomAccessTracker.java       |   146 +
 .../lookup/accesstracker/NoopAccessTracker.java |    65 +
 .../accesstracker/PersistentAccessTracker.java  |   209 +
 .../enrichment/lookup/handler/Handler.java      |    29 +
 .../tldextractor/BasicTldExtractor.java         |   154 +
 .../enrichment/utils/EnrichmentUtils.java       |    80 +
 .../enrichment/utils/ThreatIntelUtils.java      |    32 +
 .../apache/metron/writer/hdfs/HdfsWriter.java   |    94 +
 .../writer/hdfs/SourceAwareMoveAction.java      |    48 +
 .../writer/hdfs/SourceFileNameFormat.java       |    48 +
 .../metron/writer/hdfs/SourceHandler.java       |   160 +
 .../src/main/resources/effective_tld_names.dat  |  9719 +++++++
 .../src/main/scripts/latency_summarizer.sh      |    32 +
 .../enrichment/adapters/geo/GeoAdapterTest.java |    94 +
 .../host/HostFromJSONListAdapterTest.java       |    83 +
 .../host/HostFromPropertiesFileAdapterTest.java |   106 +
 .../adapters/jdbc/MySqlConfigTest.java          |    44 +
 .../simplehbase/SimpleHBaseAdapterTest.java     |   117 +
 .../simplehbase/SimpleHBaseConfigTest.java      |    42 +
 .../threatintel/ThreatIntelAdapterTest.java     |   148 +
 .../threatintel/ThreatIntelConfigTest.java      |    53 +
 .../bolt/BulkMessageWriterBoltTest.java         |   150 +
 .../enrichment/bolt/EnrichmentJoinBoltTest.java |    86 +
 .../bolt/EnrichmentSplitterBoltTest.java        |    96 +
 .../bolt/GenericEnrichmentBoltTest.java         |   196 +
 .../metron/enrichment/bolt/JoinBoltTest.java    |   121 +
 .../metron/enrichment/bolt/SplitBoltTest.java   |   125 +
 .../bolt/ThreatIntelJoinBoltTest.java           |   105 +
 .../bolt/ThreatIntelSplitterBoltTest.java       |    45 +
 .../converter/EnrichmentConverterTest.java      |    51 +
 .../tldextractor/BasicTldExtractorTest.java     |    69 +
 .../resources/CIFHbaseAdapterTest.properties    |    27 +
 .../resources/GeoMysqlAdapterTest.properties    |    27 +
 .../resources/TestSchemas/CIFHbaseSchema.json   |     0
 .../resources/TestSchemas/GeoMySqlSchema.json   |    42 +
 .../resources/TestSchemas/WhoisHbaseSchema.json |     0
 .../resources/WhoisHbaseAdapterTest.properties  |    28 +
 metron-platform/metron-hbase/pom.xml            |    77 +
 .../java/org/apache/metron/hbase/Connector.java |    36 +
 .../apache/metron/hbase/HTableConnector.java    |   157 +
 .../org/apache/metron/hbase/HTableProvider.java |    31 +
 .../org/apache/metron/hbase/TableConfig.java    |   118 +
 .../org/apache/metron/hbase/TableProvider.java  |    28 +
 .../apache/metron/hbase/TupleTableConfig.java   |   275 +
 .../apache/metron/hbase/writer/HBaseWriter.java |    88 +
 metron-platform/metron-integration-test/pom.xml |   121 +
 .../metron/integration/BaseIntegrationTest.java |    48 +
 .../metron/integration/ComponentRunner.java     |   152 +
 .../integration/EnrichmentIntegrationTest.java  |   468 +
 .../metron/integration/InMemoryComponent.java   |    23 +
 .../apache/metron/integration/Processor.java    |    23 +
 .../metron/integration/ReadinessState.java      |    22 +
 .../integration/UnableToStartException.java     |    27 +
 .../components/FluxTopologyComponent.java       |   132 +
 .../components/KafkaWithZKComponent.java        |   225 +
 .../metron/integration/mock/MockGeoAdapter.java |    64 +
 .../integration/mock/MockHBaseConnector.java    |    52 +
 .../metron/integration/utils/KafkaUtil.java     |    41 +
 .../metron/integration/utils/SampleUtil.java    |    40 +
 .../metron/integration/utils/TestUtils.java     |    37 +
 .../main/resources/sample/config/global.json    |    10 +
 .../resources/sample/config/sensors/bro.json    |    19 +
 .../resources/sample/config/sensors/pcap.json   |    13 +
 .../resources/sample/config/sensors/snort.json  |    19 +
 .../resources/sample/config/sensors/yaf.json    |    25 +
 .../sample/data/SampleIndexed/YafIndexed        |    10 +
 .../data/SampleInput/.PCAPExampleOutput.crc     |   Bin 0 -> 44 bytes
 .../resources/sample/data/SampleInput/AsaOutput |   100 +
 .../sample/data/SampleInput/BroExampleOutput    | 23411 +++++++++++++++++
 .../data/SampleInput/FireeyeExampleOutput       |    90 +
 .../sample/data/SampleInput/ISESampleOutput     |   308 +
 .../data/SampleInput/LancopeExampleOutput       |    40 +
 .../sample/data/SampleInput/PCAPExampleOutput   |   Bin 0 -> 4510 bytes
 .../sample/data/SampleInput/PaloaltoOutput      |   100 +
 .../sample/data/SampleInput/SnortOutput         |     3 +
 .../data/SampleInput/SourcefireExampleOutput    |     2 +
 .../sample/data/SampleInput/YafExampleOutput    |    10 +
 .../sample/data/SampleParsed/SnortParsed        |     3 +
 .../sample/data/SampleParsed/YafExampleParsed   |    10 +
 metron-platform/metron-parsers/README.md        |    82 +
 metron-platform/metron-parsers/pom.xml          |   227 +
 .../src/main/assembly/assembly.xml              |    74 +
 .../src/main/config/parsers.properties          |    21 +
 .../src/main/flux/asa/remote.yaml               |    82 +
 .../metron-parsers/src/main/flux/asa/test.yaml  |    82 +
 .../src/main/flux/bro/remote.yaml               |    71 +
 .../metron-parsers/src/main/flux/bro/test.yaml  |    72 +
 .../src/main/flux/fireeye/remote.yaml           |    79 +
 .../src/main/flux/fireeye/test.yaml             |    79 +
 .../src/main/flux/ise/remote.yaml               |    79 +
 .../metron-parsers/src/main/flux/ise/test.yaml  |    79 +
 .../src/main/flux/lancope/remote.yaml           |    79 +
 .../src/main/flux/lancope/test.yaml             |    79 +
 .../src/main/flux/paloalto/remote.yaml          |    79 +
 .../src/main/flux/paloalto/test.yaml            |    79 +
 .../src/main/flux/pcap/remote.yaml              |    70 +
 .../metron-parsers/src/main/flux/pcap/test.yaml |    74 +
 .../src/main/flux/snort/remote.yaml             |    69 +
 .../src/main/flux/snort/test.yaml               |    69 +
 .../src/main/flux/sourcefire/remote.yaml        |    79 +
 .../src/main/flux/sourcefire/test.yaml          |    79 +
 .../src/main/flux/yaf/remote.yaml               |    84 +
 .../metron-parsers/src/main/flux/yaf/test.yaml  |    88 +
 .../org/apache/metron/parsers/BasicParser.java  |    71 +
 .../org/apache/metron/parsers/GrokParser.java   |   166 +
 .../metron/parsers/asa/GrokAsaParser.java       |   280 +
 .../apache/metron/parsers/bolt/ParserBolt.java  |    89 +
 .../metron/parsers/bro/BasicBroParser.java      |   159 +
 .../apache/metron/parsers/bro/JSONCleaner.java  |    91 +
 .../parsers/filters/BroMessageFilter.java       |    62 +
 .../parsers/filters/GenericMessageFilter.java   |    34 +
 .../parsers/fireeye/BasicFireEyeParser.java     |   218 +
 .../parsers/interfaces/MessageFilter.java       |    24 +
 .../parsers/interfaces/MessageParser.java       |    28 +
 .../metron/parsers/ise/BasicIseParser.java      |    95 +
 .../apache/metron/parsers/ise/ISEParser.java    |   660 +
 .../org/apache/metron/parsers/ise/ISEParser.jj  |    12 +
 .../metron/parsers/ise/ISEParserConstants.java  |    69 +
 .../parsers/ise/ISEParserTokenManager.java      |   676 +
 .../metron/parsers/ise/JavaCharStream.java      |   633 +
 .../metron/parsers/ise/ParseException.java      |   204 +
 .../org/apache/metron/parsers/ise/Token.java    |   148 +
 .../metron/parsers/ise/TokenMgrError.java       |   164 +
 .../parsers/lancope/BasicLancopeParser.java     |    90 +
 .../parsers/logstash/BasicLogstashParser.java   |    89 +
 .../paloalto/BasicPaloAltoFirewallParser.java   |   209 +
 .../apache/metron/parsers/pcap/PcapParser.java  |   229 +
 .../metron/parsers/snort/BasicSnortParser.java  |   163 +
 .../sourcefire/BasicSourcefireParser.java       |   122 +
 .../apache/metron/parsers/utils/GrokUtils.java  |    43 +
 .../metron/parsers/utils/ParserUtils.java       |    72 +
 .../metron/parsers/writer/KafkaWriter.java      |    79 +
 .../src/main/resources/patterns/asa             |   176 +
 .../src/main/resources/patterns/common          |    96 +
 .../src/main/resources/patterns/fireeye         |     9 +
 .../src/main/resources/patterns/sourcefire      |    30 +
 .../src/main/resources/patterns/yaf             |     2 +
 .../src/main/scripts/start_parser_topology.sh   |    22 +
 .../metron/parsers/AbstractConfigTest.java      |   297 +
 .../metron/parsers/AbstractSchemaTest.java      |   197 +
 .../metron/parsers/AbstractTestContext.java     |   190 +
 .../apache/metron/parsers/GrokParserTest.java   |   114 +
 .../apache/metron/parsers/SettingsLoader.java   |   166 +
 .../metron/parsers/asa/GrokAsaParserTest.java   |   167 +
 .../metron/parsers/bolt/ParserBoltTest.java     |    91 +
 .../metron/parsers/bro/BasicBroParserTest.java  |   163 +
 .../metron/parsers/bro/BroParserTest.java       |   163 +
 .../parsers/fireeye/BasicFireEyeParserTest.java |   161 +
 .../integration/ParserIntegrationTest.java      |   131 +
 .../integration/PcapParserIntegrationTest.java  |   214 +
 .../integration/SnortIntegrationTest.java       |    48 +
 .../parsers/integration/YafIntegrationTest.java |    48 +
 .../metron/parsers/ise/BasicIseParserTest.java  |   170 +
 .../parsers/lancope/BasicLancopeParserTest.java |   161 +
 .../BasicPaloAltoFirewallParserTest.java        |   155 +
 .../sourcefire/BasicSourcefireParserTest.java   |   156 +
 .../test/resources/TestSchemas/BroSchema.json   |    28 +
 .../test/resources/TestSchemas/IseSchema.json   |    21 +
 .../resources/TestSchemas/LancopeSchema.json    |    28 +
 .../test/resources/TestSchemas/PcapSchema.json  |    22 +
 .../resources/TestSchemas/SourcefireSchema.json |    34 +
 .../config/BasicFireEyeParserTest.config        |    20 +
 .../resources/config/BasicIseParserTest.config  |    20 +
 .../config/BasicLancopeParserTest.config        |    20 +
 .../BasicPaloAltoFirewallParserTest.config      |    20 +
 .../config/BasicSourcefireParserTest.config     |    20 +
 .../test/resources/config/BroParserTest.config  |    20 +
 .../resources/config/GrokAsaParserTest.config   |    20 +
 .../src/test/resources/effective_tld_names.dat  |  9719 +++++++
 metron-platform/metron-pcap/pom.xml             |   103 +
 .../java/org/apache/metron/pcap/Constants.java  |    38 +
 .../org/apache/metron/pcap/IEEE_802_1Q.java     |    44 +
 .../metron/pcap/MetronEthernetDecoder.java      |   134 +
 .../java/org/apache/metron/pcap/PacketInfo.java |   470 +
 .../apache/metron/pcap/PcapByteInputStream.java |   185 +
 .../metron/pcap/PcapByteOutputStream.java       |   305 +
 .../java/org/apache/metron/pcap/PcapMerger.java |   262 +
 .../metron/pcap/PcapPacketComparator.java       |    39 +
 .../metron/pcap/spout/HDFSWriterCallback.java   |   168 +
 .../metron/pcap/spout/HDFSWriterConfig.java     |    97 +
 .../org/apache/metron/pcap/utils/PcapUtils.java |   475 +
 .../apache/metron/pcap/writer/PcapWriter.java   |    54 +
 .../src/main/java/storm/kafka/Callback.java     |    26 +
 .../java/storm/kafka/CallbackCollector.java     |   182 +
 .../java/storm/kafka/CallbackKafkaSpout.java    |    93 +
 .../src/main/java/storm/kafka/EmitContext.java  |   146 +
 .../apache/metron/pcap/utils/PcapUtilsTest.java |    31 +
 metron-platform/metron-solr/pom.xml             |   245 +
 .../metron-solr/src/main/assembly/assembly.xml  |    52 +
 .../metron-solr/src/main/config/solr.properties |   109 +
 .../org/apache/metron/solr/SolrConstants.java   |    29 +
 .../metron/solr/writer/MetronSolrClient.java    |    74 +
 .../apache/metron/solr/writer/SolrWriter.java   |   110 +
 .../src/main/scripts/start_solr_topology.sh     |    22 +
 .../SolrEnrichmentIntegrationTest.java          |   108 +
 .../integration/components/SolrComponent.java   |   153 +
 .../solr/writer/MetronSolrClientTest.java       |    83 +
 .../metron/solr/writer/SolrWriterTest.java      |   139 +
 .../src/test/resources/log4j.properties         |    24 +
 .../metron-solr/src/test/resources/log4j2.xml   |    31 +
 .../test/resources/solr/conf/_rest_managed.json |     1 +
 .../src/test/resources/solr/conf/currency.xml   |    67 +
 .../resources/solr/conf/lang/stopwords_en.txt   |    54 +
 .../src/test/resources/solr/conf/protwords.txt  |    21 +
 .../src/test/resources/solr/conf/schema.xml     |   191 +
 .../src/test/resources/solr/conf/solrconfig.xml |   583 +
 .../src/test/resources/solr/conf/stopwords.txt  |    14 +
 .../src/test/resources/solr/conf/synonyms.txt   |    29 +
 .../src/test/resources/solr/solr.xml            |    14 +
 metron-platform/metron-test-utilities/pom.xml   |   117 +
 .../java/org/apache/metron/TestConstants.java   |    26 +
 .../apache/metron/test/bolt/BaseBoltTest.java   |    93 +
 .../test/bolt/BaseEnrichmentBoltTest.java       |    93 +
 .../apache/metron/test/bolt/PrintingBolt.java   |    49 +
 .../test/converters/BinaryConverters.java       |    37 +
 .../test/converters/HexStringConverter.java     |    30 +
 .../metron/test/converters/IConverter.java      |    22 +
 .../metron/test/filereaders/FileReader.java     |    54 +
 .../org/apache/metron/test/mock/MockHTable.java |   672 +
 .../test/spouts/GenericInternalTestSpout.java   |   139 +
 .../apache/metron/test/utils/KafkaLoader.java   |    86 +
 .../metron/test/utils/UnitTestHelper.java       |    84 +
 metron-platform/pom.xml                         |   265 +
 metron-platform/style/LICENSE.config            |    16 +
 metron-platform/style/LICENSE.java              |    17 +
 metron-platform/style/LICENSE.xml               |    16 +
 metron-platform/style/checkstyle.xml            |    33 +
 metron-streaming/Metron-Alerts/README.md        |   104 -
 metron-streaming/Metron-Alerts/pom.xml          |   169 -
 .../apache/metron/alerts/AbstractAlertBolt.java |   119 -
 .../metron/alerts/TelemetryAlertsBolt.java      |   254 -
 .../alerts/adapters/AbstractAlertAdapter.java   |    69 -
 .../metron/alerts/adapters/AllAlertAdapter.java |   292 -
 .../alerts/adapters/CIFAlertsAdapter.java       |   328 -
 .../adapters/HbaseWhiteAndBlacklistAdapter.java |   483 -
 .../alerts/adapters/KeywordsAlertAdapter.java   |   291 -
 .../metron/alerts/adapters/RangeChecker.java    |    40 -
 .../alerts/adapters/ThreatAlertsAdapter.java    |   329 -
 .../metron/tagging/AbstractTaggerBolt.java      |    94 -
 .../metron/tagging/TelemetryTaggerBolt.java     |   200 -
 .../tagging/adapters/AbstractTaggerAdapter.java |    34 -
 .../metron/tagging/adapters/RegexTagger.java    |    64 -
 .../tagging/adapters/StaticAllTagger.java       |    53 -
 .../metron/tagging/adapters/TaggerAdapter.java  |    26 -
 .../alerts/adapters/AllAlertAdapterTest.java    |   168 -
 .../resources/AllAlertAdapterTest.properties    |    17 -
 .../TestSchemas/AllAlertAdapterSchema.json      |    42 -
 .../resources/config/AllAlertAdapterTest.config |    25 -
 metron-streaming/Metron-Common/.gitignore       |     1 -
 metron-streaming/Metron-Common/pom.xml          |   291 -
 .../main/java/org/apache/metron/Constants.java  |    35 -
 .../metron/alerts/interfaces/AlertsAdapter.java |    33 -
 .../alerts/interfaces/AlertsInterface.java      |    28 -
 .../metron/alerts/interfaces/TaggerAdapter.java |    26 -
 .../metron/bolt/BulkMessageWriterBolt.java      |   103 -
 .../org/apache/metron/bolt/ConfiguredBolt.java  |   124 -
 .../java/org/apache/metron/bolt/JoinBolt.java   |   130 -
 .../java/org/apache/metron/bolt/SplitBolt.java  |    98 -
 .../configuration/ConfigurationManager.java     |   136 -
 .../dataloads/interfaces/ThreatIntelSource.java |    28 -
 .../org/apache/metron/domain/Configuration.java |    60 -
 .../apache/metron/domain/Configurations.java    |   112 -
 .../org/apache/metron/domain/Enrichment.java    |    62 -
 .../metron/domain/SensorEnrichmentConfig.java   |   129 -
 .../metron/enrichment/EnrichmentConfig.java     |   203 -
 .../metron/enrichment/EnrichmentConstants.java  |    28 -
 .../interfaces/EnrichmentAdapter.java           |    29 -
 .../java/org/apache/metron/hbase/Connector.java |    42 -
 .../java/org/apache/metron/hbase/HBaseBolt.java |   181 -
 .../metron/hbase/HBaseStreamPartitioner.java    |   163 -
 .../apache/metron/hbase/HTableConnector.java    |   166 -
 .../org/apache/metron/hbase/HTableProvider.java |    31 -
 .../org/apache/metron/hbase/TableConfig.java    |   118 -
 .../org/apache/metron/hbase/TableProvider.java  |    28 -
 .../apache/metron/hbase/TupleTableConfig.java   |   276 -
 .../hbase/converters/AbstractConverter.java     |    96 -
 .../metron/hbase/converters/HbaseConverter.java |    40 -
 .../enrichment/EnrichmentConverter.java         |    38 -
 .../converters/enrichment/EnrichmentHelper.java |    36 -
 .../converters/enrichment/EnrichmentKey.java    |   119 -
 .../converters/enrichment/EnrichmentValue.java  |   107 -
 .../metron/hbase/lookup/EnrichmentLookup.java   |   108 -
 .../metron/helpers/services/PcapServiceCli.java |   127 -
 .../org/apache/metron/helpers/topology/Cli.java |   203 -
 .../metron/helpers/topology/ErrorUtils.java     |    64 -
 .../metron/helpers/topology/SettingsLoader.java |   166 -
 .../metron/index/interfaces/IndexAdapter.java   |    32 -
 .../org/apache/metron/ise/parser/ISEParser.java |   661 -
 .../org/apache/metron/ise/parser/ISEParser.jj   |    12 -
 .../metron/ise/parser/ISEParserConstants.java   |    69 -
 .../ise/parser/ISEParserTokenManager.java       |   676 -
 .../metron/ise/parser/JavaCharStream.java       |   633 -
 .../metron/ise/parser/ParseException.java       |   204 -
 .../org/apache/metron/ise/parser/Token.java     |   148 -
 .../apache/metron/ise/parser/TokenMgrError.java |   164 -
 .../json/serialization/JSONDecoderHelper.java   |   113 -
 .../json/serialization/JSONEncoderHelper.java   |    91 -
 .../json/serialization/JSONKafkaSerializer.java |   266 -
 .../json/serialization/JSONKryoSerializer.java  |    57 -
 .../apache/metron/metrics/MetricReporter.java   |   106 -
 .../apache/metron/metrics/MyMetricReporter.java |    50 -
 .../org/apache/metron/metrics/NullReporter.java |    27 -
 .../metron/parser/interfaces/MessageFilter.java |    24 -
 .../metron/parser/interfaces/MessageParser.java |    28 -
 .../java/org/apache/metron/pcap/Constants.java  |    38 -
 .../org/apache/metron/pcap/IEEE_802_1Q.java     |    44 -
 .../metron/pcap/MetronEthernetDecoder.java      |   134 -
 .../java/org/apache/metron/pcap/PacketInfo.java |   471 -
 .../apache/metron/pcap/PcapByteInputStream.java |   185 -
 .../metron/pcap/PcapByteOutputStream.java       |   305 -
 .../java/org/apache/metron/pcap/PcapMerger.java |   262 -
 .../metron/pcap/PcapPacketComparator.java       |    39 -
 .../java/org/apache/metron/pcap/PcapUtils.java  |   475 -
 .../apache/metron/reference/lookup/Lookup.java  |    96 -
 .../metron/reference/lookup/LookupKV.java       |    65 -
 .../metron/reference/lookup/LookupKey.java      |    23 -
 .../metron/reference/lookup/LookupValue.java    |    28 -
 .../lookup/accesstracker/AccessTracker.java     |    37 -
 .../lookup/accesstracker/AccessTrackerUtil.java |    83 -
 .../accesstracker/BloomAccessTracker.java       |   147 -
 .../lookup/accesstracker/NoopAccessTracker.java |    65 -
 .../accesstracker/PersistentAccessTracker.java  |   206 -
 .../reference/lookup/handler/Handler.java       |    29 -
 .../metron/spout/pcap/HDFSWriterCallback.java   |   169 -
 .../metron/spout/pcap/HDFSWriterConfig.java     |    97 -
 .../apache/metron/test/AbstractConfigTest.java  |   299 -
 .../apache/metron/test/AbstractSchemaTest.java  |   199 -
 .../apache/metron/test/AbstractTestContext.java |   183 -
 .../metron/tldextractor/BasicTldExtractor.java  |   154 -
 .../apache/metron/topology/TopologyUtils.java   |    28 -
 .../org/apache/metron/utils/ConfigUtils.java    |    48 -
 .../metron/utils/ConfigurationsUtils.java       |   231 -
 .../java/org/apache/metron/utils/JSONUtils.java |    86 -
 .../org/apache/metron/writer/HBaseWriter.java   |    88 -
 .../org/apache/metron/writer/PcapWriter.java    |    53 -
 .../writer/interfaces/BulkMessageWriter.java    |    31 -
 .../metron/writer/interfaces/MessageWriter.java |    27 -
 .../src/main/java/storm/kafka/Callback.java     |    26 -
 .../java/storm/kafka/CallbackCollector.java     |   182 -
 .../java/storm/kafka/CallbackKafkaSpout.java    |    93 -
 .../src/main/java/storm/kafka/EmitContext.java  |   146 -
 .../org/apache/metron/AbstractConfigTest.java   |   299 -
 .../org/apache/metron/AbstractSchemaTest.java   |   197 -
 .../org/apache/metron/AbstractTestContext.java  |   190 -
 .../metron/bolt/BulkMessageWriterBoltTest.java  |   149 -
 .../apache/metron/bolt/ConfiguredBoltTest.java  |   160 -
 .../org/apache/metron/bolt/JoinBoltTest.java    |   120 -
 .../org/apache/metron/bolt/SplitBoltTest.java   |   124 -
 .../apache/metron/domain/ConfigurationTest.java |    90 -
 .../metron/domain/ConfigurationsTest.java       |    39 -
 .../domain/SensorEnrichmentConfigTest.java      |    42 -
 .../metron/enrichment/EnrichmentConfigTest.java |   214 -
 .../enrichment/EnrichmentConverterTest.java     |    51 -
 .../org/apache/metron/pcap/PcapUtilsTest.java   |    31 -
 .../tldextractor/BasicTldExtractorTest.java     |    71 -
 .../metron/utils/ConfigurationsUtilsTest.java   |    94 -
 .../config/BasicTldExtractorTest.config         |    20 -
 .../src/test/resources/config/global.json       |     3 -
 .../src/test/resources/config/sensors/bro.json  |    19 -
 .../src/test/resources/effective_tld_names.dat  |  9719 -------
 metron-streaming/Metron-DataLoads/README.md     |   252 -
 metron-streaming/Metron-DataLoads/pom.xml       |   301 -
 .../src/main/assembly/assembly.xml              |    42 -
 .../src/main/bash/flatfile_loader.sh            |    39 -
 .../main/bash/prune_elasticsearch_indices.sh    |    21 -
 .../src/main/bash/prune_hdfs_files.sh           |    21 -
 .../src/main/bash/threatintel_bulk_load.sh      |    38 -
 .../src/main/bash/threatintel_bulk_prune.sh     |    37 -
 .../src/main/bash/threatintel_taxii_load.sh     |    39 -
 .../metron/dataloads/bulk/DataPruner.java       |    66 -
 .../dataloads/bulk/ElasticsearchDataPruner.java |   135 -
 .../bulk/ElasticsearchDataPrunerRunner.java     |   191 -
 .../metron/dataloads/bulk/HDFSDataPruner.java   |   226 -
 .../dataloads/bulk/LeastRecentlyUsedPruner.java |   221 -
 .../dataloads/bulk/StartDateException.java      |    31 -
 .../dataloads/bulk/ThreatIntelBulkLoader.java   |   259 -
 .../metron/dataloads/cif/HBaseTableLoad.java    |   255 -
 .../metron/dataloads/extractor/Extractor.java   |    30 -
 .../dataloads/extractor/ExtractorCreator.java   |    24 -
 .../dataloads/extractor/ExtractorHandler.java   |    79 -
 .../metron/dataloads/extractor/Extractors.java  |    58 -
 .../dataloads/extractor/csv/CSVExtractor.java   |   139 -
 .../extractor/csv/LookupConverter.java          |    30 -
 .../extractor/csv/LookupConverters.java         |    68 -
 .../extractor/inputformat/Formats.java          |    55 -
 .../inputformat/InputFormatHandler.java         |    28 -
 .../extractor/inputformat/WholeFileFormat.java  |   109 -
 .../dataloads/extractor/stix/StixExtractor.java |   132 -
 .../stix/types/AbstractObjectTypeHandler.java   |    36 -
 .../extractor/stix/types/AddressHandler.java    |    94 -
 .../extractor/stix/types/DomainHandler.java     |    77 -
 .../extractor/stix/types/HostnameHandler.java   |    70 -
 .../extractor/stix/types/ObjectTypeHandler.java |    31 -
 .../stix/types/ObjectTypeHandlers.java          |    42 -
 .../dataloads/hbase/mr/BulkLoadMapper.java      |    75 -
 .../metron/dataloads/hbase/mr/PrunerMapper.java |    81 -
 .../SimpleEnrichmentFlatFileLoader.java         |   263 -
 .../dataloads/nonbulk/taxii/ConnectionType.java |    23 -
 .../dataloads/nonbulk/taxii/TableInfo.java      |    71 -
 .../nonbulk/taxii/TaxiiConnectionConfig.java    |   222 -
 .../dataloads/nonbulk/taxii/TaxiiHandler.java   |   406 -
 .../dataloads/nonbulk/taxii/TaxiiLoader.java    |   208 -
 .../ElasticsearchDataPrunerIntegrationTest.java |   156 -
 .../bulk/ElasticsearchDataPrunerRunnerTest.java |    72 -
 .../bulk/ElasticsearchDataPrunerTest.java       |   210 -
 .../dataloads/bulk/HDFSDataPrunerTest.java      |   178 -
 .../dataloads/extractor/ExtractorTest.java      |    83 -
 .../extractor/csv/CSVExtractorTest.java         |    73 -
 .../extractor/stix/StixExtractorTest.java       |   142 -
 .../hbase/HBaseEnrichmentConverterTest.java     |    74 -
 .../hbase/mr/BulkLoadMapperIntegrationTest.java |   105 -
 .../dataloads/hbase/mr/BulkLoadMapperTest.java  |    91 -
 .../metron/dataloads/hbase/mr/HBaseUtil.java    |    72 -
 .../LeastRecentlyUsedPrunerIntegrationTest.java |   138 -
 .../nonbulk/taxii/MockTaxiiService.java         |    94 -
 .../nonbulk/taxii/TaxiiIntegrationTest.java     |   119 -
 .../src/test/resources/log4j.properties         |    24 -
 .../resources/taxii-messages/message.discovery  |    21 -
 .../test/resources/taxii-messages/messages.poll |  2914 --
 metron-streaming/Metron-DataServices/README.md  |    16 -
 .../Metron-DataServices/conf/config.properties  |    37 -
 metron-streaming/Metron-DataServices/pom.xml    |   514 -
 .../metron/alerts/server/AlertsCacheReaper.java |    62 -
 .../alerts/server/AlertsFilterCacheEntry.java   |    34 -
 .../alerts/server/AlertsProcessingServer.java   |    60 -
 .../metron/alerts/server/AlertsSearcher.java    |   252 -
 .../org/apache/metron/dataservices/Main.java    |   305 -
 .../metron/dataservices/auth/AuthToken.java     |   204 -
 .../dataservices/auth/AuthTokenFilter.java      |    32 -
 .../dataservices/auth/CustomDomainADRealm.java  |    51 -
 .../auth/RestSecurityInterceptor.java           |    74 -
 .../dataservices/common/MetronService.java      |    44 -
 .../dataservices/kafkaclient/KafkaClient.java   |    98 -
 .../dataservices/kafkaclient/KafkaConsumer.java |    64 -
 .../kafkaclient/poll/PollingKafkaClient.java    |   118 -
 .../kafkaclient/poll/PollingKafkaConsumer.java  |    69 -
 .../modules/guice/AlertsServerModule.java       |    53 -
 .../modules/guice/DefaultServletModule.java     |    64 -
 .../modules/guice/DefaultShiroWebModule.java    |   107 -
 .../modules/guice/RestEasyModule.java           |    39 -
 .../modules/guice/ServiceModule.java            |    50 -
 .../apache/metron/dataservices/rest/Index.java  |    70 -
 .../metron/dataservices/rest/RestServices.java  |    50 -
 .../dataservices/servlet/LoginServlet.java      |   130 -
 .../dataservices/servlet/LogoutServlet.java     |    60 -
 .../websocket/KafkaMessageSenderServlet.java    |    43 -
 .../websocket/KafkaMessageSenderSocket.java     |   137 -
 .../websocket/KafkaWebSocketCreator.java        |    76 -
 .../pcapservice/CellTimestampComparator.java    |    40 -
 .../metron/pcapservice/ConfigurationUtil.java   |   286 -
 .../pcapservice/HBaseConfigConstants.java       |    57 -
 .../pcapservice/HBaseConfigurationUtil.java     |   179 -
 .../apache/metron/pcapservice/IPcapGetter.java  |   102 -
 .../apache/metron/pcapservice/IPcapScanner.java |    66 -
 .../metron/pcapservice/PcapGetterHBaseImpl.java |   826 -
 .../apache/metron/pcapservice/PcapHelper.java   |   222 -
 .../pcapservice/PcapReceiverImplRestEasy.java   |   273 -
 .../pcapservice/PcapScannerHBaseImpl.java       |   319 -
 .../metron/pcapservice/PcapsResponse.java       |   167 -
 .../metron/pcapservice/RestTestingUtil.java     |   255 -
 .../pcapservice/rest/JettyServiceRunner.java    |    43 -
 .../metron/pcapservice/rest/PcapService.java    |    51 -
 .../ElasticSearch_KafkaAlertsService.java       |   105 -
 .../alerts/Solr_KafkaAlertsService.java         |   106 -
 .../main/resources/config-definition-hbase.xml  |    50 -
 .../resources/hbase-config-default.properties   |    57 -
 .../src/main/resources/log4j.xml                |    33 -
 .../main/resources/webroot/WEB-INF/shiro.ini    |    45 -
 .../src/main/resources/webroot/include.jsp      |    20 -
 .../src/main/resources/webroot/logged_in.jsp    |    38 -
 .../src/main/resources/webroot/login.jsp        |    70 -
 .../src/main/resources/webroot/login_failed.jsp |    30 -
 .../src/main/resources/webroot/login_old.jsp    |    38 -
 .../src/main/resources/webroot/withsocket.jsp   |   116 -
 .../src/main/resources/webroot/withsocket2.jsp  |    89 -
 .../src/main/resources/webroot/withsocket3.jsp  |   116 -
 .../CellTimestampComparatorTest.java            |   109 -
 metron-streaming/Metron-Elasticsearch/pom.xml   |   207 -
 .../src/main/assembly/assembly.xml              |    41 -
 .../metron/writer/ElasticsearchWriter.java      |    94 -
 .../etc/env/elasticsearch.properties            |   109 -
 .../ElasticsearchEnrichmentIntegrationTest.java |    88 -
 .../components/ElasticSearchComponent.java      |   186 -
 .../src/test/resources/log4j.properties         |    24 -
 .../src/test/resources/log4j2.xml               |    31 -
 .../Metron-EnrichmentAdapters/README.md         |   125 -
 .../Metron-EnrichmentAdapters/pom.xml           |   184 -
 .../adapters/cif/AbstractCIFAdapter.java        |    47 -
 .../adapters/cif/CIFHbaseAdapter.java           |   138 -
 .../enrichment/adapters/geo/GeoAdapter.java     |    76 -
 .../adapters/host/AbstractHostAdapter.java      |    47 -
 .../adapters/host/HostFromJSONListAdapter.java  |    78 -
 .../host/HostFromPropertiesFileAdapter.java     |    65 -
 .../adapters/jdbc/BaseJdbcConfig.java           |    70 -
 .../enrichment/adapters/jdbc/JdbcAdapter.java   |    83 -
 .../enrichment/adapters/jdbc/JdbcConfig.java    |    26 -
 .../enrichment/adapters/jdbc/MySqlConfig.java   |    39 -
 .../simplehbase/SimpleHBaseAdapter.java         |   119 -
 .../adapters/simplehbase/SimpleHBaseConfig.java |    55 -
 .../threatintel/ThreatIntelAdapter.java         |   135 -
 .../adapters/threatintel/ThreatIntelConfig.java |   108 -
 .../adapters/whois/WhoisHBaseAdapter.java       |   150 -
 .../apache/metron/enrichment/bolt/CacheKey.java |    73 -
 .../enrichment/bolt/EnrichmentJoinBolt.java     |   100 -
 .../enrichment/bolt/EnrichmentSplitterBolt.java |   143 -
 .../enrichment/bolt/GenericEnrichmentBolt.java  |   225 -
 .../enrichment/bolt/ThreatIntelJoinBolt.java    |    60 -
 .../bolt/ThreatIntelSplitterBolt.java           |    40 -
 .../enrichment/utils/EnrichmentUtils.java       |    81 -
 .../enrichment/utils/ThreatIntelUtils.java      |    32 -
 .../enrichment/adapters/geo/GeoAdapterTest.java |    94 -
 .../host/HostFromJSONListAdapterTest.java       |    83 -
 .../host/HostFromPropertiesFileAdapterTest.java |   106 -
 .../adapters/jdbc/MySqlConfigTest.java          |    44 -
 .../simplehbase/SimpleHBaseAdapterTest.java     |   117 -
 .../simplehbase/SimpleHBaseConfigTest.java      |    42 -
 .../threatintel/ThreatIntelAdapterTest.java     |   148 -
 .../threatintel/ThreatIntelConfigTest.java      |    53 -
 .../enrichment/bolt/EnrichmentJoinBoltTest.java |    86 -
 .../bolt/EnrichmentSplitterBoltTest.java        |    97 -
 .../bolt/GenericEnrichmentBoltTest.java         |   195 -
 .../bolt/ThreatIntelJoinBoltTest.java           |   105 -
 .../bolt/ThreatIntelSplitterBoltTest.java       |    45 -
 .../resources/CIFHbaseAdapterTest.properties    |    27 -
 .../resources/GeoMysqlAdapterTest.properties    |    27 -
 .../resources/TestSchemas/CIFHbaseSchema.json   |     0
 .../resources/TestSchemas/GeoMySqlSchema.json   |    42 -
 .../resources/TestSchemas/WhoisHbaseSchema.json |     0
 .../resources/WhoisHbaseAdapterTest.properties  |    28 -
 metron-streaming/Metron-Indexing/README.md      |    61 -
 metron-streaming/Metron-Indexing/pom.xml        |   127 -
 .../apache/metron/writer/hdfs/HdfsWriter.java   |    94 -
 .../writer/hdfs/SourceAwareMoveAction.java      |    48 -
 .../writer/hdfs/SourceFileNameFormat.java       |    48 -
 .../metron/writer/hdfs/SourceHandler.java       |   160 -
 .../Metron-MessageParsers/README.md             |    82 -
 metron-streaming/Metron-MessageParsers/pom.xml  |   155 -
 .../java/org/apache/metron/bolt/ParserBolt.java |    88 -
 .../org/apache/metron/bolt/PcapParserBolt.java  |    48 -
 .../apache/metron/bolt/TelemetryParserBolt.java |   110 -
 .../apache/metron/filters/BroMessageFilter.java |    62 -
 .../metron/filters/GenericMessageFilter.java    |    34 -
 .../org/apache/metron/parser/MessageParser.java |    25 -
 .../metron/parsing/parsers/BasicBroParser.java  |   158 -
 .../parsing/parsers/BasicFireEyeParser.java     |   217 -
 .../metron/parsing/parsers/BasicIseParser.java  |    95 -
 .../parsing/parsers/BasicLancopeParser.java     |    89 -
 .../parsing/parsers/BasicLogstashParser.java    |    88 -
 .../parsers/BasicPaloAltoFirewallParser.java    |   208 -
 .../metron/parsing/parsers/BasicParser.java     |    71 -
 .../parsing/parsers/BasicSnortParser.java       |   162 -
 .../parsing/parsers/BasicSourcefireParser.java  |   121 -
 .../metron/parsing/parsers/BasicYafParser.java  |   209 -
 .../metron/parsing/parsers/GrokAsaParser.java   |   279 -
 .../metron/parsing/parsers/GrokParser.java      |   170 -
 .../parsing/parsers/GrokSourcefireParser.java   |   100 -
 .../metron/parsing/parsers/JSONCleaner.java     |    95 -
 .../metron/parsing/parsers/MetronConverter.java |   200 -
 .../metron/parsing/parsers/MetronGarbage.java   |   147 -
 .../metron/parsing/parsers/MetronGrok.java      |   385 -
 .../metron/parsing/parsers/MetronMatch.java     |   297 -
 .../metron/parsing/parsers/PcapParser.java      |   229 -
 .../apache/metron/parsing/utils/GrokUtils.java  |    43 -
 .../metron/parsing/utils/ParserUtils.java       |    72 -
 .../org/apache/metron/writer/KafkaWriter.java   |    79 -
 .../src/main/resources/patterns/asa             |   176 -
 .../src/main/resources/patterns/common          |    96 -
 .../src/main/resources/patterns/fireeye         |     9 -
 .../src/main/resources/patterns/sourcefire      |    30 -
 .../src/main/resources/patterns/yaf             |     2 -
 .../org/apache/metron/bolt/ParserBoltTest.java  |    90 -
 .../metron/parsing/parsers/GrokParserTest.java  |   114 -
 .../metron/parsing/test/BasicBroParserTest.java |   163 -
 .../parsing/test/BasicFireEyeParserTest.java    |   162 -
 .../metron/parsing/test/BasicIseParserTest.java |   171 -
 .../parsing/test/BasicLancopeParserTest.java    |   162 -
 .../test/BasicPaloAltoFirewallParserTest.java   |   155 -
 .../parsing/test/BasicSourcefireParserTest.java |   157 -
 .../metron/parsing/test/BroParserTest.java      |   164 -
 .../metron/parsing/test/GrokAsaParserTest.java  |   167 -
 .../src/test/resources/BroParserTest.log        |     4 -
 .../src/test/resources/FireEyeParserTest.log    |     8 -
 .../src/test/resources/GrokParserTest.log       |    12 -
 .../src/test/resources/IseParserTest.log        |   308 -
 .../src/test/resources/LancopeParserTest.log    |     1 -
 .../resources/PaloAltoFirewallParserTest.log    |     2 -
 .../src/test/resources/SourceFireTest.log       |     3 -
 .../test/resources/TestSchemas/BroSchema.json   |    28 -
 .../test/resources/TestSchemas/IseSchema.json   |    21 -
 .../resources/TestSchemas/LancopeSchema.json    |    28 -
 .../test/resources/TestSchemas/PcapSchema.json  |    22 -
 .../resources/TestSchemas/SourcefireSchema.json |    34 -
 .../config/BasicFireEyeParserTest.config        |    20 -
 .../resources/config/BasicIseParserTest.config  |    20 -
 .../config/BasicLancopeParserTest.config        |    20 -
 .../BasicPaloAltoFirewallParserTest.config      |    20 -
 .../config/BasicSourcefireParserTest.config     |    20 -
 .../test/resources/config/BroParserTest.config  |    20 -
 .../resources/config/GrokAsaParserTest.config   |    20 -
 .../src/test/resources/effective_tld_names.dat  |  9719 -------
 metron-streaming/Metron-Pcap_Service/README.txt |    16 -
 metron-streaming/Metron-Pcap_Service/pom.xml    |   296 -
 .../OnlyDeleteExpiredFilesCompactionPolicy.java |    54 -
 .../pcapservice/CellTimestampComparator.java    |    40 -
 .../metron/pcapservice/ConfigurationUtil.java   |   286 -
 .../pcapservice/HBaseConfigConstants.java       |    57 -
 .../pcapservice/HBaseConfigurationUtil.java     |   179 -
 .../apache/metron/pcapservice/IPcapGetter.java  |   102 -
 .../apache/metron/pcapservice/IPcapScanner.java |    66 -
 .../metron/pcapservice/PcapGetterHBaseImpl.java |   826 -
 .../apache/metron/pcapservice/PcapHelper.java   |   222 -
 .../pcapservice/PcapReceiverImplRestEasy.java   |   267 -
 .../pcapservice/PcapScannerHBaseImpl.java       |   319 -
 .../metron/pcapservice/PcapsResponse.java       |   167 -
 .../metron/pcapservice/RestTestingUtil.java     |   329 -
 .../pcapservice/rest/JettyServiceRunner.java    |    43 -
 .../metron/pcapservice/rest/PcapService.java    |    52 -
 .../main/resources/config-definition-hbase.xml  |    50 -
 .../resources/hbase-config-default.properties   |    57 -
 .../CellTimestampComparatorTest.java            |   109 -
 .../pcapservice/ConfigurationUtilTest.java      |    67 -
 .../pcapservice/HBaseConfigurationUtilTest.java |    69 -
 .../pcapservice/HBaseIntegrationTest.java       |    88 -
 .../pcapservice/PcapGetterHBaseImplTest.java    |   553 -
 .../metron/pcapservice/PcapHelperTest.java      |   335 -
 .../pcapservice/PcapScannerHBaseImplTest.java   |   249 -
 .../src/test/resources/hbase-config.properties  |    57 -
 .../src/test/resources/test-tcp-packet.pcap     |   Bin 144 -> 0 bytes
 .../WhoisEnrichment/Whois_CSV_to_JSON.py        |   208 -
 metron-streaming/Metron-Solr/pom.xml            |   209 -
 .../Metron-Solr/src/main/assembly/assembly.xml  |    41 -
 .../org/apache/metron/solr/SolrConstants.java   |    29 -
 .../metron/writer/solr/MetronSolrClient.java    |    72 -
 .../apache/metron/writer/solr/SolrWriter.java   |   108 -
 .../Metron_Configs/etc/env/solr.properties      |   109 -
 .../SolrEnrichmentIntegrationTest.java          |   107 -
 .../integration/components/SolrComponent.java   |   153 -
 .../writer/solr/MetronSolrClientTest.java       |    82 -
 .../metron/writer/solr/SolrWriterTest.java      |   139 -
 .../src/test/resources/log4j.properties         |    24 -
 .../Metron-Solr/src/test/resources/log4j2.xml   |    31 -
 .../test/resources/solr/conf/_rest_managed.json |     1 -
 .../src/test/resources/solr/conf/currency.xml   |    67 -
 .../resources/solr/conf/lang/stopwords_en.txt   |    54 -
 .../src/test/resources/solr/conf/protwords.txt  |    21 -
 .../src/test/resources/solr/conf/schema.xml     |   191 -
 .../src/test/resources/solr/conf/solrconfig.xml |   583 -
 .../src/test/resources/solr/conf/stopwords.txt  |    14 -
 .../src/test/resources/solr/conf/synonyms.txt   |    29 -
 .../src/test/resources/solr/solr.xml            |    14 -
 metron-streaming/Metron-Testing/pom.xml         |   120 -
 .../metron/integration/BaseIntegrationTest.java |    48 -
 .../integration/EnrichmentIntegrationTest.java  |   470 -
 .../metron/integration/util/TestUtils.java      |    37 -
 .../metron/integration/util/UnitTestHelper.java |    84 -
 .../util/integration/ComponentRunner.java       |   152 -
 .../util/integration/InMemoryComponent.java     |    23 -
 .../integration/util/integration/Processor.java |    23 -
 .../util/integration/ReadinessState.java        |    22 -
 .../integration/UnableToStartException.java     |    27 -
 .../components/FluxTopologyComponent.java       |   132 -
 .../components/KafkaWithZKComponent.java        |   228 -
 .../util/integration/util/KafkaUtil.java        |    41 -
 .../util/integration/util/PcapTestUtil.java     |    77 -
 .../integration/util/mock/MockGeoAdapter.java   |    64 -
 .../util/mock/MockHBaseConnector.java           |    52 -
 .../java/org/apache/metron/util/SampleUtil.java |    41 -
 .../main/resources/sample/config/global.json    |    10 -
 .../resources/sample/config/sensors/bro.json    |    19 -
 .../resources/sample/config/sensors/pcap.json   |    13 -
 .../resources/sample/config/sensors/snort.json  |    19 -
 .../resources/sample/config/sensors/yaf.json    |    25 -
 .../sample/data/SampleIndexed/YafIndexed        |    10 -
 .../data/SampleInput/.PCAPExampleOutput.crc     |   Bin 44 -> 0 bytes
 .../resources/sample/data/SampleInput/AsaOutput |   100 -
 .../sample/data/SampleInput/BroExampleOutput    | 23411 -----------------
 .../data/SampleInput/FireeyeExampleOutput       |    90 -
 .../sample/data/SampleInput/ISESampleOutput     |   308 -
 .../data/SampleInput/LancopeExampleOutput       |    40 -
 .../sample/data/SampleInput/PCAPExampleOutput   |   Bin 4510 -> 0 bytes
 .../sample/data/SampleInput/PaloaltoOutput      |   100 -
 .../sample/data/SampleInput/SnortOutput         |     3 -
 .../data/SampleInput/SourcefireExampleOutput    |     2 -
 .../sample/data/SampleInput/YafExampleOutput    |    10 -
 .../sample/data/SampleParsed/SnortParsed        |     3 -
 .../sample/data/SampleParsed/YafExampleParsed   |    10 -
 .../Metron-TestingUtilities/pom.xml             |   117 -
 .../org/apache/metron/bolt/BaseBoltTest.java    |    93 -
 .../metron/bolt/BaseEnrichmentBoltTest.java     |    93 -
 .../integration/util/mock/MockHTable.java       |   672 -
 metron-streaming/Metron-Topologies/README.md    |    39 -
 metron-streaming/Metron-Topologies/pom.xml      |   324 -
 .../src/main/assembly/assembly.xml              |    74 -
 .../src/main/bash/latency_summarizer.sh         |    32 -
 .../main/bash/start_elasticsearch_topology.sh   |    22 -
 .../src/main/bash/start_solr_topology.sh        |    22 -
 .../src/main/bash/zk_load_configs.sh            |    33 -
 .../apache/metron/test/bolts/PrintingBolt.java  |    49 -
 .../test/converters/BinaryConverters.java       |    37 -
 .../test/converters/HexStringConverter.java     |    30 -
 .../metron/test/converters/IConverter.java      |    22 -
 .../metron/test/filereaders/FileReader.java     |    54 -
 .../test/spouts/GenericInternalTestSpout.java   |   139 -
 .../metron/test/spouts/PcapSimulatorSpout.java  |   170 -
 .../org/apache/metron/utils/KafkaLoader.java    |    88 -
 .../apache/metron/utils/LatencySummarizer.java  |   188 -
 .../Metron_Configs/etc/env/config.properties    |   112 -
 .../etc/whitelists/known_hosts.conf             |    21 -
 .../Metron_Configs/topologies/asa/remote.yaml   |    82 -
 .../Metron_Configs/topologies/asa/test.yaml     |    82 -
 .../Metron_Configs/topologies/bro/remote.yaml   |    71 -
 .../Metron_Configs/topologies/bro/test.yaml     |    72 -
 .../topologies/enrichment/remote.yaml           |   413 -
 .../topologies/enrichment/test.yaml             |   394 -
 .../topologies/fireeye/remote.yaml              |    79 -
 .../Metron_Configs/topologies/fireeye/test.yaml |    79 -
 .../Metron_Configs/topologies/ise/remote.yaml   |    79 -
 .../Metron_Configs/topologies/ise/test.yaml     |    79 -
 .../topologies/lancope/remote.yaml              |    79 -
 .../Metron_Configs/topologies/lancope/test.yaml |    79 -
 .../topologies/paloalto/remote.yaml             |    79 -
 .../topologies/paloalto/test.yaml               |    79 -
 .../Metron_Configs/topologies/pcap/remote.yaml  |    70 -
 .../Metron_Configs/topologies/pcap/test.yaml    |    74 -
 .../Metron_Configs/topologies/snort/remote.yaml |    69 -
 .../Metron_Configs/topologies/snort/test.yaml   |    69 -
 .../topologies/sourcefire/remote.yaml           |    79 -
 .../topologies/sourcefire/test.yaml             |    79 -
 .../Metron_Configs/topologies/yaf/remote.yaml   |    84 -
 .../Metron_Configs/topologies/yaf/test.yaml     |    88 -
 .../resources/TopologyConfigs_old/lancope.conf  |   108 -
 .../src/main/resources/effective_tld_names.dat  |  9719 -------
 .../integration/ParserIntegrationTest.java      |   130 -
 .../integration/PcapParserIntegrationTest.java  |   213 -
 .../integration/SnortIntegrationTest.java       |    46 -
 .../metron/integration/YafIntegrationTest.java  |    46 -
 .../src/test/resources/log4j.properties         |    24 -
 .../src/test/resources/log4j2.xml               |    31 -
 metron-streaming/README.md                      |    30 -
 metron-streaming/pom.xml                        |   277 -
 metron-streaming/style/LICENSE.config           |    16 -
 metron-streaming/style/LICENSE.java             |    17 -
 metron-streaming/style/LICENSE.xml              |    16 -
 metron-streaming/style/checkstyle.xml           |    33 -
 1296 files changed, 107494 insertions(+), 121870 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 9e428e9..ad6b444 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,4 +3,4 @@ jdk:
   - oraclejdk8
 script:
   - |
-    mvn apache-rat:check && cd metron-streaming && mvn -q integration-test package
+    mvn apache-rat:check && cd metron-platform && mvn -q integration-test package

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/.gitignore
----------------------------------------------------------------------
diff --git a/deployment/.gitignore b/deployment/.gitignore
deleted file mode 100644
index 4dd9982..0000000
--- a/deployment/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-keys/
-.vagrant
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/README.md
----------------------------------------------------------------------
diff --git a/deployment/README.md b/deployment/README.md
deleted file mode 100644
index 38ffb34..0000000
--- a/deployment/README.md
+++ /dev/null
@@ -1,97 +0,0 @@
-# Overview
-This set of playbooks can be used to deploy an Ambari-managed Hadoop cluster, Metron services, or both using ansible
-playbooks. These playbooks currently only target RHEL/CentOS 6.x operating
-systems. 
-
-## Prerequisites
-The following tools are required to run these scripts:
-
-- Maven - https://maven.apache.org/
-- Git - https://git-scm.com/
-- Ansible - http://www.ansible.com/ (version 2.0 or greater)
-
-Currently Metron must be built from source.  Before running these scripts perform the following steps:
-
-1. Clone the Metron git repository with `git clone git@github.com:apache/incubator-metron.git`
-2. Navigate to `incubator-metron/metron-streaming` and run `mvn clean package`
-
-These scripts depend on two files for configuration:
-  
-- hosts - declares which Ansible roles will be run on which hosts
-- group_vars/all - various configuration settings needed to install Metron
-
-Examples can be found in the
-`incubator-metron/deployment/inventory/metron_example` directory and are a good starting point.  Copy this directory 
-into `incubator-metron/deployment/inventory/` and rename it to your `project_name`.  More information about Ansible files and directory 
-structure can be found at http://docs.ansible.com/ansible/playbooks_best_practices.html.
-
-## Ambari
-The Ambari playbook will install a Hadoop cluster with all the services and configuration required by Metron.  This
-section can be skipped if installing Metron on a pre-existing cluster.  
-
-Currently, this playbook supports building a local development cluster running on one node but options for other types
- of clusters will be added in the future.
-
-### Setting up your inventory
-Make sure to update the hosts file in `incubator-metron/deployment/inventory/project_name/hosts` or provide an 
-alternate inventory file when you launch the playbooks, including the 
-ssh user(s) and ssh keyfile location(s). These playbooks expect two 
-host groups:
-
-- ambari_master
-- ambari_slaves
-
-### Running the playbook
-This playbook will install the Ambari server on the ambari_master, install the ambari agents on 
-the ambari_slaves, and create a cluster in Ambari with a blueprint for the required 
-Metron components.
-
-Navigate to `incubator-metron/deployment/playbooks` and run: 
-`ansible-playbook -i ../inventory/project_name ambari_install.yml`
-
-## Metron
-The Metron playbook will gather the necessary cluster settings from Ambari and install the Metron services.
-
-### Setting up your inventory
-Edit the hosts file at `incubator-metron/deployment/inventory/project_name/hosts`.  Declare where which hosts the 
-Metron services will be installed on by updating these groups:
-
-- enrichment - submits the topology code to Storm and requires a storm client
-- search - host where Elasticsearch will be run
-- web - host where the Metron UI and underlying services will run
-- sensors - host where network data will be collected and published to Kafka
-
-The Metron topologies depend on Kafka topics and HBase tables being created beforehand.  Declare a host that has Kafka
- and HBase clients installed by updating this group:
-
-- hadoop_client
-
-If only installing Metron, these groups can be ignored:
-
-- ambari_master
-- ambari_slaves
-
-### Configuring group variables
-The Metron Ansible scripts depend on a set of variables.  These variables can be found in the file at 
-`incubator-metron/deployment/inventory/project_name/group_vars/all`.  Edit the ambari* variables to match your Ambari
-instance and update the java_home variable to match the java path on your hosts.
-
-### Running the playbook
-Navigate to `incubator-metron/deployment/playbooks` and run: 
-`ansible-playbook -i ../inventory/project_name metron_install.yml`
-
-## Vagrant
-A VagrantFile is included and will install a working version of the entire Metron stack.  The following is required to
-run this:
-
-- Vagrant - https://www.vagrantup.com/
-- Hostmanager plugin for vagrant - Run `vagrant plugin install vagrant-hostmanager` on the machine where Vagrant is
-installed
-
-Navigate to `incubator-metron/deployment/vagrant/singlenode-vagrant` and run `vagrant up`.  This also provides a good
-example of how to run a full end-to-end Metron install.
-
-
-## TODO
-- migrate existing MySQL/GeoLite playbook
-- Support Ubuntu deployments

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/amazon-ec2/.gitignore
----------------------------------------------------------------------
diff --git a/deployment/amazon-ec2/.gitignore b/deployment/amazon-ec2/.gitignore
deleted file mode 100644
index 9c214d2..0000000
--- a/deployment/amazon-ec2/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-*.pem
-*.secret
-*.log
-*.retry


[28/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/flux/enrichment/test.yaml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/flux/enrichment/test.yaml b/metron-platform/metron-enrichment/src/main/flux/enrichment/test.yaml
new file mode 100644
index 0000000..dcc507c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/flux/enrichment/test.yaml
@@ -0,0 +1,394 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: "enrichment"
+config:
+    topology.workers: 1
+
+components:
+# Enrichment
+    -   id: "geoEnrichmentAdapter"
+        className: "org.apache.metron.integration.mock.MockGeoAdapter"
+    -   id: "geoEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+            -   "geo"
+            -   ref: "geoEnrichmentAdapter"
+    -   id: "hostEnrichmentAdapter"
+        className: "org.apache.metron.enrichment.adapters.host.HostFromJSONListAdapter"
+        constructorArgs:
+            - '${org.apache.metron.enrichment.host.known_hosts}'
+    -   id: "hostEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+            -   "host"
+            -   ref: "hostEnrichmentAdapter"
+
+    -   id: "simpleHBaseEnrichmentConfig"
+        className: "org.apache.metron.enrichment.adapters.simplehbase.SimpleHBaseConfig"
+        configMethods:
+            -   name: "withProviderImpl"
+                args:
+                    - "${hbase.provider.impl}"
+            -   name: "withHBaseTable"
+                args:
+                    - "${enrichment.simple.hbase.table}"
+            -   name: "withHBaseCF"
+                args:
+                    - "${enrichment.simple.hbase.cf}"
+    -   id: "simpleHBaseEnrichmentAdapter"
+        className: "org.apache.metron.enrichment.adapters.simplehbase.SimpleHBaseAdapter"
+        configMethods:
+           -    name: "withConfig"
+                args:
+                    - ref: "simpleHBaseEnrichmentConfig"
+    -   id: "simpleHBaseEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+          -   "hbaseEnrichment"
+          -   ref: "simpleHBaseEnrichmentAdapter"
+    -   id: "enrichments"
+        className: "java.util.ArrayList"
+        configMethods:
+            -   name: "add"
+                args:
+                    - ref: "geoEnrichment"
+            -   name: "add"
+                args:
+                    - ref: "hostEnrichment"
+            -   name: "add"
+                args:
+                    - ref: "simpleHBaseEnrichment"
+# Threat Intel
+
+    -   id: "simpleHBaseThreatIntelConfig"
+        className: "org.apache.metron.enrichment.adapters.threatintel.ThreatIntelConfig"
+        configMethods:
+            -   name: "withProviderImpl"
+                args:
+                    - "${hbase.provider.impl}"
+            -   name: "withTrackerHBaseTable"
+                args:
+                    - "${threat.intel.tracker.table}"
+            -   name: "withTrackerHBaseCF"
+                args:
+                    - "${threat.intel.tracker.cf}"
+            -   name: "withHBaseTable"
+                args:
+                    - "${threat.intel.simple.hbase.table}"
+            -   name: "withHBaseCF"
+                args:
+                    - "${threat.intel.simple.hbase.cf}"
+    -   id: "simpleHBaseThreatIntelAdapter"
+        className: "org.apache.metron.enrichment.adapters.threatintel.ThreatIntelAdapter"
+        configMethods:
+           -    name: "withConfig"
+                args:
+                    - ref: "simpleHBaseThreatIntelConfig"
+    -   id: "simpleHBaseThreatIntelEnrichment"
+        className: "org.apache.metron.enrichment.configuration.Enrichment"
+        constructorArgs:
+          -   "hbaseThreatIntel"
+          -   ref: "simpleHBaseThreatIntelAdapter"
+
+    -   id: "threatIntels"
+        className: "java.util.ArrayList"
+        configMethods:
+            -   name: "add"
+                args:
+                    - ref: "simpleHBaseThreatIntelEnrichment"
+
+    -   id: "fileNameFormat"
+        className: "org.apache.storm.hdfs.bolt.format.DefaultFileNameFormat"
+        configMethods:
+            -   name: "withPrefix"
+                args:
+                    - "enrichment-"
+            -   name: "withExtension"
+                args:
+                  - ".json"
+            -   name: "withPath"
+                args:
+                    - "${index.hdfs.output}"
+#indexing
+    -   id: "hdfsWriter"
+        className: "org.apache.metron.writer.hdfs.HdfsWriter"
+        configMethods:
+            -   name: "withFileNameFormat"
+                args:
+                    - ref: "fileNameFormat"
+    -   id: "indexWriter"
+        className: "${writer.class.name}"
+
+#kafka/zookeeper
+    -   id: "zkHosts"
+        className: "storm.kafka.ZkHosts"
+        constructorArgs:
+            - "${kafka.zk}"
+    -   id: "kafkaConfig"
+        className: "storm.kafka.SpoutConfig"
+        constructorArgs:
+            # zookeeper hosts
+            - ref: "zkHosts"
+            # topic name
+            - "enrichments"
+            # zk root
+            - ""
+            # id
+            - "enrichments"
+        properties:
+            -   name: "ignoreZkOffsets"
+                value: true
+            -   name: "startOffsetTime"
+                value: -2
+
+spouts:
+    -   id: "testingSpout"
+        className: "org.apache.metron.test.spouts.GenericInternalTestSpout"
+        parallelism: 1
+        configMethods:
+            -   name: "withFilename"
+                args:
+                    - "../metron-integration-test/src/main/resources/sample/data/SampleInput/YafExampleOutput"
+            -   name: "withRepeating"
+                args:
+                    - true
+    -   id: "kafkaSpout"
+        className: "storm.kafka.KafkaSpout"
+        constructorArgs:
+            - ref: "kafkaConfig"
+bolts:
+# Enrichment Bolts
+    -   id: "enrichmentSplitBolt"
+        className: "org.apache.metron.enrichment.bolt.EnrichmentSplitterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichments"
+                args:
+                    - ref: "enrichments"
+    -   id: "geoEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "geoEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "hostEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "hostEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "simpleHBaseEnrichmentBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "simpleHBaseEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "enrichmentJoinBolt"
+        className: "org.apache.metron.enrichment.bolt.EnrichmentJoinBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+
+# Threat Intel Bolts
+    -   id: "threatIntelSplitBolt"
+        className: "org.apache.metron.enrichment.bolt.ThreatIntelSplitterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichments"
+                args:
+                    - ref: "threatIntels"
+            -   name: "withMessageFieldName"
+                args: ["message"]
+    -   id: "simpleHBaseThreatIntelBolt"
+        className: "org.apache.metron.enrichment.bolt.GenericEnrichmentBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withEnrichment"
+                args:
+                    - ref: "simpleHBaseThreatIntelEnrichment"
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+    -   id: "threatIntelJoinBolt"
+        className: "org.apache.metron.enrichment.bolt.ThreatIntelJoinBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withMaxCacheSize"
+                args: [10000]
+            -   name: "withMaxTimeRetain"
+                args: [10]
+# Indexing Bolts
+    -   id: "indexingBolt"
+        className: "org.apache.metron.enrichment.bolt.BulkMessageWriterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withBulkMessageWriter"
+                args:
+                    - ref: "indexWriter"
+    -   id: "hdfsIndexingBolt"
+        className: "org.apache.metron.enrichment.bolt.BulkMessageWriterBolt"
+        constructorArgs:
+            - "${kafka.zk}"
+        configMethods:
+            -   name: "withBulkMessageWriter"
+                args:
+                    - ref: "hdfsWriter"
+
+
+streams:
+#parser
+    -   name: "spout -> enrichmentSplit"
+        from: "kafkaSpout"
+        to: "enrichmentSplitBolt"
+        grouping:
+            type: SHUFFLE
+
+#enrichment
+    -   name: "enrichmentSplit -> host"
+        from: "enrichmentSplitBolt"
+        to: "hostEnrichmentBolt"
+        grouping:
+            streamId: "host"
+            type: FIELDS
+            args: ["key"]
+    -   name: "enrichmentSplit -> geo"
+        from: "enrichmentSplitBolt"
+        to: "geoEnrichmentBolt"
+        grouping:
+            streamId: "geo"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "enrichmentSplit -> simpleHBaseEnrichmentBolt"
+        from: "enrichmentSplitBolt"
+        to: "simpleHBaseEnrichmentBolt"
+        grouping:
+            streamId: "hbaseEnrichment"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "splitter -> join"
+        from: "enrichmentSplitBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+    -   name: "geo -> join"
+        from: "geoEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "geo"
+            type: FIELDS
+            args: ["key"]
+
+
+    -   name: "simpleHBaseEnrichmentBolt -> join"
+        from: "simpleHBaseEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "hbaseEnrichment"
+            type: FIELDS
+            args: ["key"]
+    -   name: "host -> join"
+        from: "hostEnrichmentBolt"
+        to: "enrichmentJoinBolt"
+        grouping:
+            streamId: "host"
+            type: FIELDS
+            args: ["key"]
+
+#threat intel
+    -   name: "enrichmentJoin -> threatSplit"
+        from: "enrichmentJoinBolt"
+        to: "threatIntelSplitBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "threatSplit -> simpleHBaseThreatIntel"
+        from: "threatIntelSplitBolt"
+        to: "simpleHBaseThreatIntelBolt"
+        grouping:
+            streamId: "hbaseThreatIntel"
+            type: FIELDS
+            args: ["key"]
+
+    -   name: "simpleHBaseThreatIntel -> join"
+        from: "simpleHBaseThreatIntelBolt"
+        to: "threatIntelJoinBolt"
+        grouping:
+            streamId: "hbaseThreatIntel"
+            type: FIELDS
+            args: ["key"]
+    -   name: "threatIntelSplit -> threatIntelJoin"
+        from: "threatIntelSplitBolt"
+        to: "threatIntelJoinBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+#indexing
+    -   name: "threatIntelJoin -> indexing"
+        from: "threatIntelJoinBolt"
+        to: "indexingBolt"
+        grouping:
+            streamId: "message"
+            type: FIELDS
+            args: ["key"]
+    -   name: "threatIntelJoin -> hdfs"
+        from: "threatIntelJoinBolt"
+        to: "hdfsIndexingBolt"
+        grouping:
+            streamId: "message"
+            type: SHUFFLE
+
+    -   name: "indexingBolt -> errorIndexingBolt"
+        from: "indexingBolt"
+        to: "indexingBolt"
+        grouping:
+            streamId: "error"
+            type: SHUFFLE

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/AbstractCIFAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/AbstractCIFAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/AbstractCIFAdapter.java
new file mode 100644
index 0000000..73a7ad5
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/AbstractCIFAdapter.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.adapters.cif;
+
+import java.io.Serializable;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+
+public abstract class AbstractCIFAdapter implements EnrichmentAdapter<CacheKey>,Serializable{
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = -5040559164824221816L;
+	protected static final Logger LOG = LoggerFactory
+			.getLogger(AbstractCIFAdapter.class);
+	
+	abstract public boolean initializeAdapter();
+	abstract public String enrichByIP(String metadata);
+	abstract public String enrichByDomain(String metadata);
+	abstract public String enrichByEmail(String metadata);
+
+	@Override
+	public void cleanup() {
+
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/CIFHbaseAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/CIFHbaseAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/CIFHbaseAdapter.java
new file mode 100644
index 0000000..63d6c0b
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/cif/CIFHbaseAdapter.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.adapters.cif;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.json.simple.JSONObject;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HConnectionManager;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.log4j.Logger;
+
+@SuppressWarnings("unchecked")
+public class CIFHbaseAdapter implements EnrichmentAdapter<CacheKey>,Serializable {
+
+	private static final long serialVersionUID = 1L;
+	private String _tableName;
+	private HTableInterface table;
+	private String _quorum;
+	private String _port;
+
+	public CIFHbaseAdapter(String quorum, String port, String tableName) {
+		_quorum = quorum;
+		_port = port;
+		_tableName = tableName;
+	}
+
+	/** The LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(CIFHbaseAdapter.class);
+
+	@Override
+	public void logAccess(CacheKey value) {
+
+	}
+
+	public JSONObject enrich(CacheKey k) {
+		String metadata = k.getValue();
+		JSONObject output = new JSONObject();
+		LOGGER.debug("=======Looking Up For:" + metadata);
+		output.putAll(getCIFObject(metadata));
+
+		return output;
+	}
+
+	@SuppressWarnings({ "rawtypes", "deprecation" })
+	protected Map getCIFObject(String key) {
+
+		LOGGER.debug("=======Pinging HBase For:" + key);
+
+		Get get = new Get(key.getBytes());
+		Result rs;
+		Map output = new HashMap();
+
+		try {
+			rs = table.get(get);
+
+			for (KeyValue kv : rs.raw())
+				output.put(new String(kv.getQualifier()), "Y");
+
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		return output;
+	}
+
+	@Override
+	public boolean initializeAdapter() {
+
+		// Initialize HBase Table
+		Configuration conf = null;
+		conf = HBaseConfiguration.create();
+		conf.set("hbase.zookeeper.quorum", _quorum);
+		conf.set("hbase.zookeeper.property.clientPort", _port);
+
+		try {
+			LOGGER.debug("=======Connecting to HBASE===========");
+			LOGGER.debug("=======ZOOKEEPER = "
+					+ conf.get("hbase.zookeeper.quorum"));
+			HConnection connection = HConnectionManager.createConnection(conf);
+			table = connection.getTable(_tableName);
+			return true;
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			LOGGER.debug("=======Unable to Connect to HBASE===========");
+			e.printStackTrace();
+		}
+
+		return false;
+	}
+
+
+	public String enrichByIP(String metadata) {
+		return null;
+	}
+
+
+	public String enrichByDomain(String metadata) {
+		return null;
+	}
+
+
+	public String enrichByEmail(String metadata) {
+		return null;
+	}
+
+	@Override
+	public void cleanup() {
+
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoAdapter.java
new file mode 100644
index 0000000..5d12a29
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoAdapter.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.geo;
+
+import org.apache.commons.validator.routines.InetAddressValidator;
+import org.apache.metron.enrichment.adapters.jdbc.JdbcAdapter;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONObject;
+
+import java.net.InetAddress;
+import java.sql.ResultSet;
+
+public class GeoAdapter extends JdbcAdapter {
+
+  private InetAddressValidator ipvalidator = new InetAddressValidator();
+
+  @Override
+  public void logAccess(CacheKey value) {
+
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public JSONObject enrich(CacheKey value) {
+    JSONObject enriched = new JSONObject();
+    try {
+      InetAddress addr = InetAddress.getByName(value.getValue());
+      if (addr.isAnyLocalAddress() || addr.isLoopbackAddress()
+              || addr.isSiteLocalAddress() || addr.isMulticastAddress()
+              || !ipvalidator.isValidInet4Address(value.getValue())) {
+        return new JSONObject();
+      }
+      String locidQuery = "select IPTOLOCID(\"" + value
+              + "\") as ANS";
+      ResultSet resultSet = statement.executeQuery(locidQuery);
+      String locid = null;
+      if (resultSet.next()) {
+        locid = resultSet.getString("ANS");
+      }
+      resultSet.close();
+      if (locid == null) return new JSONObject();
+      String geoQuery = "select * from location where locID = " + locid;
+      resultSet = statement.executeQuery(geoQuery);
+      if (resultSet.next()) {
+        enriched.put("locID", resultSet.getString("locID"));
+        enriched.put("country", resultSet.getString("country"));
+        enriched.put("city", resultSet.getString("city"));
+        enriched.put("postalCode", resultSet.getString("postalCode"));
+        enriched.put("latitude", resultSet.getString("latitude"));
+        enriched.put("longitude", resultSet.getString("longitude"));
+        enriched.put("dmaCode", resultSet.getString("dmaCode"));
+        enriched.put("location_point", enriched.get("longitude") + "," + enriched.get("latitude"));
+      }
+      resultSet.close();
+    } catch (Exception e) {
+      _LOG.error("Enrichment failure: " + e.getMessage(), e);
+      return new JSONObject();
+    }
+    return enriched;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/AbstractHostAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/AbstractHostAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/AbstractHostAdapter.java
new file mode 100644
index 0000000..329456f
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/AbstractHostAdapter.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.adapters.host;
+
+import java.io.Serializable;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+
+/**
+ * Base class for host enrichment adapters, which attach known-host
+ * information to telemetry messages.
+ */
+public abstract class AbstractHostAdapter implements EnrichmentAdapter<CacheKey>,
+        Serializable {
+
+  private static final long serialVersionUID = 8280523289446309728L;
+
+  protected static final Logger LOG = LoggerFactory
+          .getLogger(AbstractHostAdapter.class);
+
+  /** @return true when the adapter has host data available and is ready to enrich */
+  public abstract boolean initializeAdapter();
+
+  /**
+   * Looks up enrichment data for the host referenced by the cache key.
+   *
+   * @param metadata cache key holding the host/ip value to enrich
+   * @return enrichment fields; empty when the host is unknown
+   */
+  public abstract JSONObject enrich(CacheKey metadata);
+
+  @Override
+  public void cleanup() {
+    // No resources held by default; subclasses override when needed.
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapter.java
new file mode 100644
index 0000000..347cc03
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapter.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.host;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+/**
+ * Host enrichment adapter backed by a JSON list supplied as a string.
+ * Each element of the list is an object whose "ip" field identifies the
+ * host; the remaining fields become the enrichment payload for that host.
+ */
+public class HostFromJSONListAdapter extends AbstractHostAdapter {
+
+  Map<String, JSONObject> _known_hosts = new HashMap<>();
+
+  /**
+   * @param jsonList JSON array of host objects, e.g.
+   *                 [{"ip":"10.0.0.1","local":"YES"}, ...]
+   */
+  public HostFromJSONListAdapter(String jsonList) {
+    JSONArray jsonArray = (JSONArray) JSONValue.parse(jsonList);
+    for (Object o : jsonArray) {
+      JSONObject jsonObject = (JSONObject) o;
+      // "ip" is the lookup key; remove it so the stored enrichment only
+      // carries the descriptive fields.
+      String host = (String) jsonObject.remove("ip");
+      _known_hosts.put(host, jsonObject);
+    }
+  }
+
+  @Override
+  public boolean initializeAdapter() {
+    // Ready only if at least one host was parsed from the JSON list.
+    return !_known_hosts.isEmpty();
+  }
+
+  @Override
+  public void logAccess(CacheKey value) {
+    // Access tracking is not applicable for an in-memory host list.
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public JSONObject enrich(CacheKey k) {
+    String metadata = k.getValue();
+    JSONObject knownInfo = _known_hosts.get(metadata);
+    if (knownInfo == null) {
+      return new JSONObject();
+    }
+    // Flatten the known-host fields under a "known_info." prefix.
+    JSONObject enrichment = new JSONObject();
+    String prefix = "known_info.";
+    for (Object key : knownInfo.keySet()) {
+      enrichment.put(prefix + key, knownInfo.get(key));
+    }
+    return enrichment;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
new file mode 100644
index 0000000..f92bd3f
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapter.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.adapters.host;
+
+import java.util.Map;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONObject;
+
+@SuppressWarnings("serial")
+public class HostFromPropertiesFileAdapter extends AbstractHostAdapter {
+	
+	Map<String, JSONObject> _known_hosts;
+	
+	public HostFromPropertiesFileAdapter(Map<String, JSONObject> known_hosts)
+	{
+		_known_hosts = known_hosts;
+	}
+
+	@Override
+	public boolean initializeAdapter()
+	{
+		
+		if(_known_hosts.size() > 0)
+			return true;
+		else
+			return false;
+	}
+
+	@Override
+	public void logAccess(CacheKey value) {
+
+	}
+
+	@SuppressWarnings("unchecked")
+    @Override
+	public JSONObject enrich(CacheKey metadata) {
+		
+		
+		if(!_known_hosts.containsKey(metadata.getValue()))
+			return new JSONObject();
+		
+		JSONObject enrichment = new JSONObject();
+		enrichment.put("known_info", (JSONObject) _known_hosts.get(metadata.getValue()));
+		return enrichment;
+	}
+	
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/BaseJdbcConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/BaseJdbcConfig.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/BaseJdbcConfig.java
new file mode 100644
index 0000000..e2e26cc
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/BaseJdbcConfig.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.jdbc;
+
+import java.io.Serializable;
+
+/**
+ * Common connection settings (host, port, credentials, table) shared by
+ * concrete {@link JdbcConfig} implementations.
+ */
+public abstract class BaseJdbcConfig implements JdbcConfig, Serializable {
+
+  protected String host;
+  protected int port = -1;   // -1 means "no explicit port configured"
+  protected String username;
+  protected String password;
+  protected String table = "";
+
+  @Override
+  public String getHost() {
+    return this.host;
+  }
+
+  public void setHost(String host) {
+    this.host = host;
+  }
+
+  public int getPort() {
+    return this.port;
+  }
+
+  public void setPort(int port) {
+    this.port = port;
+  }
+
+  public String getUsername() {
+    return this.username;
+  }
+
+  public void setUsername(String username) {
+    this.username = username;
+  }
+
+  public String getPassword() {
+    return this.password;
+  }
+
+  public void setPassword(String password) {
+    this.password = password;
+  }
+
+  public String getTable() {
+    return this.table;
+  }
+
+  public void setTable(String table) {
+    this.table = table;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcAdapter.java
new file mode 100644
index 0000000..9233059
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcAdapter.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.jdbc;
+
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.Serializable;
+import java.net.InetAddress;
+import java.sql.*;
+
+public abstract class JdbcAdapter implements EnrichmentAdapter<CacheKey>,
+        Serializable {
+
+  protected static final Logger _LOG = LoggerFactory
+          .getLogger(JdbcAdapter.class);
+
+  protected Connection connection;
+  protected Statement statement;
+
+  private JdbcConfig config;
+  private String host;
+
+  public void setStatement(Statement statement) {
+    this.statement = statement;
+  }
+
+  public JdbcAdapter withJdbcConfig(JdbcConfig config) {
+    this.config = config;
+    this.host = config.getHost();
+    return this;
+  }
+
+  @Override
+  public boolean initializeAdapter() {
+    try {
+      if (!InetAddress.getByName(host).isReachable(500)) {
+        throw new Exception("Unable to reach host " + host);
+      }
+      Class.forName(this.config.getClassName());
+      connection = DriverManager.getConnection(this.config.getJdbcUrl());
+      connection.setReadOnly(true);
+      if (!connection.isValid(0))
+        throw new Exception("Invalid connection string....");
+      statement = connection.createStatement(
+              ResultSet.TYPE_SCROLL_INSENSITIVE,
+              ResultSet.CONCUR_READ_ONLY);
+      return true;
+    } catch (Exception e) {
+      e.printStackTrace();
+      _LOG.error("[Metron] JDBC connection failed....", e);
+
+      return false;
+    }
+  }
+
+  @Override
+  public void cleanup() {
+    try {
+      if (statement != null) statement.close();
+      if (connection != null) connection.close();
+    } catch (SQLException e) {
+      e.printStackTrace();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcConfig.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcConfig.java
new file mode 100644
index 0000000..f88cebe
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/JdbcConfig.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.jdbc;
+
+/**
+ * Minimal contract a JDBC-backed enrichment adapter needs to open a
+ * connection: driver class, connection URL and host.
+ */
+public interface JdbcConfig {
+
+  /** @return fully-qualified JDBC driver class name, e.g. "com.mysql.jdbc.Driver" */
+  String getClassName();
+
+  /** @return complete JDBC connection URL, including credentials if required */
+  String getJdbcUrl();
+
+  /** @return database host name, used for a reachability check before connecting */
+  String getHost();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfig.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfig.java
new file mode 100644
index 0000000..1dbe005
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfig.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.jdbc;
+
+/**
+ * JDBC configuration for a MySQL backend: supplies the MySQL driver class
+ * and assembles the connection URL from the configured settings.
+ */
+public class MySqlConfig extends BaseJdbcConfig {
+
+  @Override
+  public String getClassName() {
+    return "com.mysql.jdbc.Driver";
+  }
+
+  @Override
+  public String getJdbcUrl() {
+    // Shape: jdbc:mysql://host[:port]/table?user=...&password=...
+    String hostPart = (port > 0) ? host + ":" + port : host;
+    return "jdbc:mysql://" + hostPart
+            + "/" + table
+            + "?user=" + username
+            + "&password=" + password;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapter.java
new file mode 100644
index 0000000..f2e0113
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapter.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.adapters.simplehbase;
+
+
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.apache.metron.enrichment.utils.EnrichmentUtils;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.EnrichmentLookup;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.accesstracker.NoopAccessTracker;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Enrichment adapter that pulls enrichment records straight from an HBase
+ * table. Each enrichment type configured for the key's field is looked up
+ * and its metadata flattened into the result as "<type>.<column>" entries.
+ */
+public class SimpleHBaseAdapter implements EnrichmentAdapter<CacheKey>, Serializable {
+  protected static final Logger _LOG = LoggerFactory.getLogger(SimpleHBaseAdapter.class);
+  protected SimpleHBaseConfig config;
+  protected EnrichmentLookup lookup;
+
+  public SimpleHBaseAdapter() {
+  }
+
+  public SimpleHBaseAdapter(SimpleHBaseConfig config) {
+    withConfig(config);
+  }
+
+  public SimpleHBaseAdapter withConfig(SimpleHBaseConfig config) {
+    this.config = config;
+    return this;
+  }
+
+  @Override
+  public void logAccess(CacheKey value) {
+    // No access tracking: this adapter is wired with a NoopAccessTracker.
+  }
+
+  /**
+   * Looks up every enrichment type configured for the key's field and
+   * merges the resulting metadata into a single JSONObject.
+   *
+   * @param value cache key carrying the field, its value and the sensor config
+   * @return flattened enrichment data; empty when nothing is configured or found
+   * @throws RuntimeException when the HBase lookup fails
+   */
+  @Override
+  public JSONObject enrich(CacheKey value) {
+    JSONObject enriched = new JSONObject();
+    List<String> enrichmentTypes = value.getConfig()
+                                        .getFieldToEnrichmentTypeMap()
+                                        .get(EnrichmentUtils.toTopLevelField(value.getField()));
+    if (enrichmentTypes != null && value.getValue() != null) {
+      try {
+        for (LookupKV<EnrichmentKey, EnrichmentValue> kv :
+                lookup.get(Iterables.transform(enrichmentTypes,
+                                               new EnrichmentUtils.TypeToKey(value.getValue())),
+                           lookup.getTable(),
+                           false)) {
+          if (kv != null && kv.getValue() != null && kv.getValue().getMetadata() != null) {
+            for (Map.Entry<String, String> values : kv.getValue().getMetadata().entrySet()) {
+              enriched.put(kv.getKey().type + "." + values.getKey(), values.getValue());
+            }
+            // Parameterized logging avoids building the message when trace is off.
+            _LOG.trace("Enriched type {} => {}", kv.getKey().type, enriched);
+          }
+        }
+      } catch (IOException e) {
+        _LOG.error("Unable to retrieve value: " + e.getMessage(), e);
+        throw new RuntimeException("Unable to retrieve value: " + e.getMessage(), e);
+      }
+    }
+    return enriched;
+  }
+
+  @Override
+  public boolean initializeAdapter() {
+    String hbaseTable = config.getHBaseTable();
+    Configuration hbaseConfig = HBaseConfiguration.create();
+    try {
+      lookup = new EnrichmentLookup(config.getProvider().getTable(hbaseConfig, hbaseTable),
+                                    config.getHBaseCF(),
+                                    new NoopAccessTracker());
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to initialize adapter: " + e.getMessage(), e);
+    }
+    return true;
+  }
+
+  @Override
+  public void cleanup() {
+    try {
+      lookup.close();
+    } catch (Exception e) {
+      throw new RuntimeException("Unable to cleanup access tracker", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfig.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfig.java
new file mode 100644
index 0000000..fefe008
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfig.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.simplehbase;
+
+import org.apache.metron.enrichment.utils.EnrichmentUtils;
+import org.apache.metron.hbase.HTableProvider;
+import org.apache.metron.hbase.TableProvider;
+
+import java.io.Serializable;
+
+
+/**
+ * Fluent configuration for {@link SimpleHBaseAdapter}: the HBase table and
+ * column family holding enrichment data, plus the table provider used to
+ * obtain connections.
+ */
+public class SimpleHBaseConfig implements Serializable {
+
+  private String hBaseTable;
+  private String hBaseCF;
+  private TableProvider provider = new HTableProvider();
+
+  public String getHBaseTable() {
+    return hBaseTable;
+  }
+
+  public String getHBaseCF() {
+    return hBaseCF;
+  }
+
+  public TableProvider getProvider() {
+    return provider;
+  }
+
+  /** Replaces the default HTableProvider with the named implementation. */
+  public SimpleHBaseConfig withProviderImpl(String connectorImpl) {
+    this.provider = EnrichmentUtils.getTableProvider(connectorImpl, new HTableProvider());
+    return this;
+  }
+
+  public SimpleHBaseConfig withHBaseTable(String hBaseTable) {
+    this.hBaseTable = hBaseTable;
+    return this;
+  }
+
+  public SimpleHBaseConfig withHBaseCF(String cf) {
+    this.hBaseCF = cf;
+    return this;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapter.java
new file mode 100644
index 0000000..dbdf6ec
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapter.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.threatintel;
+
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.apache.metron.enrichment.utils.EnrichmentUtils;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.lookup.EnrichmentLookup;
+import org.apache.metron.enrichment.lookup.accesstracker.BloomAccessTracker;
+import org.apache.metron.enrichment.lookup.accesstracker.PersistentAccessTracker;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Enrichment adapter that flags telemetry field values appearing in HBase
+ * threat intel data. A hit for a configured threat intel type adds
+ * "<type>": "alert" to the result. Accesses are recorded through a
+ * bloom-filter-backed, periodically persisted access tracker.
+ */
+public class ThreatIntelAdapter implements EnrichmentAdapter<CacheKey>, Serializable {
+  protected static final Logger _LOG = LoggerFactory.getLogger(ThreatIntelAdapter.class);
+  protected ThreatIntelConfig config;
+  protected EnrichmentLookup lookup;
+
+  public ThreatIntelAdapter() {
+  }
+
+  public ThreatIntelAdapter(ThreatIntelConfig config) {
+    withConfig(config);
+  }
+
+  public ThreatIntelAdapter withConfig(ThreatIntelConfig config) {
+    this.config = config;
+    return this;
+  }
+
+  @Override
+  public void logAccess(CacheKey value) {
+    List<String> enrichmentTypes = value.getConfig().getFieldToThreatIntelTypeMap().get(value.getField());
+    if (enrichmentTypes != null) {
+      for (String enrichmentType : enrichmentTypes) {
+        lookup.getAccessTracker().logAccess(new EnrichmentKey(enrichmentType, value.getValue()));
+      }
+    }
+  }
+
+  /**
+   * Checks the key's value against every threat intel type configured for
+   * its field; each hit contributes "<type>": "alert" to the result.
+   *
+   * @param value cache key carrying the field, its value and the sensor config
+   * @return threat intel hits; empty when nothing is configured or matched
+   * @throws RuntimeException when the HBase existence check fails
+   */
+  @Override
+  public JSONObject enrich(CacheKey value) {
+    JSONObject enriched = new JSONObject();
+    List<String> enrichmentTypes = value.getConfig()
+                                        .getFieldToThreatIntelTypeMap()
+                                        .get(EnrichmentUtils.toTopLevelField(value.getField()));
+    if (enrichmentTypes != null) {
+      // NOTE(review): index i pairs each Boolean with its enrichment type;
+      // this relies on lookup.exists returning results in input order —
+      // confirm against EnrichmentLookup.
+      int i = 0;
+      try {
+        for (Boolean isThreat :
+                lookup.exists(Iterables.transform(enrichmentTypes,
+                                                  new EnrichmentUtils.TypeToKey(value.getValue())),
+                              lookup.getTable(),
+                              false)) {
+          String enrichmentType = enrichmentTypes.get(i++);
+          if (isThreat) {
+            enriched.put(enrichmentType, "alert");
+            // Parameterized logging avoids building the message when trace is off.
+            _LOG.trace("Enriched value => {}", enriched);
+          }
+        }
+      } catch (IOException e) {
+        // Log before rethrowing, consistent with SimpleHBaseAdapter.
+        _LOG.error("Unable to retrieve value: " + e.getMessage(), e);
+        throw new RuntimeException("Unable to retrieve value", e);
+      }
+    }
+    return enriched;
+  }
+
+  @Override
+  public boolean initializeAdapter() {
+    PersistentAccessTracker accessTracker;
+    String hbaseTable = config.getHBaseTable();
+    int expectedInsertions = config.getExpectedInsertions();
+    double falsePositives = config.getFalsePositiveRate();
+    String trackerHBaseTable = config.getTrackerHBaseTable();
+    String trackerHBaseCF = config.getTrackerHBaseCF();
+    long millisecondsBetweenPersist = config.getMillisecondsBetweenPersists();
+    BloomAccessTracker bat = new BloomAccessTracker(hbaseTable, expectedInsertions, falsePositives);
+    Configuration hbaseConfig = HBaseConfiguration.create();
+    try {
+      accessTracker = new PersistentAccessTracker(hbaseTable,
+              UUID.randomUUID().toString(),
+              config.getProvider().getTable(hbaseConfig, trackerHBaseTable),
+              trackerHBaseCF,
+              bat,
+              millisecondsBetweenPersist);
+      lookup = new EnrichmentLookup(config.getProvider().getTable(hbaseConfig, hbaseTable), config.getHBaseCF(), accessTracker);
+    } catch (IOException e) {
+      throw new IllegalStateException("Unable to initialize ThreatIntelAdapter", e);
+    }
+    return true;
+  }
+
+  @Override
+  public void cleanup() {
+    try {
+      lookup.close();
+    } catch (Exception e) {
+      throw new RuntimeException("Unable to cleanup access tracker", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfig.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfig.java
new file mode 100644
index 0000000..2d63eab
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfig.java
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.threatintel;
+
+import org.apache.metron.enrichment.utils.EnrichmentUtils;
+import org.apache.metron.hbase.HTableProvider;
+import org.apache.metron.hbase.TableProvider;
+
+import java.io.Serializable;
+
+public class ThreatIntelConfig implements Serializable {
+  public static final long MS_IN_HOUR = 10000*60*60;
+  private String hBaseTable;
+  private String hBaseCF;
+  private double falsePositiveRate = 0.03;
+  private int expectedInsertions = 100000;
+  private String trackerHBaseTable;
+  private String trackerHBaseCF;
+  private long millisecondsBetweenPersists = 2*MS_IN_HOUR;
+  private TableProvider provider = new HTableProvider();
+
+  public String getHBaseTable() {
+    return hBaseTable;
+  }
+
+  public int getExpectedInsertions() {
+    return expectedInsertions;
+  }
+
+  public double getFalsePositiveRate() {
+    return falsePositiveRate;
+  }
+
+  public String getTrackerHBaseTable() {
+    return trackerHBaseTable;
+  }
+
+  public String getTrackerHBaseCF() {
+    return trackerHBaseCF;
+  }
+
+  public long getMillisecondsBetweenPersists() {
+    return millisecondsBetweenPersists;
+  }
+
+  public String getHBaseCF() {
+    return hBaseCF;
+  }
+
+  public TableProvider getProvider() {
+    return provider;
+  }
+
+  public ThreatIntelConfig withProviderImpl(String connectorImpl) {
+    provider = EnrichmentUtils.getTableProvider(connectorImpl, new HTableProvider());
+    return this;
+  }
+
+  public ThreatIntelConfig withTrackerHBaseTable(String hBaseTable) {
+    this.trackerHBaseTable = hBaseTable;
+    return this;
+  }
+
+  public ThreatIntelConfig withTrackerHBaseCF(String cf) {
+    this.trackerHBaseCF = cf;
+    return this;
+  }
+  public ThreatIntelConfig withHBaseTable(String hBaseTable) {
+    this.hBaseTable = hBaseTable;
+    return this;
+  }
+
+  public ThreatIntelConfig withHBaseCF(String cf) {
+    this.hBaseCF= cf;
+    return this;
+  }
+
+  public ThreatIntelConfig withFalsePositiveRate(double falsePositiveRate) {
+    this.falsePositiveRate = falsePositiveRate;
+    return this;
+  }
+
+  public ThreatIntelConfig withExpectedInsertions(int expectedInsertions) {
+    this.expectedInsertions = expectedInsertions;
+    return this;
+  }
+
+  public ThreatIntelConfig withMillisecondsBetweenPersists(long millisecondsBetweenPersists) {
+    this.millisecondsBetweenPersists = millisecondsBetweenPersists;
+    return this;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBolt.java
new file mode 100644
index 0000000..f3e742d
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBolt.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.bolt.ConfiguredBolt;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.common.utils.ErrorUtils;
+import org.apache.metron.common.utils.MessageUtils;
+import org.apache.metron.common.interfaces.BulkMessageWriter;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
/**
 * Storm bolt that batches enriched messages per sensor type and flushes each
 * batch through a {@link BulkMessageWriter} once the sensor's configured
 * batch size is reached. Tuples are acked only after a successful bulk write,
 * and failed on a write error, so batching preserves at-least-once semantics.
 */
public class BulkMessageWriterBolt extends ConfiguredBolt {

  private static final Logger LOG = LoggerFactory
          .getLogger(BulkMessageWriterBolt.class);
  private OutputCollector collector;
  // Destination writer (e.g. an indexing writer); injected via the fluent setter.
  private BulkMessageWriter<JSONObject> bulkMessageWriter;
  // Pending (un-acked) tuples per sensor type, awaiting a full batch.
  private Map<String, List<Tuple>> sensorTupleMap = new HashMap<>();
  // Pending messages per sensor type, parallel to sensorTupleMap.
  private Map<String, List<JSONObject>> sensorMessageMap = new HashMap<>();

  public BulkMessageWriterBolt(String zookeeperUrl) {
    super(zookeeperUrl);
  }

  /**
   * @param bulkMessageWriter the writer used to flush each batch
   * @return this, for fluent chaining
   */
  public BulkMessageWriterBolt withBulkMessageWriter(BulkMessageWriter<JSONObject> bulkMessageWriter) {
    this.bulkMessageWriter = bulkMessageWriter;
    return this;
  }

  @Override
  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;
    // Super first: loads ZK-backed configurations the writer may need in init.
    super.prepare(stormConf, context, collector);
    try {
      bulkMessageWriter.init(stormConf, configurations);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  @SuppressWarnings("unchecked")
  @Override
  public void execute(Tuple tuple) {
    // Clone so the timestamp stamped below does not mutate the upstream tuple's
    // message object.
    JSONObject message = (JSONObject)((JSONObject) tuple.getValueByField("message")).clone();
    message.put("index." + bulkMessageWriter.getClass().getSimpleName().toLowerCase() + ".ts", "" + System.currentTimeMillis());
    String sensorType = MessageUtils.getSensorType(message);
    SensorEnrichmentConfig sensorEnrichmentConfig = configurations.getSensorEnrichmentConfig(sensorType);
    // No config for the sensor => write-through with batch size 1.
    int batchSize = sensorEnrichmentConfig != null ? sensorEnrichmentConfig.getBatchSize() : 1;
    List<Tuple> tupleList = sensorTupleMap.get(sensorType);
    if (tupleList == null) tupleList = new ArrayList<>();
    tupleList.add(tuple);
    List<JSONObject> messageList = sensorMessageMap.get(sensorType);
    if (messageList == null) messageList = new ArrayList<>();
    messageList.add(message);
    if (messageList.size() < batchSize) {
      // Batch not full yet: stash (possibly newly-created) lists back and wait.
      sensorTupleMap.put(sensorType, tupleList);
      sensorMessageMap.put(sensorType, messageList);
    } else {
      try {
        bulkMessageWriter.write(sensorType, configurations, tupleList, messageList);
        // Ack only after the bulk write succeeds (at-least-once delivery).
        for(Tuple t: tupleList) {
          collector.ack(t);
        }
      } catch (Exception e) {
        // Fail every tuple in the batch so Storm replays them, then report.
        for(Tuple t: tupleList) {
          collector.fail(t);
        }
        ErrorUtils.handleError(collector, e, Constants.ERROR_STREAM);
      }
      // Batch flushed (or failed for replay) either way: drop the pending state.
      sensorTupleMap.remove(sensorType);
      sensorMessageMap.remove(sensorType);
    }
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // NOTE(review): declares the literal "error" stream while execute() emits
    // errors via Constants.ERROR_STREAM — presumably the same value; confirm.
    declarer.declareStream("error", new Fields("message"));
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/CacheKey.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/CacheKey.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/CacheKey.java
new file mode 100644
index 0000000..1338b44
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/CacheKey.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+
+public class CacheKey {
+  private String field;
+  private String value;
+  private SensorEnrichmentConfig config;
+
+  public CacheKey(String field, String value, SensorEnrichmentConfig config) {
+    this.field = field;
+    this.value = value;
+    this.config = config;
+  }
+
+  public String getField() {
+    return field;
+  }
+
+  public String getValue() {
+    return value;
+  }
+
+  public SensorEnrichmentConfig getConfig() {
+    return config;
+  }
+
+  @Override
+  public String toString() {
+    return "CacheKey{" +
+            "field='" + field + '\'' +
+            ", value='" + value + '\'' +
+            '}';
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    CacheKey cacheKey = (CacheKey) o;
+
+    if (getField() != null ? !getField().equals(cacheKey.getField()) : cacheKey.getField() != null) return false;
+    if (getValue() != null ? !getValue().equals(cacheKey.getValue()) : cacheKey.getValue() != null) return false;
+    return config != null ? config.equals(cacheKey.config) : cacheKey.config == null;
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = getField() != null ? getField().hashCode() : 0;
+    result = 31 * result + (getValue() != null ? getValue().hashCode() : 0);
+    result = 31 * result + (config != null ? config.hashCode() : 0);
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBolt.java
new file mode 100644
index 0000000..8ef44d0
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBolt.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.TopologyContext;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.common.utils.MessageUtils;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class EnrichmentJoinBolt extends JoinBolt<JSONObject> {
+
+  protected static final Logger LOG = LoggerFactory
+          .getLogger(EnrichmentJoinBolt.class);
+
+  public EnrichmentJoinBolt(String zookeeperUrl) {
+    super(zookeeperUrl);
+  }
+
+  @Override
+  public void prepare(Map map, TopologyContext topologyContext) {
+
+  }
+
+  @Override
+  public Set<String> getStreamIds(JSONObject message) {
+    Set<String> streamIds = new HashSet<>();
+    String sourceType = MessageUtils.getSensorType(message);
+    Map<String, List<String>>  fieldMap = getFieldMap(sourceType);
+    if(fieldMap != null) {
+      for (String enrichmentType : getFieldMap(sourceType).keySet()) {
+        streamIds.add(enrichmentType);
+      }
+    }
+    streamIds.add("message");
+    return streamIds;
+  }
+
+
+  @Override
+  public JSONObject joinMessages(Map<String, JSONObject> streamMessageMap) {
+    JSONObject message = new JSONObject();
+    for (String key : streamMessageMap.keySet()) {
+      JSONObject obj = streamMessageMap.get(key);
+      message.putAll(obj);
+    }
+    List<Object> emptyKeys = new ArrayList<>();
+    for(Object key : message.keySet()) {
+      Object value = message.get(key);
+      if(value.toString().length() == 0) {
+        emptyKeys.add(key);
+      }
+    }
+    for(Object o : emptyKeys) {
+      message.remove(o);
+    }
+    message.put(getClass().getSimpleName().toLowerCase() + ".joiner.ts", "" + System.currentTimeMillis());
+    return message;
+  }
+
+  public Map<String, List<String>> getFieldMap(String sourceType) {
+    if(sourceType != null) {
+      SensorEnrichmentConfig config = configurations.getSensorEnrichmentConfig(sourceType);
+      if (config != null) {
+        return config.getEnrichmentFieldMap();
+      }
+      else {
+        LOG.error("Unable to retrieve a sensor enrichment config of " + sourceType);
+      }
+    }
+    else {
+      LOG.error("Trying to retrieve a field map with source type of null");
+    }
+    return null;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBolt.java
new file mode 100644
index 0000000..e713d69
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBolt.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.Constants;
+import org.apache.metron.enrichment.configuration.Enrichment;
+import org.apache.metron.enrichment.utils.EnrichmentUtils;
+import org.apache.metron.common.utils.MessageUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.UnsupportedEncodingException;
+import java.util.*;
+
+public class EnrichmentSplitterBolt extends SplitBolt<JSONObject> {
+    protected static final Logger LOG = LoggerFactory.getLogger(EnrichmentSplitterBolt.class);
+    private List<Enrichment> enrichments;
+    protected String messageFieldName;
+    private transient JSONParser parser;
+
+
+    public EnrichmentSplitterBolt(String zookeeperUrl) {
+        super(zookeeperUrl);
+    }
+
+    public EnrichmentSplitterBolt withEnrichments(List<Enrichment> enrichments) {
+        this.enrichments = enrichments;
+        return this;
+    }
+
+    public EnrichmentSplitterBolt withMessageFieldName(String messageFieldName) {
+        this.messageFieldName = messageFieldName;
+        return this;
+    }
+    @Override
+    public void prepare(Map map, TopologyContext topologyContext) {
+        parser = new JSONParser();
+    }
+    @Override
+    public String getKey(Tuple tuple, JSONObject message) {
+        String key = null;
+        try {
+            key = tuple.getStringByField("key");
+        }
+        catch(Throwable t) {
+            //swallowing this just in case.
+        }
+        if(key != null) {
+            return key;
+        }
+        else {
+            return UUID.randomUUID().toString();
+        }
+    }
+
+    @Override
+    public JSONObject generateMessage(Tuple tuple) {
+        JSONObject message = null;
+        if (messageFieldName == null) {
+            byte[] data = tuple.getBinary(0);
+            try {
+                message = (JSONObject) parser.parse(new String(data, "UTF8"));
+                message.put(getClass().getSimpleName().toLowerCase() + ".splitter.begin.ts", "" + System.currentTimeMillis());
+            } catch (ParseException | UnsupportedEncodingException e) {
+                e.printStackTrace();
+            }
+        } else {
+            message = (JSONObject) tuple.getValueByField(messageFieldName);
+            message.put(getClass().getSimpleName().toLowerCase() + ".splitter.begin.ts", "" + System.currentTimeMillis());
+        }
+        return message;
+    }
+
+    @Override
+    public Set<String> getStreamIds() {
+        Set<String> streamIds = new HashSet<>();
+        for(Enrichment enrichment: enrichments) {
+            streamIds.add(enrichment.getType());
+        }
+        return streamIds;
+    }
+
+    @SuppressWarnings("unchecked")
+    @Override
+    public Map<String, JSONObject> splitMessage(JSONObject message) {
+        Map<String, JSONObject> streamMessageMap = new HashMap<>();
+        String sensorType = MessageUtils.getSensorType(message);
+        Map<String, List<String>> enrichmentFieldMap = getFieldMap(sensorType);
+        for (String enrichmentType : enrichmentFieldMap.keySet()) {
+            List<String> fields = enrichmentFieldMap.get(enrichmentType);
+            JSONObject enrichmentObject = new JSONObject();
+            if (fields != null && fields.size() > 0) {
+                for (String field : fields) {
+                    enrichmentObject.put(getKeyName(enrichmentType, field), message.get(field));
+                }
+                enrichmentObject.put(Constants.SENSOR_TYPE, sensorType);
+                streamMessageMap.put(enrichmentType, enrichmentObject);
+            }
+        }
+        message.put(getClass().getSimpleName().toLowerCase() + ".splitter.end.ts", "" + System.currentTimeMillis());
+        return streamMessageMap;
+    }
+
+    protected Map<String, List<String>> getFieldMap(String sensorType) {
+        return configurations.getSensorEnrichmentConfig(sensorType).getEnrichmentFieldMap();
+    }
+
+    protected String getKeyName(String type, String field) {
+        return EnrichmentUtils.getEnrichmentKey(type, field);
+    }
+
+    @Override
+    public void declareOther(OutputFieldsDeclarer declarer) {
+
+    }
+
+    @Override
+    public void emitOther(Tuple tuple, JSONObject message) {
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBolt.java
new file mode 100644
index 0000000..e5b8ca6
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBolt.java
@@ -0,0 +1,225 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.bolt.ConfiguredBolt;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.enrichment.configuration.Enrichment;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.apache.metron.common.utils.ErrorUtils;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Uses an adapter to enrich telemetry messages with additional metadata
+ * entries. For a list of available enrichment adapters see
+ * org.apache.metron.enrichment.adapters.
+ * <p/>
+ * At the moment of release the following enrichment adapters are available:
+ * <p/>
+ * <ul>
+ * <p/>
+ * <li>geo = attaches geo coordinates to IPs
+ * <li>whois = attaches whois information to domains
+ * <li>host = attaches reputation information to known hosts
+ * <li>CIF = attaches information from threat intelligence feeds
+ * <ul>
+ * <p/>
+ * <p/>
+ * Enrichments are optional
+ **/
+
+@SuppressWarnings({"rawtypes", "serial"})
+public class GenericEnrichmentBolt extends ConfiguredBolt {
+
+  private static final Logger LOG = LoggerFactory
+          .getLogger(GenericEnrichmentBolt.class);
+  private OutputCollector collector;
+
+  protected String enrichmentType;
+  protected EnrichmentAdapter<CacheKey> adapter;
+  protected transient CacheLoader<CacheKey, JSONObject> loader;
+  protected transient LoadingCache<CacheKey, JSONObject> cache;
+  protected Long maxCacheSize;
+  protected Long maxTimeRetain;
+  protected boolean invalidateCacheOnReload = false;
+
+  public GenericEnrichmentBolt(String zookeeperUrl) {
+    super(zookeeperUrl);
+  }
+
+  /**
+   * @param enrichment enrichment
+   * @return Instance of this class
+   */
+
+  public GenericEnrichmentBolt withEnrichment(Enrichment enrichment) {
+    this.enrichmentType = enrichment.getType();
+    this.adapter = enrichment.getAdapter();
+    return this;
+  }
+
+  /**
+   * @param maxCacheSize Maximum size of cache before flushing
+   * @return Instance of this class
+   */
+
+  public GenericEnrichmentBolt withMaxCacheSize(long maxCacheSize) {
+    this.maxCacheSize = maxCacheSize;
+    return this;
+  }
+
+  /**
+   * @param maxTimeRetain Maximum time to retain cached entry before expiring
+   * @return Instance of this class
+   */
+
+  public GenericEnrichmentBolt withMaxTimeRetain(long maxTimeRetain) {
+    this.maxTimeRetain = maxTimeRetain;
+    return this;
+  }
+
+  public GenericEnrichmentBolt withCacheInvalidationOnReload(boolean cacheInvalidationOnReload) {
+    this.invalidateCacheOnReload= cacheInvalidationOnReload;
+    return this;
+  }
+  @Override
+  public void reloadCallback(String name, Configurations.Type type) {
+    if(invalidateCacheOnReload) {
+      if (cache != null) {
+        cache.invalidateAll();
+      }
+    }
+  }
+
+  @Override
+  public void prepare(Map conf, TopologyContext topologyContext,
+                      OutputCollector collector) {
+    super.prepare(conf, topologyContext, collector);
+    this.collector = collector;
+    if (this.maxCacheSize == null)
+      throw new IllegalStateException("MAX_CACHE_SIZE_OBJECTS_NUM must be specified");
+    if (this.maxTimeRetain == null)
+      throw new IllegalStateException("MAX_TIME_RETAIN_MINUTES must be specified");
+    if (this.adapter == null)
+      throw new IllegalStateException("Adapter must be specified");
+    loader = new CacheLoader<CacheKey, JSONObject>() {
+      public JSONObject load(CacheKey key) throws Exception {
+        return adapter.enrich(key);
+      }
+    };
+    cache = CacheBuilder.newBuilder().maximumSize(maxCacheSize)
+            .expireAfterWrite(maxTimeRetain, TimeUnit.MINUTES)
+            .build(loader);
+    boolean success = adapter.initializeAdapter();
+    if (!success) {
+      LOG.error("[Metron] EnrichmentSplitterBolt could not initialize adapter");
+      throw new IllegalStateException("Could not initialize adapter...");
+    }
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    declarer.declareStream(enrichmentType, new Fields("key", "message"));
+    declarer.declareStream("error", new Fields("message"));
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void execute(Tuple tuple) {
+    String key = tuple.getStringByField("key");
+    JSONObject rawMessage = (JSONObject) tuple.getValueByField("message");
+
+    JSONObject enrichedMessage = new JSONObject();
+    enrichedMessage.put("adapter." + adapter.getClass().getSimpleName().toLowerCase() + ".begin.ts", "" + System.currentTimeMillis());
+    try {
+      if (rawMessage == null || rawMessage.isEmpty())
+        throw new Exception("Could not parse binary stream to JSON");
+      if (key == null)
+        throw new Exception("Key is not valid");
+      String sourceType = null;
+      if(rawMessage.containsKey(Constants.SENSOR_TYPE)) {
+        sourceType = rawMessage.get(Constants.SENSOR_TYPE).toString();
+      }
+      else {
+        throw new RuntimeException("Source type is missing from enrichment fragment: " + rawMessage.toJSONString());
+      }
+      for (Object o : rawMessage.keySet()) {
+        String field = (String) o;
+        String value = (String) rawMessage.get(field);
+        if (field.equals(Constants.SENSOR_TYPE)) {
+          enrichedMessage.put(Constants.SENSOR_TYPE, value);
+        } else {
+          JSONObject enrichedField = new JSONObject();
+          if (value != null && value.length() != 0) {
+            SensorEnrichmentConfig config = configurations.getSensorEnrichmentConfig(sourceType);
+            if(config == null) {
+              throw new RuntimeException("Unable to find " + config);
+            }
+            CacheKey cacheKey= new CacheKey(field, value, config);
+            adapter.logAccess(cacheKey);
+            enrichedField = cache.getUnchecked(cacheKey);
+            if (enrichedField == null)
+              throw new Exception("[Metron] Could not enrich string: "
+                      + value);
+          }
+          if (!enrichedField.isEmpty()) {
+            for (Object enrichedKey : enrichedField.keySet()) {
+              enrichedMessage.put(field + "." + enrichedKey, enrichedField.get(enrichedKey));
+            }
+          } else {
+            enrichedMessage.put(field, "");
+          }
+        }
+      }
+
+      enrichedMessage.put("adapter." + adapter.getClass().getSimpleName().toLowerCase() + ".end.ts", "" + System.currentTimeMillis());
+      if (!enrichedMessage.isEmpty()) {
+        collector.emit(enrichmentType, new Values(key, enrichedMessage));
+      }
+    } catch (Exception e) {
+      LOG.error("[Metron] Unable to enrich message: " + rawMessage, e);
+      JSONObject error = ErrorUtils.generateErrorMessage("Enrichment problem: " + rawMessage, e);
+      if (key != null) {
+        collector.emit(enrichmentType, new Values(key, enrichedMessage));
+      }
+      collector.emit("error", new Values(error));
+    }
+  }
+
+  @Override
+  public void cleanup() {
+    adapter.cleanup();
+  }
+}


[16/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/asa/GrokAsaParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/asa/GrokAsaParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/asa/GrokAsaParser.java
new file mode 100644
index 0000000..0f8a862
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/asa/GrokAsaParser.java
@@ -0,0 +1,280 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.asa;
+
+import oi.thekraken.grok.api.Grok;
+import oi.thekraken.grok.api.Match;
+import oi.thekraken.grok.api.exception.GrokException;
+import org.apache.commons.io.IOUtils;
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+public class GrokAsaParser extends BasicParser {
+
+	private static final long serialVersionUID = 945353287115350798L;
+	private transient  Grok  grok;
+	Map<String, String> patternMap;
+	private transient  Map<String, Grok> grokMap;
+	private transient  InputStream pattern_url;
+
+	public static final String PREFIX = "stream2file";
+	public static final String SUFFIX = ".tmp";
+
+	public static File stream2file(InputStream in) throws IOException {
+		final File tempFile = File.createTempFile(PREFIX, SUFFIX);
+		tempFile.deleteOnExit();
+		try (FileOutputStream out = new FileOutputStream(tempFile)) {
+			IOUtils.copy(in, out);
+		}
+		return tempFile;
+	}
+
+	public GrokAsaParser() throws Exception {
+		// pattern_url = Resources.getResource("patterns/asa");
+
+		pattern_url = getClass().getClassLoader().getResourceAsStream(
+						"src/main/patterns/asa");
+
+		File file = stream2file(pattern_url);
+		grok = Grok.create(file.getPath());
+
+		patternMap = getPatternMap();
+		grokMap = getGrokMap();
+
+		grok.compile("%{CISCO_TAGGED_SYSLOG}");
+	}
+
+	public GrokAsaParser(String filepath) throws Exception {
+
+		grok = Grok.create(filepath);
+		// grok.getNamedRegexCollection().put("ciscotag","CISCOFW302013_302014_302015_302016");
+		grok.compile("%{CISCO_TAGGED_SYSLOG}");
+
+	}
+
+	public GrokAsaParser(String filepath, String pattern) throws Exception {
+
+		grok = Grok.create(filepath);
+		grok.compile("%{" + pattern + "}");
+	}
+
+	private Map<String, Object> getMap(String pattern, String text)
+			throws GrokException {
+
+		Grok g = grokMap.get(pattern);
+		if (g != null) {
+			Match gm = g.match(text);
+			gm.captures();
+			return gm.toMap();
+		} else {
+			return new HashMap<String, Object>();
+		}
+
+	}
+
+	private Map<String, Grok> getGrokMap() throws GrokException, IOException {
+		Map<String, Grok> map = new HashMap<String, Grok>();
+
+		for (Map.Entry<String, String> entry : patternMap.entrySet()) {
+			File file = stream2file(pattern_url);
+			Grok grok = Grok.create(file.getPath());
+			grok.compile("%{" + entry.getValue() + "}");
+
+			map.put(entry.getValue(), grok);
+
+		}
+
+		return map;
+	}
+
+	private Map<String, String> getPatternMap() {
+		Map<String, String> map = new HashMap<String, String>();
+
+		map.put("ASA-2-106001", "CISCOFW106001");
+		map.put("ASA-2-106006", "CISCOFW106006_106007_106010");
+		map.put("ASA-2-106007", "CISCOFW106006_106007_106010");
+		map.put("ASA-2-106010", "CISCOFW106006_106007_106010");
+		map.put("ASA-3-106014", "CISCOFW106014");
+		map.put("ASA-6-106015", "CISCOFW106015");
+		map.put("ASA-1-106021", "CISCOFW106021");
+		map.put("ASA-4-106023", "CISCOFW106023");
+		map.put("ASA-5-106100", "CISCOFW106100");
+		map.put("ASA-6-110002", "CISCOFW110002");
+		map.put("ASA-6-302010", "CISCOFW302010");
+		map.put("ASA-6-302013", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302014", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302015", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302016", "CISCOFW302013_302014_302015_302016");
+		map.put("ASA-6-302020", "CISCOFW302020_302021");
+		map.put("ASA-6-302021", "CISCOFW302020_302021");
+		map.put("ASA-6-305011", "CISCOFW305011");
+		map.put("ASA-3-313001", "CISCOFW313001_313004_313008");
+		map.put("ASA-3-313004", "CISCOFW313001_313004_313008");
+		map.put("ASA-3-313008", "CISCOFW313001_313004_313008");
+		map.put("ASA-4-313005", "CISCOFW313005");
+		map.put("ASA-4-402117", "CISCOFW402117");
+		map.put("ASA-4-402119", "CISCOFW402119");
+		map.put("ASA-4-419001", "CISCOFW419001");
+		map.put("ASA-4-419002", "CISCOFW419002");
+		map.put("ASA-4-500004", "CISCOFW500004");
+		map.put("ASA-6-602303", "CISCOFW602303_602304");
+		map.put("ASA-6-602304", "CISCOFW602303_602304");
+		map.put("ASA-7-710001", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710002", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710003", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710005", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-7-710006", "CISCOFW710001_710002_710003_710005_710006");
+		map.put("ASA-6-713172", "CISCOFW713172");
+		map.put("ASA-4-733100", "CISCOFW733100");
+		map.put("ASA-6-305012", "CISCOFW305012");
+		map.put("ASA-7-609001", "CISCOFW609001");
+		map.put("ASA-7-609002", "CISCOFW609002");
+
+		return map;
+	}
+
+	public static Long convertToEpoch(String m, String d, String ts,
+			boolean adjust_timezone) throws ParseException {
+		d = d.trim();
+
+		if (d.length() <= 2)
+			d = "0" + d;
+
+		Date date = new SimpleDateFormat("MMM", Locale.ENGLISH).parse(m);
+		Calendar cal = Calendar.getInstance();
+		cal.setTime(date);
+		String month = String.valueOf(cal.get(Calendar.MONTH));
+		int year = Calendar.getInstance().get(Calendar.YEAR);
+
+		if (month.length() <= 2)
+			month = "0" + month;
+
+		String coglomerated_ts = year + "-" + month + "-" + d + " " + ts;
+
+		System.out.println(coglomerated_ts);
+
+		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+
+		if (adjust_timezone)
+			sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+		date = sdf.parse(coglomerated_ts);
+		long timeInMillisSinceEpoch = date.getTime();
+
+		return timeInMillisSinceEpoch;
+	}
+	
+	@Override
+	public void init() {
+		// pattern_url = Resources.getResource("patterns/asa");
+
+				pattern_url = getClass().getClassLoader().getResourceAsStream(
+								"src/main/patterns/asa");
+
+				File file = null;
+				try {
+					file = stream2file(pattern_url);
+				} catch (IOException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+				try {
+					grok = Grok.create(file.getPath());
+				} catch (GrokException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+
+				patternMap = getPatternMap();
+				try {
+					grokMap = getGrokMap();
+				} catch (GrokException | IOException e1) {
+					// TODO Auto-generated catch block
+					e1.printStackTrace();
+				}
+
+				try {
+					grok.compile("%{CISCO_TAGGED_SYSLOG}");
+				} catch (GrokException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+	}
+
+	@Override
+	public List<JSONObject> parse(byte[] raw_message) {
+
+		String toParse = "";
+		JSONObject toReturn;
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			toParse = new String(raw_message, "UTF-8");
+
+			System.out.println("Received message: " + toParse);
+
+			Match gm = grok.match(toParse);
+			gm.captures();
+
+			toReturn = new JSONObject();
+
+			toReturn.putAll(gm.toMap());
+
+			String str = toReturn.get("ciscotag").toString();
+			String pattern = patternMap.get(str);
+
+			Map<String, Object> response = getMap(pattern, toParse);
+
+			toReturn.putAll(response);
+
+			//System.out.println("*******I MAPPED: " + toReturn);
+			long timestamp = convertToEpoch(toReturn.get("MONTH").toString(), toReturn
+											.get("MONTHDAY").toString(),
+							toReturn.get("TIME").toString(),
+							true);
+			toReturn.put("timestamp", timestamp);
+			
+			toReturn.remove("MONTHDAY");
+			toReturn.remove("TIME");
+			toReturn.remove("MINUTE");
+			toReturn.remove("HOUR");
+			toReturn.remove("YEAR");
+			toReturn.remove("SECOND");
+			
+			toReturn.put("ip_src_addr", toReturn.remove("IPORHOST"));
+			toReturn.put("original_string", toParse);
+			messages.add(toReturn);
+			return messages;
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}
+
+	}
+
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bolt/ParserBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bolt/ParserBolt.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bolt/ParserBolt.java
new file mode 100644
index 0000000..e29f900
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bolt/ParserBolt.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.bolt.ConfiguredBolt;
+import org.apache.metron.parsers.filters.GenericMessageFilter;
+import org.apache.metron.common.utils.ErrorUtils;
+import org.apache.metron.parsers.interfaces.MessageFilter;
+import org.apache.metron.parsers.interfaces.MessageParser;
+import org.apache.metron.common.interfaces.MessageWriter;
+import org.json.simple.JSONObject;
+
+import java.util.List;
+import java.util.Map;
+
+public class ParserBolt extends ConfiguredBolt {
+
+  private OutputCollector collector;
+  private MessageParser<JSONObject> parser;
+  private MessageFilter<JSONObject> filter = new GenericMessageFilter();
+  private MessageWriter<JSONObject> writer;
+  private String sensorType;
+
+  public ParserBolt(String zookeeperUrl, String sensorType, MessageParser<JSONObject> parser, MessageWriter<JSONObject> writer) {
+    super(zookeeperUrl);
+    this.parser = parser;
+    this.sensorType = sensorType;
+    this.writer = writer;
+  }
+
+  public ParserBolt withMessageFilter(MessageFilter<JSONObject> filter) {
+    this.filter = filter;
+    return this;
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    super.prepare(stormConf, context, collector);
+    this.collector = collector;
+    parser.init();
+    writer.init();
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void execute(Tuple tuple) {
+    byte[] originalMessage = tuple.getBinary(0);
+    try {
+      List<JSONObject> messages = parser.parse(originalMessage);
+      for(JSONObject message: messages) {
+        if (parser.validate(message)) {
+          if (filter != null && filter.emitTuple(message)) {
+            message.put(Constants.SENSOR_TYPE, sensorType);
+            writer.write(sensorType, configurations, tuple, message);
+          }
+        }
+      }
+      collector.ack(tuple);
+    } catch (Throwable ex) {
+      ErrorUtils.handleError(collector, ex, Constants.ERROR_STREAM);
+    }
+  }
+
+  @Override
+  public void declareOutputFields(OutputFieldsDeclarer declarer) {
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/BasicBroParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/BasicBroParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/BasicBroParser.java
new file mode 100644
index 0000000..71eb64f
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/BasicBroParser.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.parsers.bro;
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("serial")
+public class BasicBroParser extends BasicParser {
+
+    protected static final Logger _LOG = LoggerFactory
+            .getLogger(BasicBroParser.class);
+    private JSONCleaner cleaner = new JSONCleaner();
+
+    @Override
+    public void init() {
+
+    }
+
+    @SuppressWarnings("unchecked")
+    public List<JSONObject> parse(byte[] msg) {
+
+        _LOG.trace("[Metron] Starting to parse incoming message");
+
+        String rawMessage = null;
+        List<JSONObject> messages = new ArrayList<>();
+        try {
+            rawMessage = new String(msg, "UTF-8");
+            _LOG.trace("[Metron] Received message: " + rawMessage);
+
+            JSONObject cleanedMessage = cleaner.clean(rawMessage);
+            _LOG.debug("[Metron] Cleaned message: " + cleanedMessage);
+
+            if (cleanedMessage == null || cleanedMessage.isEmpty()) {
+                throw new Exception("Unable to clean message: " + rawMessage);
+            }
+
+            String key;
+            JSONObject payload;
+            if (cleanedMessage.containsKey("type")) {
+                key = cleanedMessage.get("type").toString();
+                payload = cleanedMessage;
+            } else {
+                key = cleanedMessage.keySet().iterator().next().toString();
+
+                if (key == null) {
+                    throw new Exception("Unable to retrieve key for message: "
+                            + rawMessage);
+                }
+
+                payload = (JSONObject) cleanedMessage.get(key);
+            }
+
+            if (payload == null) {
+                throw new Exception("Unable to retrieve payload for message: "
+                    + rawMessage);
+            }
+
+            String originalString = key.toUpperCase() + " |";
+            for (Object k : payload.keySet()) {
+                String value = payload.get(k).toString();
+                originalString += " " + k.toString() + ":" + value;
+            }
+            payload.put("original_string", originalString);
+
+            replaceKey(payload, "timestamp", new String[]{ "ts" });
+
+            long timestamp = 0L;
+            if (payload.containsKey("timestamp")) {
+                try {
+                    String broTimestamp = payload.get("timestamp").toString();
+                    String convertedTimestamp = broTimestamp.replace(".","");
+                    convertedTimestamp = convertedTimestamp.substring(0,13);
+                    timestamp = Long.parseLong(convertedTimestamp);
+                    payload.put("timestamp", timestamp);
+                    payload.put("bro_timestamp",broTimestamp);
+                    _LOG.trace(String.format("[Metron] new bro record - timestamp : %s", payload.get("timestamp")));
+                } catch (NumberFormatException nfe) {
+                    _LOG.error(String.format("[Metron] timestamp is invalid: %s", payload.get("timestamp")));
+                    payload.put("timestamp", 0);
+                }
+            }
+
+            boolean ipSrcReplaced = replaceKey(payload, "ip_src_addr", new String[]{"source_ip", "id.orig_h"});
+            if (!ipSrcReplaced) {
+                replaceKeyArray(payload, "ip_src_addr", new String[]{ "tx_hosts" });
+            }
+
+            boolean ipDstReplaced = replaceKey(payload, "ip_dst_addr", new String[]{"dest_ip", "id.resp_h"});
+            if (!ipDstReplaced) {
+                replaceKeyArray(payload, "ip_dst_addr", new String[]{ "rx_hosts" });
+            }
+
+            replaceKey(payload, "ip_src_port", new String[]{"source_port", "id.orig_p"});
+            replaceKey(payload, "ip_dst_port", new String[]{"dest_port", "id.resp_p"});
+
+            payload.put("protocol", key);
+            _LOG.debug("[Metron] Returning parsed message: " + payload);
+            messages.add(payload);
+            return messages;
+
+        } catch (Exception e) {
+
+            _LOG.error("Unable to Parse Message: " + rawMessage);
+            e.printStackTrace();
+            return null;
+        }
+
+    }
+
+    private boolean replaceKey(JSONObject payload, String toKey, String[] fromKeys) {
+        for (String fromKey : fromKeys) {
+            if (payload.containsKey(fromKey)) {
+                Object value = payload.remove(fromKey);
+                payload.put(toKey, value);
+                _LOG.trace(String.format("[Metron] Added %s to %s", toKey, payload));
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private boolean replaceKeyArray(JSONObject payload, String toKey, String[] fromKeys) {
+        for (String fromKey : fromKeys) {
+            if (payload.containsKey(fromKey)) {
+                JSONArray value = (JSONArray) payload.remove(fromKey);
+                if (value != null && !value.isEmpty()) {
+                    payload.put(toKey, value.get(0));
+                    _LOG.trace(String.format("[Metron] Added %s to %s", toKey, payload));
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/JSONCleaner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/JSONCleaner.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/JSONCleaner.java
new file mode 100644
index 0000000..41d97f4
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/bro/JSONCleaner.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.bro;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+public class JSONCleaner implements Serializable {
+
+	/**
+	 * 
+	 */
+	private static final long serialVersionUID = 1L;
+
+
+	/**
+	 * @param jsonString
+	 * @return
+	 * @throws ParseException
+	 * Takes a json String as input and removes any Special Chars (^ a-z A-Z 0-9) in the keys
+	 */
+	@SuppressWarnings({"unchecked","rawtypes"})
+	public JSONObject clean(String jsonString) throws ParseException
+	{
+		JSONParser parser = new JSONParser();
+		
+		
+		Map json = (Map) parser.parse(jsonString);
+		JSONObject output = new JSONObject();
+	    Iterator iter = json.entrySet().iterator();
+
+		 while(iter.hasNext()){
+		      Map.Entry entry = (Map.Entry)iter.next();
+		      
+		      String key = ((String)entry.getKey()).replaceAll("[^\\._a-zA-Z0-9]+","");
+		      output.put(key, entry.getValue());
+		    }
+
+		return output;
+	}
+	
+	
+	@SuppressWarnings({ "unchecked", "rawtypes", "unused" })
+	public static void main(String args[])
+	{
+		String jsonText = "{\"first_1\": 123, \"second\": [4, 5, 6], \"third\": 789}";
+		JSONCleaner cleaner = new JSONCleaner();
+		try {
+			//cleaner.clean(jsonText);
+			Map obj=new HashMap();
+			  obj.put("name","foo");
+			  obj.put("num",new Integer(100));
+			  obj.put("balance",new Double(1000.21));
+			  obj.put("is_vip",new Boolean(true));
+			  obj.put("nickname",null);
+			Map obj1 = new HashMap();
+			obj1.put("sourcefile", obj);
+			
+			JSONObject json = new JSONObject(obj1);
+			System.out.println(json);
+			  
+			  
+			  
+			  System.out.print(jsonText);
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/BroMessageFilter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/BroMessageFilter.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/BroMessageFilter.java
new file mode 100644
index 0000000..d026d08
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/BroMessageFilter.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.filters;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.metron.parsers.interfaces.MessageFilter;
+import org.json.simple.JSONObject;
+
+import java.io.Serializable;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public class BroMessageFilter implements MessageFilter<JSONObject>,
+				Serializable {
+
+	/**
+	 * Filter protocols based on whitelists and blacklists
+	 */
+	
+	private static final long serialVersionUID = -3824683649114625033L;
+	private String _key;
+	private final Set<String> _known_protocols;
+
+	 /**
+	 * @param  conf  Commons configuration for reading properties files
+	 * @param  key Key in a JSON mesage where the protocol field is located
+	 */
+	
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	public BroMessageFilter(Configuration conf, String key) {
+		_key = key;
+		_known_protocols = new HashSet<>();
+		List known_protocols = conf.getList("source.known.protocols");
+		_known_protocols.addAll(known_protocols);
+	}
+
+	 /**
+	 * @param  message  JSON representation of a message with a protocol field
+	 * @return      False if message if filtered and True if message is not filtered
+	 */
+	
+	public boolean emitTuple(JSONObject message) {
+		String protocol = (String) message.get(_key);
+		return _known_protocols.contains(protocol);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/GenericMessageFilter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/GenericMessageFilter.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/GenericMessageFilter.java
new file mode 100644
index 0000000..9defe32
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/filters/GenericMessageFilter.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.filters;
+
+import org.apache.metron.parsers.interfaces.MessageFilter;
+import org.json.simple.JSONObject;
+
+import java.io.Serializable;
+
+public class GenericMessageFilter implements MessageFilter<JSONObject>,
+				Serializable {
+
+	private static final long serialVersionUID = 3626397212398318852L;
+
+	public boolean emitTuple(JSONObject message) {
+		return true;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/fireeye/BasicFireEyeParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/fireeye/BasicFireEyeParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/fireeye/BasicFireEyeParser.java
new file mode 100644
index 0000000..b90d2b7
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/fireeye/BasicFireEyeParser.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.fireeye;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.metron.parsers.utils.ParserUtils;
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class BasicFireEyeParser extends BasicParser {
+
+	private static final long serialVersionUID = 6328907550159134550L;
+	protected static final Logger LOG = LoggerFactory
+					.getLogger(BasicFireEyeParser.class);
+
+
+	String tsRegex ="([a-zA-Z]{3})\\s+(\\d+)\\s+(\\d+\\:\\d+\\:\\d+)\\s+(\\d+\\.\\d+\\.\\d+\\.\\d+)";
+	
+	
+	Pattern tsPattern = Pattern.compile(tsRegex);
+	// private transient static MetronGrok grok;
+	// private transient static InputStream pattern_url;
+
+	public BasicFireEyeParser() throws Exception {
+		// pattern_url = getClass().getClassLoader().getResourceAsStream(
+		// "patterns/fireeye");
+		//
+		// File file = ParserUtils.stream2file(pattern_url);
+		// grok = MetronGrok.create(file.getPath());
+		//
+		// grok.compile("%{FIREEYE_BASE}");
+	}
+
+	@Override
+	public void init() {
+
+	}
+
+	@Override
+	public List<JSONObject> parse(byte[] raw_message) {
+		String toParse = "";
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			toParse = new String(raw_message, "UTF-8");
+
+			// String[] mTokens = toParse.split(" ");
+
+			String positveIntPattern = "<[1-9][0-9]*>";
+			Pattern p = Pattern.compile(positveIntPattern);
+			Matcher m = p.matcher(toParse);
+
+			String delimiter = "";
+
+			while (m.find()) {
+				delimiter = m.group();
+
+			}
+
+			if (!StringUtils.isBlank(delimiter)) {
+				String[] tokens = toParse.split(delimiter);
+
+				if (tokens.length > 1)
+					toParse = delimiter + tokens[1];
+
+			}
+
+			JSONObject toReturn = parseMessage(toParse);
+
+			toReturn.put("timestamp", getTimeStamp(toParse,delimiter));
+			messages.add(toReturn);
+			return messages;
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			return null;
+		}
+
+	}
+
+	private long getTimeStamp(String toParse,String delimiter) throws ParseException {
+		
+		long ts = 0;
+		String month = null;
+		String day = null;
+		String time = null;
+		Matcher tsMatcher = tsPattern.matcher(toParse);
+		if (tsMatcher.find()) {
+			month = tsMatcher.group(1);
+			day = tsMatcher.group(2);
+			time = tsMatcher.group(3);
+	
+				} else {
+			LOG.warn("Unable to find timestamp in message: " + toParse);
+			ts = ParserUtils.convertToEpoch(month, day, time, true);
+		}
+
+			return ts;
+	
+	}
+
+	private JSONObject parseMessage(String toParse) {
+
+		// System.out.println("Received message: " + toParse);
+
+		// MetronMatch gm = grok.match(toParse);
+		// gm.captures();
+
+		JSONObject toReturn = new JSONObject();
+		//toParse = toParse.replaceAll("  ", " ");
+		String[] mTokens = toParse.split("\\s+");
+	 //mTokens = toParse.split(" ");
+
+		// toReturn.putAll(gm.toMap());
+
+		String id = mTokens[4];
+
+		// We are not parsing the fedata for multi part message as we cannot
+		// determine how we can split the message and how many multi part
+		// messages can there be.
+		// The message itself will be stored in the response.
+
+		String[] tokens = id.split("\\.");
+		if (tokens.length == 2) {
+
+			String[] array = Arrays.copyOfRange(mTokens, 1, mTokens.length - 1);
+			String syslog = Joiner.on(" ").join(array);
+
+			Multimap<String, String> multiMap = formatMain(syslog);
+
+			for (String key : multiMap.keySet()) {
+
+				String value = Joiner.on(",").join(multiMap.get(key));
+				toReturn.put(key, value.trim());
+			}
+
+		}
+
+		toReturn.put("original_string", toParse);
+
+		String ip_src_addr = (String) toReturn.get("dvc");
+		String ip_src_port = (String) toReturn.get("src_port");
+		String ip_dst_addr = (String) toReturn.get("dst_ip");
+		String ip_dst_port = (String) toReturn.get("dst_port");
+
+		if (ip_src_addr != null)
+			toReturn.put("ip_src_addr", ip_src_addr);
+		if (ip_src_port != null)
+			toReturn.put("ip_src_port", ip_src_port);
+		if (ip_dst_addr != null)
+			toReturn.put("ip_dst_addr", ip_dst_addr);
+		if (ip_dst_port != null)
+			toReturn.put("ip_dst_port", ip_dst_port);
+
+		System.out.println(toReturn);
+
+		return toReturn;
+	}
+
+	private Multimap<String, String> formatMain(String in) {
+		Multimap<String, String> multiMap = ArrayListMultimap.create();
+		String input = in.replaceAll("cn3", "dst_port")
+				.replaceAll("cs5", "cncHost").replaceAll("proto", "protocol")
+				.replaceAll("rt=", "timestamp=").replaceAll("cs1", "malware")
+				.replaceAll("dst=", "dst_ip=")
+				.replaceAll("shost", "src_hostname")
+				.replaceAll("dmac", "dst_mac").replaceAll("smac", "src_mac")
+				.replaceAll("spt", "src_port")
+				.replaceAll("\\bsrc\\b", "src_ip");
+		String[] tokens = input.split("\\|");
+
+		if (tokens.length > 0) {
+			String message = tokens[tokens.length - 1];
+
+			String pattern = "([\\w\\d]+)=([^=]*)(?=\\s*\\w+=|\\s*$) ";
+			Pattern p = Pattern.compile(pattern);
+			Matcher m = p.matcher(message);
+
+			while (m.find()) {
+				String[] str = m.group().split("=");
+				multiMap.put(str[0], str[1]);
+
+			}
+
+		}
+		return multiMap;
+	}
+
+	
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageFilter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageFilter.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageFilter.java
new file mode 100644
index 0000000..2e5ab29
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageFilter.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.interfaces;
+
+/**
+ * Decides whether a parsed message should be emitted downstream.
+ *
+ * @param <T> the message type being filtered
+ */
+public interface MessageFilter<T> {
+
+	/**
+	 * @param message the parsed message to inspect
+	 * @return true if the message should be emitted as a tuple
+	 */
+	boolean emitTuple(T message);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageParser.java
new file mode 100644
index 0000000..11efa53
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/interfaces/MessageParser.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.interfaces;
+
+import java.util.List;
+
+/**
+ * Contract for parsers that turn raw message bytes into structured messages.
+ *
+ * @param <T> the parsed message type
+ */
+public interface MessageParser<T> {
+
+	/** Performs any one-time setup before parsing begins. */
+	void init();
+	/**
+	 * Parses a raw message into structured messages.
+	 *
+	 * @param rawMessage the raw message bytes
+	 * @return the parsed messages
+	 */
+	List<T> parse(byte[] rawMessage);
+	/**
+	 * @param message a parsed message
+	 * @return true if the message is considered well-formed
+	 */
+	boolean validate(T message);
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/BasicIseParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/BasicIseParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/BasicIseParser.java
new file mode 100644
index 0000000..19b3ac6
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/BasicIseParser.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.metron.parsers.ise;
+
+import com.esotericsoftware.minlog.Log;
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.List;
+
+@SuppressWarnings("serial")
+/**
+ * Parses Cisco ISE syslog payloads into Metron JSON messages using the
+ * JavaCC-generated {@link ISEParser}, then copies the well-known
+ * source/destination fields onto Metron's standard field names.
+ */
+@SuppressWarnings("serial")
+public class BasicIseParser extends BasicParser {
+
+	private static final Logger _LOG = LoggerFactory
+			.getLogger(BasicIseParser.class);
+	// Shared parser, reinitialized per message. NOTE(review): JavaCC parsers
+	// are not thread-safe; if parse() can run concurrently this should be
+	// per-instance or synchronized - confirm against topology parallelism.
+	// ('transient' removed: it has no effect on a static field.)
+	static final ISEParser _parser = new ISEParser("header=");
+
+	@Override
+	public void init() {
+		// No initialization required.
+	}
+
+	/**
+	 * Parses a raw ISE message.
+	 *
+	 * @param msg UTF-8 encoded raw message bytes
+	 * @return a one-element list containing the parsed message, or
+	 *         {@code null} when parsing fails (null return preserved for
+	 *         existing callers that check for it)
+	 */
+	@SuppressWarnings("unchecked")
+	@Override
+	public List<JSONObject> parse(byte[] msg) {
+
+		String raw_message = "";
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			raw_message = new String(msg, "UTF-8");
+			_LOG.debug("Received message: " + raw_message);
+
+			/*
+			 * Reinitialize the shared parser. It has the effect of calling
+			 * the constructor again with the new input.
+			 */
+			_parser.ReInit(new StringReader("header=" + raw_message.trim()));
+
+			JSONObject payload = _parser.parseObject();
+
+			String ip_src_addr = (String) payload.get("Device IP Address");
+			String ip_src_port = (String) payload.get("Device Port");
+			String ip_dst_addr = (String) payload.get("DestinationIPAddress");
+			String ip_dst_port = (String) payload.get("DestinationPort");
+
+			/*
+			 * Standard fields for Metron; only set when present in the source.
+			 */
+
+			if (ip_src_addr != null)
+				payload.put("ip_src_addr", ip_src_addr);
+			if (ip_src_port != null)
+				payload.put("ip_src_port", ip_src_port);
+			if (ip_dst_addr != null)
+				payload.put("ip_dst_addr", ip_dst_addr);
+			if (ip_dst_port != null)
+				payload.put("ip_dst_port", ip_dst_port);
+			messages.add(payload);
+			return messages;
+
+		} catch (Exception e) {
+			// Single logging channel with full stack trace, instead of the
+			// previous mix of minlog Log.error(e.toString()) and
+			// e.printStackTrace().
+			_LOG.error("Failed to parse ISE message: " + raw_message, e);
+		}
+		return null;
+	}
+
+	@Override
+	public boolean validate(JSONObject message) {
+		// All successfully parsed messages are considered valid.
+		return true;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.java
new file mode 100644
index 0000000..0f54261
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.java
@@ -0,0 +1,660 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. ISEParser.java */
+package org.apache.metron.parsers.ise;
+import java.io.*;
+import java.util.*;
+import org.json.simple.*;
+
+/**
+* Basic ISE data parser generated by JavaCC. 
+*/
+public class ISEParser implements Serializable, ISEParserConstants {
+ // private boolean nativeNumbers = false;
+
+	private static final long serialVersionUID = -2531656825360044979L;
+
+	public ISEParser()
+	  { //do nothing
+	  }
+
+  public ISEParser(String input)
+  {
+    this (new StringReader(input));
+  }
+
+  /**
+	* Parses a ISE String into a JSON object {@code Map}.
+	*/
+  public JSONObject parseObject() throws ParseException
+  {
+    JSONObject toReturn = object();
+    if (!ensureEOF()) throw new IllegalStateException("Expected EOF, but still had content to parse");
+    return toReturn;
+  }
+
+  @SuppressWarnings("unused")
+final public boolean ensureEOF() throws ParseException {
+    switch (jj_nt.kind) {
+    case COMMA:
+      jj_consume_token(COMMA);
+      break;
+    default:
+      jj_la1[0] = jj_gen;
+      ;
+    }
+    jj_consume_token(0);
+    {if (true) return true;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings({ "unchecked", "unused" })
+final public JSONObject innerMap() throws ParseException {
+  final JSONObject json = new JSONObject();
+  String key;
+  Object value;
+    key = objectKey();
+    jj_consume_token(EQUALS);
+    value = value();
+    json.put(key, value);
+    key = null;
+    value = null;
+    label_1:
+    while (true) {
+      switch (jj_nt.kind) {
+      case SLASH:
+        ;
+        break;
+      default:
+        jj_la1[1] = jj_gen;
+        break label_1;
+      }
+      jj_consume_token(SLASH);
+      jj_consume_token(COMMA);
+      key = objectKey();
+      jj_consume_token(EQUALS);
+      value = value();
+      json.put(key, value);
+      key = null;
+      value = null;
+    }
+    {if (true) return json;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings({ "unused", "unchecked" })
+final public JSONObject object() throws ParseException {
+  final JSONObject json = new JSONObject();
+  String key;
+  Object value;
+    key = objectKey();
+    jj_consume_token(EQUALS);
+    value = value();
+    json.put(key, value);
+    key = null;
+    value = null;
+    label_2:
+    while (true) {
+      if (jj_2_1(2)) {
+        ;
+      } else {
+        break label_2;
+      }
+      jj_consume_token(COMMA);
+      key = objectKey();
+      jj_consume_token(EQUALS);
+      value = value();
+        json.put(key, value);
+        key = null;
+        value = null;
+    }
+    {if (true) return json;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String objectKey() throws ParseException {
+  String k;
+    k = string();
+    //  System.out.println("key == " + k);
+    {if (true) return k.trim();}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings({ "unused", "rawtypes" })
+final public Object value() throws ParseException {
+  Object x;
+  String eof = "EOF";
+  Map m = null;
+    if (jj_2_2(2147483647)) {
+      x = nullValue();
+    } else if (jj_2_3(2147483647)) {
+      x = innerMap();
+    } else {
+      switch (jj_nt.kind) {
+      case TAG:
+        x = tagString();
+        break;
+      default:
+        jj_la1[2] = jj_gen;
+        if (jj_2_4(2147483647)) {
+          x = blankValue();
+        } else if (jj_2_5(2147483647)) {
+          x = braced_string();
+        } else if (jj_2_6(2)) {
+          x = string();
+        } else {
+          jj_consume_token(-1);
+          throw new ParseException();
+        }
+      }
+    }
+    //  System.out.println("val == " + x);
+    //if (x instanceof Map) return "Map";
+    //return (String) x;
+    {if (true) return x;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String nullValue() throws ParseException {
+    {if (true) return null;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String tagString() throws ParseException {
+  String output = "(tag=0)";
+    jj_consume_token(TAG);
+    jj_consume_token(STRING_BODY);
+    {if (true) return output + token.image;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String blankValue() throws ParseException {
+    {if (true) return null;}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String string() throws ParseException {
+  String s;
+    jj_consume_token(STRING_BODY);
+    {if (true) return token.image.trim();}
+    throw new Error("Missing return statement in function");
+  }
+
+  @SuppressWarnings("unused")
+final public String braced_string() throws ParseException {
+  String s;
+    jj_consume_token(BRACED_STRING);
+    //  System.out.println("braced == " + token.image);
+    s = token.image;
+    jj_consume_token(COMMA);
+    {if (true) return s.trim();}
+    throw new Error("Missing return statement in function");
+  }
+
+  private boolean jj_2_1(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_1(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(0, xla); }
+  }
+
+  private boolean jj_2_2(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_2(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(1, xla); }
+  }
+
+  private boolean jj_2_3(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_3(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(2, xla); }
+  }
+
+  private boolean jj_2_4(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_4(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(3, xla); }
+  }
+
+  private boolean jj_2_5(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_5(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(4, xla); }
+  }
+
+  private boolean jj_2_6(int xla) {
+    jj_la = xla; jj_lastpos = jj_scanpos = token;
+    try { return !jj_3_6(); }
+    catch(LookaheadSuccess ls) { return true; }
+    finally { jj_save(5, xla); }
+  }
+
+  private boolean jj_3_5() {
+    if (jj_3R_5()) return true;
+    return false;
+  }
+
+  private boolean jj_3_4() {
+    if (jj_scan_token(0)) return true;
+    return false;
+  }
+
+  private boolean jj_3R_5() {
+    if (jj_scan_token(BRACED_STRING)) return true;
+    if (jj_scan_token(COMMA)) return true;
+    return false;
+  }
+
+  private boolean jj_3_3() {
+    if (jj_3R_4()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_4() {
+    if (jj_3R_3()) return true;
+    if (jj_scan_token(EQUALS)) return true;
+    if (jj_3R_7()) return true;
+    Token xsp;
+    while (true) {
+      xsp = jj_scanpos;
+      if (jj_3R_8()) { jj_scanpos = xsp; break; }
+    }
+    return false;
+  }
+
+  private boolean jj_3_2() {
+    if (jj_scan_token(COMMA)) return true;
+    return false;
+  }
+
+  private boolean jj_3_6() {
+    if (jj_3R_6()) return true;
+    return false;
+  }
+
+  private boolean jj_3_1() {
+    if (jj_scan_token(COMMA)) return true;
+    if (jj_3R_3()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_13() {
+    if (jj_3R_5()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_12() {
+    if (jj_3R_16()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_11() {
+    if (jj_3R_15()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_6() {
+    if (jj_scan_token(STRING_BODY)) return true;
+    return false;
+  }
+
+  private boolean jj_3R_10() {
+    if (jj_3R_4()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_9() {
+    if (jj_3R_14()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_7() {
+    Token xsp;
+    xsp = jj_scanpos;
+    if (jj_3R_9()) {
+    jj_scanpos = xsp;
+    if (jj_3R_10()) {
+    jj_scanpos = xsp;
+    if (jj_3R_11()) {
+    jj_scanpos = xsp;
+    if (jj_3R_12()) {
+    jj_scanpos = xsp;
+    if (jj_3R_13()) {
+    jj_scanpos = xsp;
+    if (jj_3_6()) return true;
+    }
+    }
+    }
+    }
+    }
+    return false;
+  }
+
+  private boolean jj_3R_16() {
+    return false;
+  }
+
+  private boolean jj_3R_15() {
+    if (jj_scan_token(TAG)) return true;
+    if (jj_scan_token(STRING_BODY)) return true;
+    return false;
+  }
+
+  private boolean jj_3R_3() {
+    if (jj_3R_6()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_8() {
+    if (jj_scan_token(SLASH)) return true;
+    if (jj_scan_token(COMMA)) return true;
+    if (jj_3R_3()) return true;
+    if (jj_scan_token(EQUALS)) return true;
+    if (jj_3R_7()) return true;
+    return false;
+  }
+
+  private boolean jj_3R_14() {
+    return false;
+  }
+
+  /** Generated Token Manager. */
+  public ISEParserTokenManager token_source;
+  JavaCharStream jj_input_stream;
+  /** Current token. */
+  public Token token;
+  /** Next token. */
+  public Token jj_nt;
+  private Token jj_scanpos, jj_lastpos;
+  private int jj_la;
+  private int jj_gen;
+  final private int[] jj_la1 = new int[3];
+  static private int[] jj_la1_0;
+  static {
+      jj_la1_init_0();
+   }
+   private static void jj_la1_init_0() {
+      jj_la1_0 = new int[] {0x20,0x80,0x100,};
+   }
+  final private JJCalls[] jj_2_rtns = new JJCalls[6];
+  private boolean jj_rescan = false;
+  private int jj_gc = 0;
+
+  /** Constructor with InputStream. */
+  public ISEParser(java.io.InputStream stream) {
+     this(stream, null);
+  }
+  /** Constructor with InputStream and supplied encoding */
+  public ISEParser(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream = new JavaCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source = new ISEParserTokenManager(jj_input_stream);
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  /** Reinitialise. */
+  public void ReInit(java.io.InputStream stream) {
+     ReInit(stream, null);
+  }
+  /** Reinitialise. */
+  public void ReInit(java.io.InputStream stream, String encoding) {
+    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  /** Constructor. */
+  public ISEParser(java.io.Reader stream) {
+    jj_input_stream = new JavaCharStream(stream, 1, 1);
+    token_source = new ISEParserTokenManager(jj_input_stream);
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  /** Reinitialise. */
+  public void ReInit(java.io.Reader stream) {
+    jj_input_stream.ReInit(stream, 1, 1);
+    token_source.ReInit(jj_input_stream);
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  /** Constructor with generated Token Manager. */
+  public ISEParser(ISEParserTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  /** Reinitialise. */
+  public void ReInit(ISEParserTokenManager tm) {
+    token_source = tm;
+    token = new Token();
+    token.next = jj_nt = token_source.getNextToken();
+    jj_gen = 0;
+    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
+    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
+  }
+
+  private Token jj_consume_token(int kind) throws ParseException {
+    Token oldToken = token;
+    if ((token = jj_nt).next != null) jj_nt = jj_nt.next;
+    else jj_nt = jj_nt.next = token_source.getNextToken();
+    if (token.kind == kind) {
+      jj_gen++;
+      if (++jj_gc > 100) {
+        jj_gc = 0;
+        for (int i = 0; i < jj_2_rtns.length; i++) {
+          JJCalls c = jj_2_rtns[i];
+          while (c != null) {
+            if (c.gen < jj_gen) c.first = null;
+            c = c.next;
+          }
+        }
+      }
+      return token;
+    }
+    jj_nt = token;
+    token = oldToken;
+    jj_kind = kind;
+    throw generateParseException();
+  }
+
+  static private final class LookaheadSuccess extends java.lang.Error {
+
+	private static final long serialVersionUID = -5724812746511794505L; }
+  final private LookaheadSuccess jj_ls = new LookaheadSuccess();
+  private boolean jj_scan_token(int kind) {
+    if (jj_scanpos == jj_lastpos) {
+      jj_la--;
+      if (jj_scanpos.next == null) {
+        jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken();
+      } else {
+        jj_lastpos = jj_scanpos = jj_scanpos.next;
+      }
+    } else {
+      jj_scanpos = jj_scanpos.next;
+    }
+    if (jj_rescan) {
+      int i = 0; Token tok = token;
+      while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
+      if (tok != null) jj_add_error_token(kind, i);
+    }
+    if (jj_scanpos.kind != kind) return true;
+    if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
+    return false;
+  }
+
+
+/** Get the next Token. */
+  final public Token getNextToken() {
+    if ((token = jj_nt).next != null) jj_nt = jj_nt.next;
+    else jj_nt = jj_nt.next = token_source.getNextToken();
+    jj_gen++;
+    return token;
+  }
+
+/** Get the specific Token. */
+  final public Token getToken(int index) {
+    Token t = token;
+    for (int i = 0; i < index; i++) {
+      if (t.next != null) t = t.next;
+      else t = t.next = token_source.getNextToken();
+    }
+    return t;
+  }
+
+  private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
+  private int[] jj_expentry;
+  private int jj_kind = -1;
+  private int[] jj_lasttokens = new int[100];
+  private int jj_endpos;
+
+  private void jj_add_error_token(int kind, int pos) {
+    if (pos >= 100) return;
+    if (pos == jj_endpos + 1) {
+      jj_lasttokens[jj_endpos++] = kind;
+    } else if (jj_endpos != 0) {
+      jj_expentry = new int[jj_endpos];
+      for (int i = 0; i < jj_endpos; i++) {
+        jj_expentry[i] = jj_lasttokens[i];
+      }
+      jj_entries_loop: for (java.util.Iterator<?> it = jj_expentries.iterator(); it.hasNext();) {
+        int[] oldentry = (int[])(it.next());
+        if (oldentry.length == jj_expentry.length) {
+          for (int i = 0; i < jj_expentry.length; i++) {
+            if (oldentry[i] != jj_expentry[i]) {
+              continue jj_entries_loop;
+            }
+          }
+          jj_expentries.add(jj_expentry);
+          break jj_entries_loop;
+        }
+      }
+      if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
+    }
+  }
+
+  /** Generate ParseException. */
+  public ParseException generateParseException() {
+    jj_expentries.clear();
+    boolean[] la1tokens = new boolean[11];
+    if (jj_kind >= 0) {
+      la1tokens[jj_kind] = true;
+      jj_kind = -1;
+    }
+    for (int i = 0; i < 3; i++) {
+      if (jj_la1[i] == jj_gen) {
+        for (int j = 0; j < 32; j++) {
+          if ((jj_la1_0[i] & (1<<j)) != 0) {
+            la1tokens[j] = true;
+          }
+        }
+      }
+    }
+    for (int i = 0; i < 11; i++) {
+      if (la1tokens[i]) {
+        jj_expentry = new int[1];
+        jj_expentry[0] = i;
+        jj_expentries.add(jj_expentry);
+      }
+    }
+    jj_endpos = 0;
+    jj_rescan_token();
+    jj_add_error_token(0, 0);
+    int[][] exptokseq = new int[jj_expentries.size()][];
+    for (int i = 0; i < jj_expentries.size(); i++) {
+      exptokseq[i] = jj_expentries.get(i);
+    }
+    return new ParseException(token, exptokseq, tokenImage);
+  }
+
+  /** Enable tracing. */
+  final public void enable_tracing() {
+  }
+
+  /** Disable tracing. */
+  final public void disable_tracing() {
+  }
+
+  private void jj_rescan_token() {
+    jj_rescan = true;
+    for (int i = 0; i < 6; i++) {
+    try {
+      JJCalls p = jj_2_rtns[i];
+      do {
+        if (p.gen > jj_gen) {
+          jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
+          switch (i) {
+            case 0: jj_3_1(); break;
+            case 1: jj_3_2(); break;
+            case 2: jj_3_3(); break;
+            case 3: jj_3_4(); break;
+            case 4: jj_3_5(); break;
+            case 5: jj_3_6(); break;
+          }
+        }
+        p = p.next;
+      } while (p != null);
+      } catch(LookaheadSuccess ls) { }
+    }
+    jj_rescan = false;
+  }
+
+  private void jj_save(int index, int xla) {
+    JJCalls p = jj_2_rtns[index];
+    while (p.gen > jj_gen) {
+      if (p.next == null) { p = p.next = new JJCalls(); break; }
+      p = p.next;
+    }
+    p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
+  }
+
+  static final class JJCalls {
+    int gen;
+    Token first;
+    int arg;
+    JJCalls next;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.jj
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.jj b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.jj
new file mode 100644
index 0000000..6071922
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParser.jj
@@ -0,0 +1,12 @@
+options{  CHOICE_AMBIGUITY_CHECK = 3;  OTHER_AMBIGUITY_CHECK = 2;  //DEBUG_PARSER=true
+  //DEBUG_LOOKAHEAD=true
+  //DEBUG_TOKEN_MANAGER=true
+  ERROR_REPORTING = true;  JAVA_UNICODE_ESCAPE = true;  UNICODE_INPUT = true;  IGNORE_CASE = true;  SUPPORT_CLASS_VISIBILITY_PUBLIC = false;  FORCE_LA_CHECK = true;  CACHE_TOKENS = true;  SANITY_CHECK = true;  STATIC = false;  //KEEP_LINE_COLUMN=true;
+}PARSER_BEGIN(ISEParser)/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements.  See the NOTICE file * distributed with this work for additional information * regarding copyright ownership.  The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License.  You may obtain a copy of the License at * *     http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */package org.apache.metron.ise.parser;import java.io.*;import java.util.*;import org.json.simple.*;/**
+* Basic ISE data parser generated by JavaCC.
+*/public class ISEParser implements Serializable{  private boolean nativeNumbers = false;  public ISEParser()  { //do nothing
+  }  public ISEParser(String input)  {    this (new StringReader(input));  }  /**
+	* Parses a ISE String into a JSON object {@code Map}.
+	*/  public JSONObject parseObject() throws ParseException  {    JSONObject toReturn = object();    if (!ensureEOF()) throw new IllegalStateException("Expected EOF, but still had content to parse");    return toReturn;  }}PARSER_END(ISEParser)// Ignore commentsSKIP :{  < C_SINGLE_COMMENT : "//" (~[ "\n", "\r", "\f" ])* < EOL >>| < C_MULTILINE_COMMENT : "/*" (~[ ])* "*/" >| < SH_SINGLE_COMMENT : "#" (~[ "\n", "\r", "\f" ])* < EOL >>  /*| < WHITESPACE :    " "  | "\t" >*/| < EOL :    "\n"  | "\r"  | "\f" >}// Common tokens
+TOKEN :{  < COMMA : "," >| < EQUALS : "=" >| < SLASH : "\\" >| < TAG : "(tag=0)" >}// Null token/*TOKEN :{  //< NULL : "null" >}*/// String tokens
+TOKEN :{  //< SYMBOL : ([ "a"-"z", "A"-"Z", "0", "1"-"9", " ", "\t" , ":" , "-" , "." ])+ >  < STRING_BODY :    (      (~[ "\"", "\r", "\n", "\f", "\t", "=", "," ])    |      (        "\\"        (          "r"        | "n"        | "f"        | "\\"        | "/"        | "\""        | "b"        | "t"        | ","        )      )    )+ >| < BRACED_STRING :    (      "{" (~[ "{", "}" ])+ "}"    ) >}boolean ensureEOF() :{}{  (< COMMA >)? < EOF >  {    return true;  }}JSONObject innerMap() :{  final JSONObject json = new JSONObject();  String key;  Object value;}{  key = objectKey() < EQUALS > value = value()  {    json.put(key, value);  }  {    key = null;    value = null;  }  (    < SLASH > < COMMA > key = objectKey() < EQUALS > value = value()    {      json.put(key, value);    }    {      key = null;      value = null;    }  )*  {    return json;  }}JSONObject object() :{  final JSONObject json = new JSONObject()
 ;  String key;  Object value;}{  key = objectKey() < EQUALS > value = value()  {    json.put(key, value);  }  {    key = null;    value = null;  }  (    (      LOOKAHEAD(2)      < COMMA > key = objectKey() < EQUALS > value = value()      {        json.put(key, value);      }      {        key = null;        value = null;      }    )*  | LOOKAHEAD(2)    < COMMA > < EOF >  )  // ensureEOF()  {    return json;  }}String objectKey() :{  String k;}{  (    k = string()  )  {    //  System.out.println("key == " + k);    return k.trim();  }}Object value() :{  Object x;  String eof = "EOF";  Map m = null;}{  (    LOOKAHEAD(< COMMA >)    x = nullValue()  | LOOKAHEAD(innerMap())    x = innerMap()  | x = tagString()  | LOOKAHEAD(< EOF >)    x = blankValue()  | LOOKAHEAD(braced_string())    x = braced_string()  | LOOKAHEAD(2)    x = string()  )  {    //  System.out.println("val == " + x);    //if (x instanceof Map) return "Map
 ";    //return (String) x;    return x;  }}String nullValue() :{}{  {    return null;  }}String tagString() :{  String output = "(tag=0)";}{  < TAG > < STRING_BODY >  {    return output + token.image;  }}String blankValue() :{}{  {    return null;  }}String string() :{  String s;}{  < STRING_BODY >  {    return token.image.trim();  }}String braced_string() :{  String s;}{  < BRACED_STRING >  {    //  System.out.println("braced == " + token.image);    s = token.image;  }  < COMMA >  {    return s.trim();  }}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserConstants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserConstants.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserConstants.java
new file mode 100644
index 0000000..126d120
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserConstants.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. ISEParserConstants.java */
+package org.apache.metron.parsers.ise;
+
+
+/**
+ * Token literal values and constants.
+ * Generated by org.javacc.parser.OtherFilesGen#start()
+ */
+interface ISEParserConstants {
+
+  /** End of File. */
+  int EOF = 0;
+  /** RegularExpression Id. */
+  int C_SINGLE_COMMENT = 1;
+  /** RegularExpression Id. */
+  int C_MULTILINE_COMMENT = 2;
+  /** RegularExpression Id. */
+  int SH_SINGLE_COMMENT = 3;
+  /** RegularExpression Id. */
+  int EOL = 4;
+  /** RegularExpression Id. */
+  int COMMA = 5;
+  /** RegularExpression Id. */
+  int EQUALS = 6;
+  /** RegularExpression Id. */
+  int SLASH = 7;
+  /** RegularExpression Id. */
+  int TAG = 8;
+  /** RegularExpression Id. */
+  int STRING_BODY = 9;
+  /** RegularExpression Id. */
+  int BRACED_STRING = 10;
+
+  /** Lexical state. */
+  int DEFAULT = 0;
+
+  /** Literal token values. */
+  String[] tokenImage = {
+    "<EOF>",
+    "<C_SINGLE_COMMENT>",
+    "<C_MULTILINE_COMMENT>",
+    "<SH_SINGLE_COMMENT>",
+    "<EOL>",
+    "\",\"",
+    "\"=\"",
+    "\"\\\\\"",
+    "\"(tag=0)\"",
+    "<STRING_BODY>",
+    "<BRACED_STRING>",
+  };
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserTokenManager.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserTokenManager.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserTokenManager.java
new file mode 100644
index 0000000..9bd5347
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ISEParserTokenManager.java
@@ -0,0 +1,676 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. ISEParserTokenManager.java */
+package org.apache.metron.parsers.ise;
+
+/** Token Manager. */
+class ISEParserTokenManager implements ISEParserConstants
+{
+
+  /** Debug output. */
+  public  java.io.PrintStream debugStream = System.out;
+  /** Set debug output. */
+  public  void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
+private final int jjStopStringLiteralDfa_0(int pos, long active0)
+{
+   switch (pos)
+   {
+      case 0:
+         if ((active0 & 0x100L) != 0L)
+         {
+            jjmatchedKind = 9;
+            return 18;
+         }
+         if ((active0 & 0x80L) != 0L)
+            return 6;
+         return -1;
+      case 1:
+         if ((active0 & 0x100L) != 0L)
+         {
+            jjmatchedKind = 9;
+            jjmatchedPos = 1;
+            return 18;
+         }
+         return -1;
+      case 2:
+         if ((active0 & 0x100L) != 0L)
+         {
+            jjmatchedKind = 9;
+            jjmatchedPos = 2;
+            return 18;
+         }
+         return -1;
+      case 3:
+         if ((active0 & 0x100L) != 0L)
+         {
+            jjmatchedKind = 9;
+            jjmatchedPos = 3;
+            return 18;
+         }
+         return -1;
+      case 4:
+         if ((active0 & 0x100L) != 0L)
+         {
+            if (jjmatchedPos < 3)
+            {
+               jjmatchedKind = 9;
+               jjmatchedPos = 3;
+            }
+            return -1;
+         }
+         return -1;
+      case 5:
+         if ((active0 & 0x100L) != 0L)
+         {
+            if (jjmatchedPos < 3)
+            {
+               jjmatchedKind = 9;
+               jjmatchedPos = 3;
+            }
+            return -1;
+         }
+         return -1;
+      default :
+         return -1;
+   }
+}
+private final int jjStartNfa_0(int pos, long active0)
+{
+   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
+}
+private int jjStopAtPos(int pos, int kind)
+{
+   jjmatchedKind = kind;
+   jjmatchedPos = pos;
+   return pos + 1;
+}
+private int jjMoveStringLiteralDfa0_0()
+{
+   switch(curChar)
+   {
+      case 40:
+         return jjMoveStringLiteralDfa1_0(0x100L);
+      case 44:
+         return jjStopAtPos(0, 5);
+      case 61:
+         return jjStopAtPos(0, 6);
+      case 92:
+         return jjStartNfaWithStates_0(0, 7, 6);
+      default :
+         return jjMoveNfa_0(0, 0);
+   }
+}
+private int jjMoveStringLiteralDfa1_0(long active0)
+{
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(0, active0);
+      return 1;
+   }
+   switch(curChar)
+   {
+      case 84:
+      case 116:
+         return jjMoveStringLiteralDfa2_0(active0, 0x100L);
+      default :
+         break;
+   }
+   return jjStartNfa_0(0, active0);
+}
+private int jjMoveStringLiteralDfa2_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(0, old0);
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(1, active0);
+      return 2;
+   }
+   switch(curChar)
+   {
+      case 65:
+      case 97:
+         return jjMoveStringLiteralDfa3_0(active0, 0x100L);
+      default :
+         break;
+   }
+   return jjStartNfa_0(1, active0);
+}
+private int jjMoveStringLiteralDfa3_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(1, old0);
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(2, active0);
+      return 3;
+   }
+   switch(curChar)
+   {
+      case 71:
+      case 103:
+         return jjMoveStringLiteralDfa4_0(active0, 0x100L);
+      default :
+         break;
+   }
+   return jjStartNfa_0(2, active0);
+}
+private int jjMoveStringLiteralDfa4_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(2, old0);
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(3, active0);
+      return 4;
+   }
+   switch(curChar)
+   {
+      case 61:
+         return jjMoveStringLiteralDfa5_0(active0, 0x100L);
+      default :
+         break;
+   }
+   return jjStartNfa_0(3, active0);
+}
+private int jjMoveStringLiteralDfa5_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(3, old0);
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(4, active0);
+      return 5;
+   }
+   switch(curChar)
+   {
+      case 48:
+         return jjMoveStringLiteralDfa6_0(active0, 0x100L);
+      default :
+         break;
+   }
+   return jjStartNfa_0(4, active0);
+}
+private int jjMoveStringLiteralDfa6_0(long old0, long active0)
+{
+   if (((active0 &= old0)) == 0L)
+      return jjStartNfa_0(4, old0);
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) {
+      jjStopStringLiteralDfa_0(5, active0);
+      return 6;
+   }
+   switch(curChar)
+   {
+      case 41:
+         if ((active0 & 0x100L) != 0L)
+            return jjStopAtPos(6, 8);
+         break;
+      default :
+         break;
+   }
+   return jjStartNfa_0(5, active0);
+}
+private int jjStartNfaWithStates_0(int pos, int kind, int state)
+{
+   jjmatchedKind = kind;
+   jjmatchedPos = pos;
+   try { curChar = input_stream.readChar(); }
+   catch(java.io.IOException e) { return pos + 1; }
+   return jjMoveNfa_0(state, pos + 1);
+}
+static final long[] jjbitVec0 = {
+   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
+};
+static final long[] jjbitVec2 = {
+   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
+};
+private int jjMoveNfa_0(int startState, int curPos)
+{
+   int startsAt = 0;
+   jjnewStateCnt = 18;
+   int i = 1;
+   jjstateSet[0] = startState;
+   int kind = 0x7fffffff;
+   for (;;)
+   {
+      if (++jjround == 0x7fffffff)
+         ReInitRounds();
+      if (curChar < 64)
+      {
+         long l = 1L << curChar;
+         do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 18:
+               case 4:
+                  if ((0xdfffeffbffffc9ffL & l) == 0L)
+                     break;
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  break;
+               case 0:
+                  if ((0xdfffeffbffffc9ffL & l) != 0L)
+                  {
+                     if (kind > 9)
+                        kind = 9;
+                     jjCheckNAddTwoStates(4, 5);
+                  }
+                  else if ((0x3400L & l) != 0L)
+                  {
+                     if (kind > 4)
+                        kind = 4;
+                  }
+                  if (curChar == 47)
+                     jjAddStates(0, 1);
+                  else if (curChar == 35)
+                     jjCheckNAddTwoStates(1, 2);
+                  break;
+               case 6:
+                  if ((0xdfffeffbffffc9ffL & l) != 0L)
+                  {
+                     if (kind > 9)
+                        kind = 9;
+                     jjCheckNAddTwoStates(4, 5);
+                  }
+                  if ((0x900400000000L & l) != 0L)
+                  {
+                     if (kind > 9)
+                        kind = 9;
+                     jjCheckNAddTwoStates(4, 5);
+                  }
+                  break;
+               case 1:
+                  if ((0xffffffffffffcbffL & l) != 0L)
+                     jjCheckNAddTwoStates(1, 2);
+                  break;
+               case 2:
+                  if ((0x3400L & l) != 0L && kind > 3)
+                     kind = 3;
+                  break;
+               case 3:
+                  if ((0x3400L & l) != 0L && kind > 4)
+                     kind = 4;
+                  break;
+               case 8:
+                  jjAddStates(2, 3);
+                  break;
+               case 10:
+                  if (curChar == 47)
+                     jjAddStates(0, 1);
+                  break;
+               case 11:
+                  if (curChar == 47)
+                     jjCheckNAddTwoStates(12, 13);
+                  break;
+               case 12:
+                  if ((0xffffffffffffcbffL & l) != 0L)
+                     jjCheckNAddTwoStates(12, 13);
+                  break;
+               case 13:
+                  if ((0x3400L & l) != 0L && kind > 1)
+                     kind = 1;
+                  break;
+               case 14:
+                  if (curChar == 42)
+                     jjCheckNAddTwoStates(15, 17);
+                  break;
+               case 15:
+                  jjCheckNAddTwoStates(15, 17);
+                  break;
+               case 16:
+                  if (curChar == 47 && kind > 2)
+                     kind = 2;
+                  break;
+               case 17:
+                  if (curChar == 42)
+                     jjstateSet[jjnewStateCnt++] = 16;
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      else if (curChar < 128)
+      {
+         long l = 1L << (curChar & 077);
+         do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 18:
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  if (curChar == 92)
+                     jjstateSet[jjnewStateCnt++] = 6;
+                  break;
+               case 0:
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  if (curChar == 123)
+                     jjCheckNAdd(8);
+                  else if (curChar == 92)
+                     jjstateSet[jjnewStateCnt++] = 6;
+                  break;
+               case 6:
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  if ((0x14404410144044L & l) != 0L)
+                  {
+                     if (kind > 9)
+                        kind = 9;
+                     jjCheckNAddTwoStates(4, 5);
+                  }
+                  if (curChar == 92)
+                     jjstateSet[jjnewStateCnt++] = 6;
+                  break;
+               case 1:
+                  jjAddStates(4, 5);
+                  break;
+               case 4:
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  break;
+               case 5:
+                  if (curChar == 92)
+                     jjstateSet[jjnewStateCnt++] = 6;
+                  break;
+               case 7:
+                  if (curChar == 123)
+                     jjCheckNAdd(8);
+                  break;
+               case 8:
+                  if ((0xd7ffffffffffffffL & l) != 0L)
+                     jjCheckNAddTwoStates(8, 9);
+                  break;
+               case 9:
+                  if (curChar == 125 && kind > 10)
+                     kind = 10;
+                  break;
+               case 12:
+                  jjAddStates(6, 7);
+                  break;
+               case 15:
+                  jjAddStates(8, 9);
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      else
+      {
+         int hiByte = (int)(curChar >> 8);
+         int i1 = hiByte >> 6;
+         long l1 = 1L << (hiByte & 077);
+         int i2 = (curChar & 0xff) >> 6;
+         long l2 = 1L << (curChar & 077);
+         do
+         {
+            switch(jjstateSet[--i])
+            {
+               case 18:
+               case 4:
+                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     break;
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  break;
+               case 0:
+                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     break;
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  break;
+               case 6:
+                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     break;
+                  if (kind > 9)
+                     kind = 9;
+                  jjCheckNAddTwoStates(4, 5);
+                  break;
+               case 1:
+                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     jjAddStates(4, 5);
+                  break;
+               case 8:
+                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     jjAddStates(2, 3);
+                  break;
+               case 12:
+                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     jjAddStates(6, 7);
+                  break;
+               case 15:
+                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
+                     jjAddStates(8, 9);
+                  break;
+               default : break;
+            }
+         } while(i != startsAt);
+      }
+      if (kind != 0x7fffffff)
+      {
+         jjmatchedKind = kind;
+         jjmatchedPos = curPos;
+         kind = 0x7fffffff;
+      }
+      ++curPos;
+      if ((i = jjnewStateCnt) == (startsAt = 18 - (jjnewStateCnt = startsAt)))
+         return curPos;
+      try { curChar = input_stream.readChar(); }
+      catch(java.io.IOException e) { return curPos; }
+   }
+}
+static final int[] jjnextStates = {
+   11, 14, 8, 9, 1, 2, 12, 13, 15, 17, 
+};
+private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
+{
+   switch(hiByte)
+   {
+      case 0:
+         return ((jjbitVec2[i2] & l2) != 0L);
+      default :
+         if ((jjbitVec0[i1] & l1) != 0L)
+            return true;
+         return false;
+   }
+}
+
+/** Token literal values. */
+public static final String[] jjstrLiteralImages = {
+"", null, null, null, null, "\54", "\75", "\134", null, null, null, };
+
+/** Lexer state names. */
+public static final String[] lexStateNames = {
+   "DEFAULT",
+};
+static final long[] jjtoToken = {
+   0x7e1L, 
+};
+static final long[] jjtoSkip = {
+   0x1eL, 
+};
+protected JavaCharStream input_stream;
+private final int[] jjrounds = new int[18];
+private final int[] jjstateSet = new int[36];
+protected char curChar;
+/** Constructor. */
+public ISEParserTokenManager(JavaCharStream stream){
+   if (JavaCharStream.staticFlag)
+      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
+   input_stream = stream;
+}
+
+/** Constructor. */
+public ISEParserTokenManager(JavaCharStream stream, int lexState){
+   this(stream);
+   SwitchTo(lexState);
+}
+
+/** Reinitialise parser. */
+public void ReInit(JavaCharStream stream)
+{
+   jjmatchedPos = jjnewStateCnt = 0;
+   curLexState = defaultLexState;
+   input_stream = stream;
+   ReInitRounds();
+}
+private void ReInitRounds()
+{
+   int i;
+   jjround = 0x80000001;
+   for (i = 18; i-- > 0;)
+      jjrounds[i] = 0x80000000;
+}
+
+/** Reinitialise parser. */
+public void ReInit(JavaCharStream stream, int lexState)
+{
+   ReInit(stream);
+   SwitchTo(lexState);
+}
+
+/** Switch to specified lex state. */
+public void SwitchTo(int lexState)
+{
+   if (lexState >= 1 || lexState < 0)
+      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
+   else
+      curLexState = lexState;
+}
+
+protected Token jjFillToken()
+{
+   final Token t;
+   final String curTokenImage;
+   final int beginLine;
+   final int endLine;
+   final int beginColumn;
+   final int endColumn;
+   String im = jjstrLiteralImages[jjmatchedKind];
+   curTokenImage = (im == null) ? input_stream.GetImage() : im;
+   beginLine = input_stream.getBeginLine();
+   beginColumn = input_stream.getBeginColumn();
+   endLine = input_stream.getEndLine();
+   endColumn = input_stream.getEndColumn();
+   t = Token.newToken(jjmatchedKind, curTokenImage);
+
+   t.beginLine = beginLine;
+   t.endLine = endLine;
+   t.beginColumn = beginColumn;
+   t.endColumn = endColumn;
+
+   return t;
+}
+
+int curLexState = 0;
+int defaultLexState = 0;
+int jjnewStateCnt;
+int jjround;
+int jjmatchedPos;
+int jjmatchedKind;
+
+/** Get the next Token. */
+public Token getNextToken() 
+{
+  Token matchedToken;
+  int curPos = 0;
+
+  EOFLoop :
+  for (;;)
+  {
+   try
+   {
+      curChar = input_stream.BeginToken();
+   }
+   catch(java.io.IOException e)
+   {
+      jjmatchedKind = 0;
+      matchedToken = jjFillToken();
+      return matchedToken;
+   }
+
+   jjmatchedKind = 0x7fffffff;
+   jjmatchedPos = 0;
+   curPos = jjMoveStringLiteralDfa0_0();
+   if (jjmatchedKind != 0x7fffffff)
+   {
+      if (jjmatchedPos + 1 < curPos)
+         input_stream.backup(curPos - jjmatchedPos - 1);
+      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
+      {
+         matchedToken = jjFillToken();
+         return matchedToken;
+      }
+      else
+      {
+         continue EOFLoop;
+      }
+   }
+   int error_line = input_stream.getEndLine();
+   int error_column = input_stream.getEndColumn();
+   String error_after = null;
+   boolean EOFSeen = false;
+   try { input_stream.readChar(); input_stream.backup(1); }
+   catch (java.io.IOException e1) {
+      EOFSeen = true;
+      error_after = curPos <= 1 ? "" : input_stream.GetImage();
+      if (curChar == '\n' || curChar == '\r') {
+         error_line++;
+         error_column = 0;
+      }
+      else
+         error_column++;
+   }
+   if (!EOFSeen) {
+      input_stream.backup(1);
+      error_after = curPos <= 1 ? "" : input_stream.GetImage();
+   }
+   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
+  }
+}
+
+private void jjCheckNAdd(int state)
+{
+   if (jjrounds[state] != jjround)
+   {
+      jjstateSet[jjnewStateCnt++] = state;
+      jjrounds[state] = jjround;
+   }
+}
+private void jjAddStates(int start, int end)
+{
+   do {
+      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
+   } while (start++ != end);
+}
+private void jjCheckNAddTwoStates(int state1, int state2)
+{
+   jjCheckNAdd(state1);
+   jjCheckNAdd(state2);
+}
+
+}



[06/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/SplitBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/SplitBolt.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/SplitBolt.java
deleted file mode 100644
index 89e13a4..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/bolt/SplitBolt.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.bolt;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import java.util.Map;
-import java.util.Set;
-
-public abstract class SplitBolt<T> extends
-        ConfiguredBolt {
-
-  protected OutputCollector collector;
-
-  public SplitBolt(String zookeeperUrl) {
-    super(zookeeperUrl);
-  }
-
-  @Override
-  public final void prepare(Map map, TopologyContext topologyContext,
-                       OutputCollector outputCollector) {
-    super.prepare(map, topologyContext, outputCollector);
-    collector = outputCollector;
-    prepare(map, topologyContext);
-  }
-
-  @Override
-  public final void execute(Tuple tuple) {
-    emit(tuple, generateMessage(tuple));
-  }
-
-  @Override
-  public final void declareOutputFields(OutputFieldsDeclarer declarer) {
-    declarer.declareStream("message", new Fields("key", "message"));
-    for (String streamId : getStreamIds()) {
-      declarer.declareStream(streamId, new Fields("key", "message"));
-    }
-    declarer.declareStream("error", new Fields("message"));
-    declareOther(declarer);
-  }
-
-  public void emit(Tuple tuple, T message) {
-    if (message == null) return;
-    String key = getKey(tuple, message);
-    collector.emit("message", tuple, new Values(key, message));
-    Map<String, T> streamMessageMap = splitMessage(message);
-    for (String streamId : streamMessageMap.keySet()) {
-      T streamMessage = streamMessageMap.get(streamId);
-      if (streamMessage == null) {
-        streamMessage = getDefaultMessage(streamId);
-      }
-      collector.emit(streamId, new Values(key, streamMessage));
-    }
-    collector.ack(tuple);
-    emitOther(tuple, message);
-  }
-
-  protected T getDefaultMessage(String streamId) {
-    throw new IllegalArgumentException("Could not find a message for" +
-            " stream: " + streamId);
-  }
-
-  public abstract void prepare(Map map, TopologyContext topologyContext);
-
-  public abstract Set<String> getStreamIds();
-
-  public abstract String getKey(Tuple tuple, T message);
-
-  public abstract T generateMessage(Tuple tuple);
-
-  public abstract Map<String, T> splitMessage(T message);
-
-  public abstract void declareOther(OutputFieldsDeclarer declarer);
-
-  public abstract void emitOther(Tuple tuple, T message);
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/configuration/ConfigurationManager.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/configuration/ConfigurationManager.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/configuration/ConfigurationManager.java
deleted file mode 100644
index 0989150..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/configuration/ConfigurationManager.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.configuration;
-
-
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.configuration.CombinedConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.DefaultConfigurationBuilder;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-
-/**
- * Configuration manager class which loads all 'config-definition.xml' files and
- * creates a Configuration object which holds all properties from the underlying
- * configuration resource
- */
-public class ConfigurationManager {
-
-  /** configuration definition file name. */
-  private static String DEFAULT_CONFIG_DEFINITION_FILE_NAME = "config-definition.xml";
-
-  /** Stores a map with the configuration for each path specified. */
-  private static Map<String, Configuration> configurationsCache = new HashMap<String, Configuration>();
-
-  /** The Constant LOGGER. */
-  private static final Logger LOGGER = Logger
-      .getLogger(ConfigurationManager.class);
-
-  /**
-   * Common method to load content of all configuration resources defined in
-   * 'config-definition.xml'.
-   * 
-   * @param configDefFilePath
-   *          the config def file path
-   * @return Configuration
-   */
-  public static Configuration getConfiguration(String configDefFilePath) {
-    if (configurationsCache.containsKey(configDefFilePath)) {
-      return configurationsCache.get(configDefFilePath);
-    }
-    CombinedConfiguration configuration = null;
-    synchronized (configurationsCache) {
-      if (configurationsCache.containsKey(configDefFilePath)) {
-        return configurationsCache.get(configDefFilePath);
-      }
-      DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
-      String fielPath = getConfigDefFilePath(configDefFilePath);
-      LOGGER.info("loading from 'configDefFilePath' :" + fielPath);
-      builder.setFile(new File(fielPath));
-      try {
-        configuration = builder.getConfiguration(true);
-        configurationsCache.put(fielPath, configuration);
-      } catch (ConfigurationException e) {
-        LOGGER.info("Exception in loading property files.", e);
-      }
-    }
-    return configuration;
-  }
-
-  /**
-   * Removes the configuration created from a config definition file located at
-   * 'configDefFilePath'.
-   * 
-   * @param configDefFilePath
-   *          path to the config definition file
-   */
-  public static void clearConfiguration(String configDefFilePath) {
-    configurationsCache.remove(configDefFilePath);
-  }
-
-  /**
-   * Gets the configuration.
-   * 
-   * @return the configuration
-   */
-  public static Configuration getConfiguration() {
-    return getConfiguration(null);
-  }
-
-  /**
-   * Returns the 'config-definition.xml' file path. 1. If the param
-   * 'configDefFilePath' has a valid value, returns configDefFilePath 2. If the
-   * system property key 'configDefFilePath' has a valid value, returns the
-   * value 3. By default, it returns the file name 'config-definition.xml'
-   * 
-   * @param configDefFilePath
-   *          given input path to the config definition file
-   * @return the config def file path
-   */
-  private static String getConfigDefFilePath(String configDefFilePath) {
-    if (StringUtils.isNotEmpty(configDefFilePath)) {
-      return configDefFilePath;
-    }
-    return DEFAULT_CONFIG_DEFINITION_FILE_NAME;
-  }
-
-  /**
-   * The main method.
-   * 
-   * @param args
-   *          the args
-   * @throws InterruptedException
-   *           the interrupted exception
-   */
-  public static void main(String[] args) throws InterruptedException {
-    Configuration config = ConfigurationManager
-        .getConfiguration("/Users/Sayi/Documents/config/config-definition-dpi.xml");
-    System.out.println("elastic.search.cluster ="
-        + config.getString("elastic.search.cluster"));
-    Thread.sleep(10000);
-    System.out.println("storm.topology.dpi.bolt.es-index.index.name ="
-        + config.getString("storm.topology.dpi.bolt.es-index.index.name"));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/dataloads/interfaces/ThreatIntelSource.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/dataloads/interfaces/ThreatIntelSource.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/dataloads/interfaces/ThreatIntelSource.java
deleted file mode 100644
index 4e87a1c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/dataloads/interfaces/ThreatIntelSource.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.interfaces;
-
-import java.util.Iterator;
-import org.apache.commons.configuration.Configuration;
-import org.json.simple.JSONObject;
-
-public interface ThreatIntelSource extends Iterator<JSONObject> {
-
-	void initializeSource(Configuration config);
-	void cleanupSource();
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configuration.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configuration.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configuration.java
deleted file mode 100644
index d21c686..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configuration.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.domain;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.metron.utils.ConfigurationsUtils;
-
-import java.nio.file.Path;
-import java.util.Map;
-
-public class Configuration extends Configurations {
-
-    protected CuratorFramework curatorFramework = null;
-    private Path configFileRoot;
-
-    public Configuration(CuratorFramework curatorFramework){
-
-        this.curatorFramework = curatorFramework;
-
-    }
-
-
-    public Configuration(Path configFileRoot){
-
-        this.configFileRoot = configFileRoot;
-    }
-
-    public void update() throws Exception {
-
-        if( null != curatorFramework ) {
-
-            ConfigurationsUtils.updateConfigsFromZookeeper(this, this.curatorFramework);
-
-        } else {
-
-            updateGlobalConfig(ConfigurationsUtils.readGlobalConfigFromFile(configFileRoot.toAbsolutePath().toString()));
-            Map<String, byte[]> sensorEnrichmentConfigs = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(configFileRoot.toAbsolutePath().toString());
-            for(String sensorType: sensorEnrichmentConfigs.keySet()) {
-                updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfigs.get(sensorType));
-            }
-
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configurations.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configurations.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configurations.java
deleted file mode 100644
index 63e0f95..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Configurations.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.domain;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import org.apache.log4j.Logger;
-import org.apache.metron.utils.JSONUtils;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Serializable;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-public class Configurations implements Serializable {
-
-  private static final Logger LOG = Logger.getLogger(Configurations.class);
-
-  public enum Type {
-    GLOBAL, SENSOR, OTHER
-  }
-
-  public static final String GLOBAL_CONFIG_NAME = "global";
-
-  private ConcurrentMap<String, Object> configurations = new ConcurrentHashMap<>();
-
-  @SuppressWarnings("unchecked")
-  public Map<String, Object> getGlobalConfig() {
-    return (Map<String, Object>) configurations.get(GLOBAL_CONFIG_NAME);
-  }
-
-  public void updateGlobalConfig(byte[] data) throws IOException {
-    updateGlobalConfig(new ByteArrayInputStream(data));
-  }
-
-  public void updateGlobalConfig(InputStream io) throws IOException {
-    Map<String, Object> globalConfig = JSONUtils.INSTANCE.load(io, new TypeReference<Map<String, Object>>() {
-    });
-    updateGlobalConfig(globalConfig);
-  }
-
-  public void updateGlobalConfig(Map<String, Object> globalConfig) {
-    configurations.put(GLOBAL_CONFIG_NAME, globalConfig);
-  }
-
-  public SensorEnrichmentConfig getSensorEnrichmentConfig(String sensorType) {
-    return (SensorEnrichmentConfig) configurations.get(sensorType);
-  }
-
-  public void updateSensorEnrichmentConfig(String sensorType, byte[] data) throws IOException {
-    updateSensorEnrichmentConfig(sensorType, new ByteArrayInputStream(data));
-  }
-
-  public void updateSensorEnrichmentConfig(String sensorType, InputStream io) throws IOException {
-    SensorEnrichmentConfig sensorEnrichmentConfig = JSONUtils.INSTANCE.load(io, SensorEnrichmentConfig.class);
-    updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfig);
-  }
-
-  public void updateSensorEnrichmentConfig(String sensorType, SensorEnrichmentConfig sensorEnrichmentConfig) {
-    configurations.put(sensorType, sensorEnrichmentConfig);
-  }
-
-  @SuppressWarnings("unchecked")
-  public Map<String, Object> getConfig(String name) {
-    return (Map<String, Object>) configurations.get(name);
-  }
-
-  public void updateConfig(String name, byte[] data) throws IOException {
-    if (data == null) throw new IllegalStateException("config data cannot be null");
-    Map<String, Object> config = JSONUtils.INSTANCE.load(new ByteArrayInputStream(data), new TypeReference<Map<String, Object>>() {});
-    updateConfig(name, config);
-  }
-
-  public void updateConfig(String name, Map<String, Object> config) {
-    configurations.put(name, config);
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-    Configurations that = (Configurations) o;
-    return configurations.equals(that.configurations);
-  }
-
-  @Override
-  public int hashCode() {
-    return configurations.hashCode();
-  }
-
-  @Override
-  public String toString() {
-    return configurations.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Enrichment.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Enrichment.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Enrichment.java
deleted file mode 100644
index 6f43739..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/Enrichment.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.domain;
-
-import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
-
-import java.io.Serializable;
-import java.util.List;
-
-public class Enrichment<T extends EnrichmentAdapter> implements Serializable {
-
-  private String type;
-  private List<String> fields;
-  private T adapter;
-
-  public Enrichment() {}
-
-  public Enrichment(String type, T adapter) {
-    this.type = type;
-    this.adapter = adapter;
-  }
-
-
-  public List<String> getFields() {
-    return fields;
-  }
-
-  public void setFields(List<String> fields) {
-    this.fields = fields;
-  }
-
-  public String getType() {
-    return type;
-  }
-
-  public void setType(String type) {
-    this.type = type;
-  }
-
-  public T getAdapter() {
-    return adapter;
-  }
-
-  public void setAdapter(T adapter) {
-    this.adapter = adapter;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/SensorEnrichmentConfig.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/SensorEnrichmentConfig.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/SensorEnrichmentConfig.java
deleted file mode 100644
index ea345ca..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/domain/SensorEnrichmentConfig.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.domain;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import org.apache.metron.utils.JSONUtils;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class SensorEnrichmentConfig {
-
-  private String index;
-  private Map<String, List<String>> enrichmentFieldMap;
-  private Map<String, List<String>> threatIntelFieldMap;
-  private Map<String, List<String>> fieldToEnrichmentTypeMap = new HashMap<>();
-  private Map<String, List<String>> fieldToThreatIntelTypeMap = new HashMap<>();
-  private int batchSize;
-
-  public String getIndex() {
-    return index;
-  }
-
-  public void setIndex(String index) {
-    this.index = index;
-  }
-
-  public Map<String, List<String>> getEnrichmentFieldMap() {
-    return enrichmentFieldMap;
-  }
-
-  public void setEnrichmentFieldMap(Map<String, List<String>> enrichmentFieldMap) {
-    this.enrichmentFieldMap = enrichmentFieldMap;
-  }
-
-  public Map<String, List<String>> getThreatIntelFieldMap() {
-    return threatIntelFieldMap;
-  }
-
-  public void setThreatIntelFieldMap(Map<String, List<String>> threatIntelFieldMap) {
-    this.threatIntelFieldMap = threatIntelFieldMap;
-  }
-
-  public Map<String, List<String>> getFieldToEnrichmentTypeMap() {
-    return fieldToEnrichmentTypeMap;
-  }
-
-  public Map<String, List<String>> getFieldToThreatIntelTypeMap() {
-    return fieldToThreatIntelTypeMap;
-  }
-  public void setFieldToEnrichmentTypeMap(Map<String, List<String>> fieldToEnrichmentTypeMap) {
-    this.fieldToEnrichmentTypeMap = fieldToEnrichmentTypeMap;
-  }
-
-  public void setFieldToThreatIntelTypeMap(Map<String, List<String>> fieldToThreatIntelTypeMap) {
-    this.fieldToThreatIntelTypeMap= fieldToThreatIntelTypeMap;
-  }
-  public int getBatchSize() {
-    return batchSize;
-  }
-
-  public void setBatchSize(int batchSize) {
-    this.batchSize = batchSize;
-  }
-
-  public static SensorEnrichmentConfig fromBytes(byte[] config) throws IOException {
-    return JSONUtils.INSTANCE.load(new String(config), SensorEnrichmentConfig.class);
-  }
-
-  public String toJSON() throws JsonProcessingException {
-    return JSONUtils.INSTANCE.toJSON(this, true);
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    SensorEnrichmentConfig that = (SensorEnrichmentConfig) o;
-
-    if (getBatchSize() != that.getBatchSize()) return false;
-    if (getIndex() != null ? !getIndex().equals(that.getIndex()) : that.getIndex() != null) return false;
-    if (getEnrichmentFieldMap() != null ? !getEnrichmentFieldMap().equals(that.getEnrichmentFieldMap()) : that.getEnrichmentFieldMap() != null)
-      return false;
-    if (getThreatIntelFieldMap() != null ? !getThreatIntelFieldMap().equals(that.getThreatIntelFieldMap()) : that.getThreatIntelFieldMap() != null)
-      return false;
-    if (getFieldToEnrichmentTypeMap() != null ? !getFieldToEnrichmentTypeMap().equals(that.getFieldToEnrichmentTypeMap()) : that.getFieldToEnrichmentTypeMap() != null)
-      return false;
-    return getFieldToThreatIntelTypeMap() != null ? getFieldToThreatIntelTypeMap().equals(that.getFieldToThreatIntelTypeMap()) : that.getFieldToThreatIntelTypeMap() == null;
-
-  }
-
-  @Override
-  public String toString() {
-    return "{index=" + index + ", batchSize=" + batchSize +
-            ", enrichmentFieldMap=" + enrichmentFieldMap +
-            ", threatIntelFieldMap" + threatIntelFieldMap +
-            ", fieldToEnrichmentTypeMap=" + fieldToEnrichmentTypeMap +
-            ", fieldToThreatIntelTypeMap=" + fieldToThreatIntelTypeMap + "}";
-  }
-
-  @Override
-  public int hashCode() {
-    int result = getIndex() != null ? getIndex().hashCode() : 0;
-    result = 31 * result + (getEnrichmentFieldMap() != null ? getEnrichmentFieldMap().hashCode() : 0);
-    result = 31 * result + (getThreatIntelFieldMap() != null ? getThreatIntelFieldMap().hashCode() : 0);
-    result = 31 * result + (getFieldToEnrichmentTypeMap() != null ? getFieldToEnrichmentTypeMap().hashCode() : 0);
-    result = 31 * result + (getFieldToThreatIntelTypeMap() != null ? getFieldToThreatIntelTypeMap().hashCode() : 0);
-    result = 31 * result + getBatchSize();
-    return result;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConfig.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConfig.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConfig.java
deleted file mode 100644
index 92913d9..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConfig.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.enrichment;
-
-import com.google.common.base.Joiner;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.metron.Constants;
-import org.apache.metron.domain.SensorEnrichmentConfig;
-import org.apache.metron.utils.ConfigurationsUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.*;
-
-public class EnrichmentConfig {
-  public static enum Type {
-     THREAT_INTEL
-    ,ENRICHMENT
-  }
-
-  protected static final Logger _LOG = LoggerFactory.getLogger(EnrichmentConfig.class);
-  public static class FieldList {
-    Type type;
-    Map<String, List<String>> fieldToEnrichmentTypes;
-
-    public Type getType() {
-      return type;
-    }
-
-    public void setType(Type type) {
-      this.type = type;
-    }
-
-    public Map<String, List<String>> getFieldToEnrichmentTypes() {
-      return fieldToEnrichmentTypes;
-    }
-
-    public void setFieldToEnrichmentTypes(Map<String, List<String>> fieldToEnrichmentTypes) {
-      this.fieldToEnrichmentTypes = fieldToEnrichmentTypes;
-    }
-  }
-  public String zkQuorum;
-  public Map<String, FieldList> sensorToFieldList;
-
-  public String getZkQuorum() {
-    return zkQuorum;
-  }
-
-  public void setZkQuorum(String zkQuorum) {
-    this.zkQuorum = zkQuorum;
-  }
-
-  public Map<String, FieldList> getSensorToFieldList() {
-    return sensorToFieldList;
-  }
-
-  public void setSensorToFieldList(Map<String, FieldList> sensorToFieldList) {
-    this.sensorToFieldList = sensorToFieldList;
-  }
-
-  public void updateSensorConfigs( ) throws Exception {
-    CuratorFramework client = ConfigurationsUtils.getClient(getZkQuorum());
-    try {
-      client.start();
-      updateSensorConfigs(new ZKSourceConfigHandler(client), sensorToFieldList);
-    }
-    finally {
-      client.close();
-    }
-  }
-
-  public static interface SourceConfigHandler {
-    SensorEnrichmentConfig readConfig(String sensor) throws Exception;
-    void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception;
-  }
-
-  public static class ZKSourceConfigHandler implements SourceConfigHandler {
-    CuratorFramework client;
-    public ZKSourceConfigHandler(CuratorFramework client) {
-      this.client = client;
-    }
-    @Override
-    public SensorEnrichmentConfig readConfig(String sensor) throws Exception {
-      return SensorEnrichmentConfig.fromBytes(ConfigurationsUtils.readSensorEnrichmentConfigBytesFromZookeeper(sensor, client));
-    }
-
-    @Override
-    public void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception {
-      ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensor, config.toJSON().getBytes(), client);
-    }
-  }
-
-  public static void updateSensorConfigs( SourceConfigHandler scHandler
-                                        , Map<String, FieldList> sensorToFieldList
-                                        ) throws Exception
-  {
-    Map<String, SensorEnrichmentConfig> sourceConfigsChanged = new HashMap<>();
-    for (Map.Entry<String, FieldList> kv : sensorToFieldList.entrySet()) {
-      SensorEnrichmentConfig config = sourceConfigsChanged.get(kv.getKey());
-      if(config == null) {
-        config = scHandler.readConfig(kv.getKey());
-        if(_LOG.isDebugEnabled()) {
-          _LOG.debug(config.toJSON());
-        }
-      }
-      Map<String, List<String> > fieldMap = null;
-      Map<String, List<String> > fieldToTypeMap = null;
-      List<String> fieldList = null;
-      if(kv.getValue().type == Type.THREAT_INTEL) {
-        fieldMap = config.getThreatIntelFieldMap();
-        if(fieldMap!= null) {
-          fieldList = fieldMap.get(Constants.SIMPLE_HBASE_THREAT_INTEL);
-        }
-        if(fieldList == null) {
-          fieldList = new ArrayList<>();
-          fieldMap.put(Constants.SIMPLE_HBASE_THREAT_INTEL, fieldList);
-        }
-        fieldToTypeMap = config.getFieldToThreatIntelTypeMap();
-        if(fieldToTypeMap == null) {
-          fieldToTypeMap = new HashMap<>();
-          config.setFieldToThreatIntelTypeMap(fieldToTypeMap);
-        }
-      }
-      else if(kv.getValue().type == Type.ENRICHMENT) {
-        fieldMap = config.getEnrichmentFieldMap();
-        if(fieldMap!= null) {
-          fieldList = fieldMap.get(Constants.SIMPLE_HBASE_ENRICHMENT);
-        }
-        if(fieldList == null) {
-          fieldList = new ArrayList<>();
-          fieldMap.put(Constants.SIMPLE_HBASE_ENRICHMENT, fieldList);
-        }
-        fieldToTypeMap = config.getFieldToEnrichmentTypeMap();
-        if(fieldToTypeMap == null) {
-          fieldToTypeMap = new HashMap<>();
-          config.setFieldToEnrichmentTypeMap(fieldToTypeMap);
-        }
-      }
-      if(fieldToTypeMap == null  || fieldMap == null) {
-        _LOG.debug("fieldToTypeMap is null or fieldMap is null, so skipping");
-        continue;
-      }
-      //Add the additional fields to the field list associated with the hbase adapter
-      {
-        HashSet<String> fieldSet = new HashSet<>(fieldList);
-        List<String> additionalFields = new ArrayList<>();
-        for (String field : kv.getValue().getFieldToEnrichmentTypes().keySet()) {
-          if (!fieldSet.contains(field)) {
-            additionalFields.add(field);
-          }
-        }
-        //adding only the ones that we don't already have to the field list
-        if (additionalFields.size() > 0) {
-          _LOG.debug("Adding additional fields: " + Joiner.on(',').join(additionalFields));
-          fieldList.addAll(additionalFields);
-          sourceConfigsChanged.put(kv.getKey(), config);
-        }
-      }
-      //Add the additional enrichment types to the mapping between the fields
-      {
-        for(Map.Entry<String, List<String>> fieldToType : kv.getValue().getFieldToEnrichmentTypes().entrySet()) {
-          String field = fieldToType.getKey();
-          final HashSet<String> types = new HashSet<>(fieldToType.getValue());
-          int sizeBefore = 0;
-          if(fieldToTypeMap.containsKey(field)) {
-            List<String> typeList = fieldToTypeMap.get(field);
-            sizeBefore = new HashSet<>(typeList).size();
-            types.addAll(typeList);
-          }
-          int sizeAfter = types.size();
-          boolean changed = sizeBefore != sizeAfter;
-          if(changed) {
-            fieldToTypeMap.put(field, new ArrayList<String>() {{
-                addAll(types);
-              }});
-            sourceConfigsChanged.put(kv.getKey(), config);
-          }
-        }
-      }
-    }
-    for(Map.Entry<String, SensorEnrichmentConfig> kv : sourceConfigsChanged.entrySet()) {
-      scHandler.persistConfig(kv.getKey(), kv.getValue());
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConstants.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConstants.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConstants.java
deleted file mode 100644
index 4f7be3b..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/EnrichmentConstants.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.enrichment;
-
-public class EnrichmentConstants {
-
-
-
-  public static final String INDEX_NAME = "index.name";
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
deleted file mode 100644
index 28f9956..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.enrichment.interfaces;
-
-import org.json.simple.JSONObject;
-
-public interface EnrichmentAdapter<T>
-{
-	void logAccess(T value);
-	JSONObject enrich(T value);
-	boolean initializeAdapter();
-	void cleanup();
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/Connector.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/Connector.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/Connector.java
deleted file mode 100644
index 35da040..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/Connector.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
-
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.Serializable;
-
-/**
- * Created by cstella on 1/29/16.
- */
-public abstract class Connector {
-  protected TableConfig tableConf;
-  protected String _quorum;
-  protected String _port;
-
-  public Connector(final TableConfig conf, String _quorum, String _port) throws IOException {
-    this.tableConf = conf;
-    this._quorum = _quorum;
-    this._port = _port;
-  }
-  public abstract void put(Put put) throws IOException;
-  public abstract void close();
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseBolt.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseBolt.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseBolt.java
deleted file mode 100644
index 6caa016..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseBolt.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-
-
-import java.io.IOException;
-import java.util.Map;
-
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Iterables;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.log4j.Logger;
-import org.json.simple.JSONObject;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import org.apache.metron.helpers.topology.ErrorUtils;
-
-/**
- * A Storm bolt for putting data into HBase.
- * <p>
- * By default works in batch mode by enabling HBase's client-side write buffer. Enabling batch mode
- * is recommended for high throughput, but it can be disabled in {@link TupleTableConfig}.
- * <p>
- * The HBase configuration is picked up from the first <tt>hbase-site.xml</tt> encountered in the
- * classpath
- * @see TupleTableConfig
- * @see HTableConnector
- */
-@SuppressWarnings("serial")
-public class HBaseBolt implements IRichBolt {
-  private static final Logger LOG = Logger.getLogger(HBaseBolt.class);
-  private static final String DEFAULT_ZK_PORT = "2181";
-
-  protected OutputCollector collector;
-  protected TupleTableConfig conf;
-  protected boolean autoAck = true;
-  protected Connector connector;
-  private String _quorum;
-  private String _port;
-
-  public HBaseBolt(TupleTableConfig conf, String quorum, String port) {
-    this.conf = conf;
-    _quorum = quorum;
-    _port = port;
-  }
-  public HBaseBolt(final TupleTableConfig conf, String zkConnectString) throws IOException {
-    this(conf, zkConnectStringToHosts(zkConnectString), zkConnectStringToPort(zkConnectString));
-  }
-  public static String zkConnectStringToHosts(String connString) {
-    Iterable<String> hostPortPairs = Splitter.on(',').split(connString);
-    return Joiner.on(',').join(Iterables.transform(hostPortPairs, new Function<String, String>() {
-
-      @Override
-      public String apply(String hostPortPair) {
-        return Iterables.getFirst(Splitter.on(':').split(hostPortPair), "");
-      }
-    }));
-  }
-  public static String zkConnectStringToPort(String connString) {
-    String hostPortPair = Iterables.getFirst(Splitter.on(",").split(connString), "");
-    return Iterables.getLast(Splitter.on(":").split(hostPortPair),DEFAULT_ZK_PORT);
-  }
-
-
-  public Connector createConnector() throws IOException{
-    initialize();
-    return new HTableConnector(conf, _quorum, _port);
-  }
-
-  public void initialize() {
-    TupleTableConfig hbaseBoltConfig = conf;
-    String allColumnFamiliesColumnQualifiers = conf.getFields();
-    String[] tokenizedColumnFamiliesWithColumnQualifiers = StringUtils
-            .split(allColumnFamiliesColumnQualifiers, "\\|");
-    for (String tokenizedColumnFamilyWithColumnQualifiers : tokenizedColumnFamiliesWithColumnQualifiers) {
-      String[] cfCqTokens = StringUtils.split( tokenizedColumnFamilyWithColumnQualifiers, ":");
-      String columnFamily = cfCqTokens[0];
-      String[] columnQualifiers = StringUtils.split(cfCqTokens[1], ",");
-      for (String columnQualifier : columnQualifiers) {
-        hbaseBoltConfig.addColumn(columnFamily, columnQualifier);
-      }
-      setAutoAck(true);
-    }
-  }
-
-  /** {@inheritDoc} */
-  @SuppressWarnings("rawtypes")
-  
-  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
-    this.collector = collector;
-
-    try {
-      if(connector == null) {
-        this.connector = createConnector();
-      }
-		
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-
-    LOG.info("Preparing HBaseBolt for table: " + this.conf.getTableName());
-  }
-
-  /** {@inheritDoc} */
-  
-  public void execute(Tuple input) {
-    try {
-      Put p = conf.getPutFromTuple(input);
-      this.connector.put(p);
-    } catch (IOException ex) {
-
-  		JSONObject error = ErrorUtils.generateErrorMessage(
-  				"Alerts problem: " + input.toString(), ex);
-  		collector.emit("error", new Values(error));
-  		
-      throw new RuntimeException(ex);
-    }
-
-    if (this.autoAck) {
-      this.collector.ack(input);
-    }
-  }
-
-  /** {@inheritDoc} */
-  
-  public void cleanup() {
-    this.connector.close();
-  }
-
-  /** {@inheritDoc} */
-  
-  public void declareOutputFields(OutputFieldsDeclarer declarer) {
-	  declarer.declareStream("error", new Fields("HBase"));
-  }
-
-  /** {@inheritDoc} */
-  
-  public Map<String, Object> getComponentConfiguration() {
-    return null;
-  }
-
-  /**
-   * @return the autoAck
-   */
-  public boolean isAutoAck() {
-    return autoAck;
-  }
-
-  /**
-   * @param autoAck the autoAck to set
-   */
-  public void setAutoAck(boolean autoAck) {
-    this.autoAck = autoAck;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseStreamPartitioner.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseStreamPartitioner.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseStreamPartitioner.java
deleted file mode 100644
index 519f76c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HBaseStreamPartitioner.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.client.HTable;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.task.WorkerTopologyContext;
-
-public class HBaseStreamPartitioner implements CustomStreamGrouping {
-
-  private static final long serialVersionUID = -148324019395976092L;
-  private String[] regionStartKeys = { "0" };
-  private Map<String, String> regionStartKeyRegionNameMap = new HashMap<String, String>();
-
-  private List<Integer> targetTasks = null;
-  private int targetTasksSize = 0;
-  private int rowKeyFieldIndex = 0;
-  private String tableName = null;
-  private long regionCheckTime = 0;
-  private int regionInforRefreshIntervalInMins = 60;
-  private int regionInforRefreshIntervalInMillis = regionInforRefreshIntervalInMins * 60000;
-
-  HTable hTable = null;;
-
-  
-  public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
-    
-    System.out.println("preparing HBaseStreamPartitioner for streamId " + stream.get_streamId());
-    this.targetTasks = targetTasks;
-    this.targetTasksSize = this.targetTasks.size();
-
-    Configuration conf = HBaseConfiguration.create();
-    try {
-      hTable = new HTable(conf, tableName);
-      refreshRegionInfo(tableName);
-
-      System.out.println("regionStartKeyRegionNameMap: " + regionStartKeyRegionNameMap);
-
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-  }
-  
-  public void prepare() {
-    
-    System.out.println("preparing HBaseStreamPartitioner for streamId " );
-
-    Configuration conf = HBaseConfiguration.create();
-    try {
-      hTable = new HTable(conf, tableName);
-      refreshRegionInfo(tableName);
-
-      System.out.println("regionStartKeyRegionNameMap: " + regionStartKeyRegionNameMap);
-
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-  }
-
-  public HBaseStreamPartitioner(String tableName, int rowKeyFieldIndex, int regionInforRefreshIntervalInMins) {
-    System.out.println("Created HBaseStreamPartitioner ");
-    this.rowKeyFieldIndex = rowKeyFieldIndex;
-    this.tableName = tableName;
-    this.regionInforRefreshIntervalInMins = regionInforRefreshIntervalInMins;
-    this.regionInforRefreshIntervalInMillis = regionInforRefreshIntervalInMins * 60000;
-
-  }
-
-  
-  public List<Integer> chooseTasks(int taskId, List<Object> values) {
-    List<Integer> choosenTasks = null;
-    System.out.println("Choosing task for taskId " + taskId + " and values " + values);
-
-    if (regionInforRefreshIntervalInMillis > (System.currentTimeMillis() - regionCheckTime)) {
-      try {
-        refreshRegionInfo(tableName);
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
-
-    int regionIndex = getRegionIndex((String) values.get(rowKeyFieldIndex));
-
-    if (regionIndex < targetTasksSize) {
-      choosenTasks = Arrays.asList(regionIndex);
-
-    } else {
-      choosenTasks = Arrays.asList(regionIndex % targetTasksSize);
-    }
-    System.out.println("Choosen tasks are " + choosenTasks);
-
-    return choosenTasks;
-
-
-  }
-
-  
-  
-  public int getRegionIndex(String key) {
-    int index = Arrays.binarySearch(regionStartKeys, key);
-    if (index < -1) {
-      index = (index + 2) * -1;
-    } else if (index == -1) {
-      index = 0;
-    }
-
-    return index;
-  }
-
-  private void refreshRegionInfo(String tableName) throws IOException {
-
-    System.out.println("in refreshRegionInfo ");
-
-    Map<HRegionInfo, ServerName> regionMap = hTable.getRegionLocations();
-
-    synchronized (regionStartKeys) {
-      synchronized (regionStartKeyRegionNameMap) {
-        regionStartKeys = new String[regionMap.size()];
-        int index = 0;
-        String startKey = null;
-        regionStartKeyRegionNameMap.clear();
-        for (HRegionInfo regionInfo : regionMap.keySet()) {
-          startKey = new String(regionInfo.getStartKey());
-          regionStartKeyRegionNameMap.put(startKey, regionInfo.getRegionNameAsString());
-          regionStartKeys[index] = startKey;
-          index++;
-        }
-
-        Arrays.sort(regionStartKeys);
-        regionCheckTime = System.currentTimeMillis();
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableConnector.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableConnector.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableConnector.java
deleted file mode 100644
index d1a9327..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableConnector.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.hbase;
-
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.Serializable;
-import java.lang.reflect.InvocationTargetException;
-
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Iterables;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.log4j.Logger;
-
-import backtype.storm.generated.Bolt;
-
-import javax.annotation.Nullable;
-
-/**
- * HTable connector for Storm {@link Bolt}
- * <p>
- * The HBase configuration is picked up from the first <tt>hbase-site.xml</tt> encountered in the
- * classpath
- */
-@SuppressWarnings("serial")
-public class HTableConnector extends Connector implements Serializable{
-  private static final Logger LOG = Logger.getLogger(HTableConnector.class);
-  private Configuration conf;
-  protected HTableInterface table;
-  private String tableName;
-  private String connectorImpl;
-
-
-  /**
-   * Initialize HTable connection
-   * @param conf The {@link TupleTableConfig}
-   * @throws IOException
-   */
-  public HTableConnector(final TableConfig conf, String _quorum, String _port) throws IOException {
-    super(conf, _quorum, _port);
-    this.connectorImpl = conf.getConnectorImpl();
-    this.tableName = conf.getTableName();
-    this.conf = HBaseConfiguration.create();
-    
-    if(_quorum != null && _port != null)
-    {
-    	this.conf.set("hbase.zookeeper.quorum", _quorum);
-    	this.conf.set("hbase.zookeeper.property.clientPort", _port);
-    }
-
-    LOG.info(String.format("Initializing connection to HBase table %s at %s", tableName,
-      this.conf.get("hbase.rootdir")));
-
-    try {
-      this.table = getTableProvider().getTable(this.conf, this.tableName);
-    } catch (IOException ex) {
-      throw new IOException("Unable to establish connection to HBase table " + this.tableName, ex);
-    }
-
-    if (conf.isBatch()) {
-      // Enable client-side write buffer
-      this.table.setAutoFlush(false, true);
-      LOG.info("Enabled client-side write buffer");
-    }
-
-    // If set, override write buffer size
-    if (conf.getWriteBufferSize() > 0) {
-      try {
-        this.table.setWriteBufferSize(conf.getWriteBufferSize());
-
-        LOG.info("Setting client-side write buffer to " + conf.getWriteBufferSize());
-      } catch (IOException ex) {
-        LOG.error("Unable to set client-side write buffer size for HBase table " + this.tableName,
-          ex);
-      }
-    }
-
-    // Check the configured column families exist
-    for (String cf : conf.getColumnFamilies()) {
-      if (!columnFamilyExists(cf)) {
-        throw new RuntimeException(String.format(
-          "HBase table '%s' does not have column family '%s'", conf.getTableName(), cf));
-      }
-    }
-  }
-
-  protected TableProvider getTableProvider() throws IOException {
-    if(connectorImpl == null || connectorImpl.length() == 0 || connectorImpl.charAt(0) == '$') {
-      return new HTableProvider();
-    }
-    else {
-      try {
-        Class<? extends TableProvider> clazz = (Class<? extends TableProvider>) Class.forName(connectorImpl);
-        return clazz.getConstructor().newInstance();
-      } catch (InstantiationException e) {
-        throw new IOException("Unable to instantiate connector.", e);
-      } catch (IllegalAccessException e) {
-        throw new IOException("Unable to instantiate connector: illegal access", e);
-      } catch (InvocationTargetException e) {
-        throw new IOException("Unable to instantiate connector", e);
-      } catch (NoSuchMethodException e) {
-        throw new IOException("Unable to instantiate connector: no such method", e);
-      } catch (ClassNotFoundException e) {
-        throw new IOException("Unable to instantiate connector: class not found", e);
-      }
-    }
-  }
-
-  /**
-   * Checks to see if table contains the given column family
-   * @param columnFamily The column family name
-   * @return boolean
-   * @throws IOException
-   */
-  private boolean columnFamilyExists(final String columnFamily) throws IOException {
-    return this.table.getTableDescriptor().hasFamily(Bytes.toBytes(columnFamily));
-  }
-
-  /**
-   * @return the table
-   */
-  public HTableInterface getTable() {
-    return table;
-  }
-
-  @Override
-  public void put(Put put) throws IOException {
-      table.put(put);
-  }
-
-  /**
-   * Close the table
-   */
-  @Override
-  public void close() {
-    try {
-      this.table.close();
-    } catch (IOException ex) {
-      LOG.error("Unable to close connection to HBase table " + tableName, ex);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableProvider.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableProvider.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableProvider.java
deleted file mode 100644
index e454f04..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/HTableProvider.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-
-import java.io.IOException;
-
-public class HTableProvider implements TableProvider {
-    @Override
-    public HTableInterface getTable(Configuration config, String tableName) throws IOException {
-        return new HTable(config, tableName);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableConfig.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableConfig.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableConfig.java
deleted file mode 100644
index de2e929..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableConfig.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-
-
-public class TableConfig implements Serializable {
-    static final long serialVersionUID = -1L;
-    private String tableName;
-    private boolean batch = true;
-    protected Map<String, Set<String>> columnFamilies = new HashMap<>();
-    private long writeBufferSize = 0L;
-    private String connectorImpl;
-
-    public TableConfig() {
-
-    }
-
-    public TableConfig(String tableName) {
-        this.tableName = tableName;
-    }
-
-    public String getTableName() {
-        return tableName;
-    }
-
-    public TableConfig withConnectorImpl(String impl) {
-        connectorImpl = impl;
-        return this;
-    }
-
-    public TableConfig withTable(String table) {
-        this.tableName = table;
-        return this;
-    }
-
-    public TableConfig withBatch(Boolean isBatch) {
-        this.batch = isBatch;
-        return this;
-    }
-
-    public String getConnectorImpl() {
-        return connectorImpl;
-    }
-
-    /**
-     * @return Whether batch mode is enabled
-     */
-    public boolean isBatch() {
-        return batch;
-    }
-
-    /**
-     * @param batch
-     *          Whether to enable HBase's client-side write buffer.
-     *          <p>
-     *          When enabled your bolt will store put operations locally until the
-     *          write buffer is full, so they can be sent to HBase in a single RPC
-     *          call. When disabled each put operation is effectively an RPC and
-     *          is sent straight to HBase. As your bolt can process thousands of
-     *          values per second it is recommended that the write buffer is
-     *          enabled.
-     *          <p>
-     *          Enabled by default
-     */
-    public void setBatch(boolean batch) {
-        this.batch = batch;
-    }
-    /**
-     * @param writeBufferSize
-     *          Overrides the client-side write buffer size.
-     *          <p>
-     *          By default the write buffer size is 2 MB (2097152 bytes). If you
-     *          are storing larger data, you may want to consider increasing this
-     *          value to allow your bolt to efficiently group together a larger
-     *          number of records per RPC
-     *          <p>
-     *          Overrides the write buffer size you have set in your
-     *          hbase-site.xml e.g. <code>hbase.client.write.buffer</code>
-     */
-    public void setWriteBufferSize(long writeBufferSize) {
-        this.writeBufferSize = writeBufferSize;
-    }
-
-    /**
-     * @return the writeBufferSize
-     */
-    public long getWriteBufferSize() {
-        return writeBufferSize;
-    }
-    /**
-     * @return A Set of configured column families
-     */
-    public Set<String> getColumnFamilies() {
-        return this.columnFamilies.keySet();
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableProvider.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableProvider.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableProvider.java
deleted file mode 100644
index dc0569e..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TableProvider.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HTableInterface;
-
-import java.io.IOException;
-import java.io.Serializable;
-
-public interface TableProvider extends Serializable {
-    HTableInterface getTable(Configuration config, String tableName) throws IOException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TupleTableConfig.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
deleted file mode 100644
index a9ec20a..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/TupleTableConfig.java
+++ /dev/null
@@ -1,276 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.Set;
-import java.util.TreeMap;
-
-import com.google.common.base.Joiner;
-import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.Increment;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.util.Bytes;
-
-import backtype.storm.tuple.Tuple;
-import org.apache.log4j.Logger;
-
-/**
- * Configuration for Storm {@link Tuple} to HBase serialization.
- */
-@SuppressWarnings("serial")
-public class TupleTableConfig extends TableConfig implements Serializable {
-  private static final Logger LOG = Logger.getLogger(TupleTableConfig.class);
-  static final long serialVersionUID = -1L;
-  public static final long DEFAULT_INCREMENT = 1L;
-  
-  protected String tupleRowKeyField;
-  protected String tupleTimestampField;
-  protected Durability durability = Durability.USE_DEFAULT;
-  private String fields;
-
-  /**
-   * Initialize configuration
-   * 
-   * @param table
-   *          The HBase table name
-   * @param rowKeyField
-   *          The {@link Tuple} field used to set the rowKey
-   */
-  public TupleTableConfig(final String table, final String rowKeyField) {
-    super(table);
-    this.tupleRowKeyField = rowKeyField;
-    this.tupleTimestampField = "";
-    this.columnFamilies = new HashMap<String, Set<String>>();
-  }
-  
-  /**
-   * Initialize configuration
-   * 
-   * @param table
-   *          The HBase table name
-   * @param rowKeyField
-   *          The {@link Tuple} field used to set the rowKey
-   * @param timestampField
-   *          The {@link Tuple} field used to set the timestamp
-   */
-  public TupleTableConfig(final String table, final String rowKeyField, final String timestampField) {
-    super(table);
-    this.tupleRowKeyField = rowKeyField;
-    this.tupleTimestampField = timestampField;
-    this.columnFamilies = new HashMap<String, Set<String>>();
-  }
-
-  public TupleTableConfig() {
-    super(null);
-    this.columnFamilies = new HashMap<String, Set<String>>();
-  }
-
-
-
-  public TupleTableConfig withRowKeyField(String rowKeyField) {
-    this.tupleRowKeyField = rowKeyField;
-    return this;
-  }
-
-  public TupleTableConfig withTimestampField(String timestampField) {
-    this.tupleTimestampField = timestampField;
-    return this;
-  }
-
-  public TupleTableConfig withFields(String fields) {
-    this.fields = fields;
-    return this;
-  }
-
-
-
-  public String getFields() {
-    return fields;
-  }
-
-
-
-  /**
-   * Add column family and column qualifier to be extracted from tuple
-   * 
-   * @param columnFamily
-   *          The column family name
-   * @param columnQualifier
-   *          The column qualifier name
-   */
-  public void addColumn(final String columnFamily, final String columnQualifier) {
-    Set<String> columns = this.columnFamilies.get(columnFamily);
-    
-    if (columns == null) {
-      columns = new HashSet<String>();
-    }
-    columns.add(columnQualifier);
-    
-    this.columnFamilies.put(columnFamily, columns);
-  }
-  
-  /**
-   * Creates a HBase {@link Put} from a Storm {@link Tuple}
-   * 
-   * @param tuple
-   *          The {@link Tuple}
-   * @return {@link Put}
-   */
-  public Put getPutFromTuple(final Tuple tuple) throws IOException{
-    byte[] rowKey = null;
-    try {
-      rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));
-    }
-    catch(IllegalArgumentException iae) {
-      throw new IOException("Unable to retrieve " + tupleRowKeyField + " from " + tuple + " [ " + Joiner.on(',').join(tuple.getFields()) + " ]", iae);
-    }
-    
-    long ts = 0;
-    if (!tupleTimestampField.equals("")) {
-      ts = tuple.getLongByField(tupleTimestampField);
-    }
-    
-    Put p = new Put(rowKey);
-    
-    p.setDurability(durability);
-    
-    if (columnFamilies.size() > 0) {
-      for (String cf : columnFamilies.keySet()) {
-        byte[] cfBytes = Bytes.toBytes(cf);
-        for (String cq : columnFamilies.get(cf)) {
-          byte[] cqBytes = Bytes.toBytes(cq);
-          byte[] val = tuple.getBinaryByField(cq);
-          
-          if (ts > 0) {
-            p.add(cfBytes, cqBytes, ts, val);
-          } else {
-            p.add(cfBytes, cqBytes, val);
-          }
-        }
-      }
-    }
-    
-    return p;
-  }
-  
-  /**
-   * Creates a HBase {@link Increment} from a Storm {@link Tuple}
-   * 
-   * @param tuple
-   *          The {@link Tuple}
-   * @param increment
-   *          The amount to increment the counter by
-   * @return {@link Increment}
-   */
-  public Increment getIncrementFromTuple(final Tuple tuple, final long increment) {
-    byte[] rowKey = Bytes.toBytes(tuple.getStringByField(tupleRowKeyField));
-    
-    Increment inc = new Increment(rowKey);
-    inc.setDurability(durability);
-    
-    if (columnFamilies.size() > 0) {
-      for (String cf : columnFamilies.keySet()) {
-        byte[] cfBytes = Bytes.toBytes(cf);
-        for (String cq : columnFamilies.get(cf)) {
-          byte[] val;
-          try {
-            val = Bytes.toBytes(tuple.getStringByField(cq));
-          } catch (IllegalArgumentException ex) {
-            // if cq isn't a tuple field, use cq for counter instead of tuple
-            // value
-            val = Bytes.toBytes(cq);
-          }
-          inc.addColumn(cfBytes, val, increment);
-        }
-      }
-    }
-    
-    return inc;
-  }
-  
-  /**
-   * Increment the counter for the given family and column by the specified
-   * amount
-   * <p>
-   * If the family and column already exist in the Increment the counter value
-   * is incremented by the specified amount rather than overridden, as it is in
-   * HBase's {@link Increment#addColumn(byte[], byte[], long)} method
-   * 
-   * @param inc
-   *          The {@link Increment} to update
-   * @param family
-   *          The column family
-   * @param qualifier
-   *          The column qualifier
-   * @param amount
-   *          The amount to increment the counter by
-   */
-  public static void addIncrement(Increment inc, final byte[] family, final byte[] qualifier, final Long amount) {
-    
-    NavigableMap<byte[], Long> set = inc.getFamilyMapOfLongs().get(family);
-    if (set == null) {
-      set = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
-    }
-    
-    // If qualifier exists, increment amount
-    Long counter = set.get(qualifier);
-    if (counter == null) {
-      counter = 0L;
-    }
-    set.put(qualifier, amount + counter);
-    
-    inc.getFamilyMapOfLongs().put(family, set);
-  }
-  
-
-
-  /**
-   * @param durability
-   *          Sets whether to write to HBase's edit log.
-   *          <p>
-   *          Setting to false will mean fewer operations to perform when
-   *          writing to HBase and hence better performance, but changes that
-   *          haven't been flushed to a store file will be lost in the event of
-   *          HBase failure
-   *          <p>
-   *          Enabled by default
-   */
-  public void setDurability(Durability durability) {
-    this.durability = durability;
-  }
-  
-  
-  public Durability getDurability() {
-    return  durability;
-  }
-  
-
-
-  /**
-   * @return the tupleRowKeyField
-   */
-  public String getTupleRowKeyField() {
-    return tupleRowKeyField;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/AbstractConverter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/AbstractConverter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/AbstractConverter.java
deleted file mode 100644
index c58dc22..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/AbstractConverter.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.hbase.converters;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Iterables;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.metron.reference.lookup.LookupKV;
-import org.apache.metron.reference.lookup.LookupKey;
-import org.apache.metron.reference.lookup.LookupValue;
-
-import javax.annotation.Nullable;
-import java.io.IOException;
-import java.util.*;
-
-
-public abstract class AbstractConverter<KEY_T extends LookupKey, VALUE_T extends LookupValue> implements HbaseConverter<KEY_T,VALUE_T> {
-  public static Function<Cell, Map.Entry<byte[], byte[]>> CELL_TO_ENTRY  = new Function<Cell, Map.Entry<byte[], byte[]>>() {
-
-    @Nullable
-    @Override
-    public Map.Entry<byte[], byte[]> apply(@Nullable Cell cell) {
-      return new AbstractMap.SimpleEntry<>(cell.getQualifier(), cell.getValue());
-    }
-  };
-  @Override
-  public Put toPut(String columnFamily, KEY_T key, VALUE_T values) throws IOException {
-    Put put = new Put(key.toBytes());
-    byte[] cf = Bytes.toBytes(columnFamily);
-    for(Map.Entry<byte[], byte[]> kv : values.toColumns()) {
-      put.add(cf, kv.getKey(), kv.getValue());
-    }
-    return put;
-  }
-
-  public LookupKV<KEY_T, VALUE_T> fromPut(Put put, String columnFamily, KEY_T key, VALUE_T value) throws IOException {
-    key.fromBytes(put.getRow());
-    byte[] cf = Bytes.toBytes(columnFamily);
-    value.fromColumns(Iterables.transform(put.getFamilyCellMap().get(cf), CELL_TO_ENTRY));
-    return new LookupKV<>(key, value);
-  }
-
-  @Override
-  public Result toResult(String columnFamily, KEY_T key, VALUE_T values) throws IOException {
-    Put put = toPut(columnFamily, key, values);
-    return Result.create(put.getFamilyCellMap().get(Bytes.toBytes(columnFamily)));
-  }
-
-  public LookupKV<KEY_T, VALUE_T> fromResult(Result result, String columnFamily, KEY_T key, VALUE_T value) throws IOException {
-    if(result == null || result.getRow() == null) {
-      return null;
-    }
-    key.fromBytes(result.getRow());
-    byte[] cf = Bytes.toBytes(columnFamily);
-    NavigableMap<byte[], byte[]> cols = result.getFamilyMap(cf);
-    value.fromColumns(cols.entrySet());
-    return new LookupKV<>(key, value);
-  }
-  @Override
-  public Get toGet(String columnFamily, KEY_T key) {
-    Get ret = new Get(key.toBytes());
-    ret.addFamily(Bytes.toBytes(columnFamily));
-    return ret;
-  }
-
-  public static Iterable<Map.Entry<byte[], byte[]>> toEntries(byte[]... kvs) {
-    if(kvs.length % 2 != 0)  {
-      throw new IllegalStateException("Must be an even size");
-    }
-    List<Map.Entry<byte[], byte[]>> ret = new ArrayList<>(kvs.length/2);
-    for(int i = 0;i < kvs.length;i += 2) {
-      ret.add(new AbstractMap.SimpleImmutableEntry<>(kvs[i], kvs[i+1])) ;
-    }
-    return ret;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/HbaseConverter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/HbaseConverter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/HbaseConverter.java
deleted file mode 100644
index 449d9cf..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/HbaseConverter.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.hbase.converters;
-
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.metron.reference.lookup.LookupKV;
-import org.apache.metron.reference.lookup.LookupKey;
-import org.apache.metron.reference.lookup.LookupValue;
-
-import java.io.IOException;
-
-public interface HbaseConverter<KEY_T extends LookupKey, VALUE_T extends LookupValue> {
-    Put toPut(String columnFamily, KEY_T key, VALUE_T values) throws IOException;
-
-    LookupKV<KEY_T, VALUE_T> fromPut(Put put, String columnFamily) throws IOException;
-
-    Result toResult(String columnFamily, KEY_T key, VALUE_T values) throws IOException;
-
-    LookupKV<KEY_T, VALUE_T> fromResult(Result result, String columnFamily) throws IOException;
-
-    Get toGet(String columnFamily, KEY_T key);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentConverter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentConverter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentConverter.java
deleted file mode 100644
index a044498..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentConverter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase.converters.enrichment;
-
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.metron.hbase.converters.AbstractConverter;
-import org.apache.metron.reference.lookup.LookupKV;
-
-import java.io.IOException;
-
-public class EnrichmentConverter extends AbstractConverter<EnrichmentKey, EnrichmentValue> {
-
-  @Override
-  public LookupKV<EnrichmentKey, EnrichmentValue> fromPut(Put put, String columnFamily) throws IOException {
-    return fromPut(put, columnFamily, new EnrichmentKey(), new EnrichmentValue());
-  }
-
-  @Override
-  public LookupKV<EnrichmentKey, EnrichmentValue> fromResult(Result result, String columnFamily) throws IOException {
-    return fromResult(result, columnFamily, new EnrichmentKey(), new EnrichmentValue());
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentHelper.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentHelper.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentHelper.java
deleted file mode 100644
index a3d1b66..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentHelper.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase.converters.enrichment;
-
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.metron.reference.lookup.LookupKV;
-
-import java.io.IOException;
-
-public enum EnrichmentHelper {
-    INSTANCE;
-    EnrichmentConverter converter = new EnrichmentConverter();
-
-    public void load(HTableInterface table, String cf, Iterable<LookupKV<EnrichmentKey, EnrichmentValue>> results) throws IOException {
-        for(LookupKV<EnrichmentKey, EnrichmentValue> result : results) {
-            Put put = converter.toPut(cf, result.getKey(), result.getValue());
-            table.put(put);
-        }
-    }
-}


[09/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/schema.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/schema.xml b/metron-platform/metron-solr/src/test/resources/solr/conf/schema.xml
new file mode 100644
index 0000000..a689e1c
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/schema.xml
@@ -0,0 +1,191 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<schema name="metron" version="1.5">
+
+    <field name="_version_" type="long" indexed="true" stored="true"/>
+    <field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false"/>
+    <field name="sensorType" type="string" indexed="true" stored="true" required="true"/>
+
+    <dynamicField name="*_i" type="int" indexed="true" stored="true"/>
+    <dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_s" type="string" indexed="true" stored="true"/>
+    <dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_l" type="long" indexed="true" stored="true"/>
+    <dynamicField name="*_ls" type="long" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
+    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_en" type="text_en" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
+    <dynamicField name="*_bs" type="boolean" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_f" type="float" indexed="true" stored="true"/>
+    <dynamicField name="*_fs" type="float" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_d" type="double" indexed="true" stored="true"/>
+    <dynamicField name="*_ds" type="double" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false"/>
+    <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
+    <dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_p" type="location" indexed="true" stored="true"/>
+    <dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
+    <dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
+    <dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
+    <dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
+    <dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
+    <dynamicField name="*_c" type="currency" indexed="true" stored="true"/>
+    <dynamicField name="ignored_*" type="ignored" multiValued="true"/>
+    <dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="random_*" type="random"/>
+
+    <uniqueKey>id</uniqueKey>
+
+    <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
+    <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
+    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
+    <fieldType name="binary" class="solr.BinaryField"/>
+    <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
+    <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
+        <analyzer>
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.EnglishPossessiveFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.EnglishPossessiveFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
+        <analyzer type="index">
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+    </fieldType>
+
+    <fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
+        <analyzer>
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_en.txt"/>
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.EnglishMinimalStemFilterFactory"/>
+            <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.ReversedWildcardFilterFactory" withOriginal="true"
+                    maxPosAsterisk="3" maxPosQuestion="2" maxFractionAsterisk="0.33"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
+        <analyzer>
+            <tokenizer class="solr.KeywordTokenizerFactory"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.TrimFilterFactory"/>
+            <filter class="solr.PatternReplaceFilterFactory"
+                    pattern="([^a-z])" replacement="" replace="all"
+            />
+        </analyzer>
+    </fieldType>
+    <fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
+        <analyzer>
+            <tokenizer class="solr.KeywordTokenizerFactory"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField"/>
+    <fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
+    <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
+    <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
+               geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers"/>
+    <fieldType name="bbox" class="solr.BBoxField"
+               geo="true" distanceUnits="kilometers" numberType="_bbox_coord"/>
+    <fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
+    <fieldType name="currency" class="solr.CurrencyField" precisionStep="8" defaultCurrency="USD" currencyConfig="currency.xml"/>
+</schema>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/solrconfig.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/solrconfig.xml b/metron-platform/metron-solr/src/test/resources/solr/conf/solrconfig.xml
new file mode 100644
index 0000000..7d30fea
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/solrconfig.xml
@@ -0,0 +1,583 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- 
+     For more details about configurations options that may appear in
+     this file, see http://wiki.apache.org/solr/SolrConfigXml. 
+-->
+<config>
+  <!-- In all configuration below, a prefix of "solr." for class names
+       is an alias that causes solr to search appropriate packages,
+       including org.apache.solr.(search|update|request|core|analysis)
+
+       You may also specify a fully qualified Java classname if you
+       have your own custom plugins.
+    -->
+
+  <!-- Controls what version of Lucene various components of Solr
+       adhere to.  Generally, you want to use the latest version to
+       get all bug fixes and improvements. It is highly recommended
+       that you fully re-index after changing this setting as it can
+       affect both how text is indexed and queried.
+  -->
+  <luceneMatchVersion>5.2.1</luceneMatchVersion>
+
+  <!-- Data Directory
+
+       Used to specify an alternate directory to hold all index data
+       other than the default ./data under the Solr home.  If
+       replication is in use, this should match the replication
+       configuration.
+    -->
+  <dataDir>${solr.data.dir:}</dataDir>
+
+
+  <!-- The DirectoryFactory to use for indexes.
+       
+       solr.StandardDirectoryFactory is filesystem
+       based and tries to pick the best implementation for the current
+       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
+       wraps solr.StandardDirectoryFactory and caches small files in memory
+       for better NRT performance.
+
+       One can force a particular implementation via solr.MMapDirectoryFactory,
+       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
+
+       solr.RAMDirectoryFactory is memory based, not
+       persistent, and doesn't work with replication.
+    -->
+  <directoryFactory name="DirectoryFactory" 
+                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
+  </directoryFactory> 
+
+  <!-- The CodecFactory for defining the format of the inverted index.
+       The default implementation is SchemaCodecFactory, which is the official Lucene
+       index format, but hooks into the schema to provide per-field customization of
+       the postings lists and per-document values in the fieldType element
+       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
+       are experimental, so if you choose to customize the index format, it's a good
+       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
+       before upgrading to a newer version to avoid unnecessary reindexing.
+  -->
+  <codecFactory class="solr.SchemaCodecFactory"/>
+
+  <schemaFactory class="ClassicIndexSchemaFactory"/>
+
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Index Config - These settings control low-level behavior of indexing
+       Most example settings here show the default value, but are commented
+       out, to more easily see where customizations have been made.
+       
+       Note: This replaces <indexDefaults> and <mainIndex> from older versions
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <indexConfig>
+
+    <!-- LockFactory 
+
+         This option specifies which Lucene LockFactory implementation
+         to use.
+      
+         single = SingleInstanceLockFactory - suggested for a
+                  read-only index or when there is no possibility of
+                  another process trying to modify the index.
+         native = NativeFSLockFactory - uses OS native file locking.
+                  Do not use when multiple solr webapps in the same
+                  JVM are attempting to share a single index.
+         simple = SimpleFSLockFactory  - uses a plain file for locking
+
+         Defaults: 'native' is default for Solr3.6 and later, otherwise
+                   'simple' is the default
+
+         More details on the nuances of each LockFactory...
+         http://wiki.apache.org/lucene-java/AvailableLockFactories
+    -->
+    <lockType>${solr.lock.type:native}</lockType>
+
+    <!-- Lucene Infostream
+       
+         To aid in advanced debugging, Lucene provides an "InfoStream"
+         of detailed information when indexing.
+
+         Setting the value to true will instruct the underlying Lucene
+         IndexWriter to write its info stream to solr's log. By default,
+         this is enabled here, and controlled through log4j.properties.
+      -->
+     <infoStream>true</infoStream>
+  </indexConfig>
+
+
+  <!-- JMX
+       
+       This example enables JMX if and only if an existing MBeanServer
+       is found, use this if you want to configure JMX through JVM
+       parameters. Remove this to disable exposing Solr configuration
+       and statistics to JMX.
+
+       For more details see http://wiki.apache.org/solr/SolrJmx
+    -->
+  <jmx />
+  <!-- If you want to connect to a particular server, specify the
+       agentId 
+    -->
+  <!-- <jmx agentId="myAgent" /> -->
+  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
+  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
+    -->
+
+  <!-- The default high-performance update handler -->
+  <updateHandler class="solr.DirectUpdateHandler2">
+
+    <!-- Enables a transaction log, used for real-time get, durability, and
+         and solr cloud replica recovery.  The log can grow as big as
+         uncommitted changes to the index, so use of a hard autoCommit
+         is recommended (see below).
+         "dir" - the target directory for transaction logs, defaults to the
+                solr data directory.
+         "numVersionBuckets" - sets the number of buckets used to keep
+                track of max version values when checking for re-ordered
+                updates; increase this value to reduce the cost of
+                synchronizing access to version buckets during high-volume
+                indexing, this requires 8 bytes (long) * numVersionBuckets
+                of heap space per Solr core.
+    -->
+    <updateLog>
+      <str name="dir">${solr.ulog.dir:}</str>
+      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
+    </updateLog>
+ 
+    <!-- AutoCommit
+
+         Perform a hard commit automatically under certain conditions.
+         Instead of enabling autoCommit, consider using "commitWithin"
+         when adding documents. 
+
+         http://wiki.apache.org/solr/UpdateXmlMessages
+
+         maxDocs - Maximum number of documents to add since the last
+                   commit before automatically triggering a new commit.
+
+         maxTime - Maximum amount of time in ms that is allowed to pass
+                   since a document was added before automatically
+                   triggering a new commit. 
+         openSearcher - if false, the commit causes recent index changes
+           to be flushed to stable storage, but does not cause a new
+           searcher to be opened to make those changes visible.
+
+         If the updateLog is enabled, then it's highly recommended to
+         have some sort of hard autoCommit to limit the log size.
+      -->
+     <autoCommit> 
+       <maxTime>${solr.autoCommit.maxTime:15000}</maxTime> 
+       <openSearcher>false</openSearcher> 
+     </autoCommit>
+
+    <!-- softAutoCommit is like autoCommit except it causes a
+         'soft' commit which only ensures that changes are visible
+         but does not ensure that data is synced to disk.  This is
+         faster and more near-realtime friendly than a hard commit.
+      -->
+     <autoSoftCommit> 
+       <maxTime>${solr.autoSoftCommit.maxTime:-1}</maxTime> 
+     </autoSoftCommit>
+
+  </updateHandler>
+  
+  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+       Query section - these settings control query time things like caches
+       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
+  <query>
+    <!-- Max Boolean Clauses
+
+         Maximum number of clauses in each BooleanQuery,  an exception
+         is thrown if exceeded.
+
+         ** WARNING **
+         
+         This option actually modifies a global Lucene property that
+         will affect all SolrCores.  If multiple solrconfig.xml files
+         disagree on this property, the value at any given moment will
+         be based on the last SolrCore to be initialized.
+         
+      -->
+    <maxBooleanClauses>1024</maxBooleanClauses>
+
+
+    <!-- Solr Internal Query Caches
+
+         There are two implementations of cache available for Solr,
+         LRUCache, based on a synchronized LinkedHashMap, and
+         FastLRUCache, based on a ConcurrentHashMap.  
+
+         FastLRUCache has faster gets and slower puts in single
+         threaded operation and thus is generally faster than LRUCache
+         when the hit ratio of the cache is high (> 75%), and may be
+         faster under other scenarios on multi-cpu systems.
+    -->
+
+    <!-- Filter Cache
+
+         Cache used by SolrIndexSearcher for filters (DocSets),
+         unordered sets of *all* documents that match a query.  When a
+         new searcher is opened, its caches may be prepopulated or
+         "autowarmed" using data from caches in the old searcher.
+         autowarmCount is the number of items to prepopulate.  For
+         LRUCache, the autowarmed items will be the most recently
+         accessed items.
+
+         Parameters:
+           class - the SolrCache implementation
+               (LRUCache or FastLRUCache)
+           size - the maximum number of entries in the cache
+           initialSize - the initial capacity (number of entries) of
+               the cache.  (see java.util.HashMap)
+           autowarmCount - the number of entries to prepopulate from
+               an old cache.  
+      -->
+    <filterCache class="solr.FastLRUCache"
+                 size="512"
+                 initialSize="512"
+                 autowarmCount="0"/>
+
+    <!-- Query Result Cache
+
+        Caches results of searches - ordered lists of document ids
+        (DocList) based on a query, a sort, and the range of documents requested.
+        Additional supported parameter by LRUCache:
+           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
+                      to occupy
+     -->
+    <queryResultCache class="solr.LRUCache"
+                     size="512"
+                     initialSize="512"
+                     autowarmCount="0"/>
+   
+    <!-- Document Cache
+
+         Caches Lucene Document objects (the stored fields for each
+         document).  Since Lucene internal document ids are transient,
+         this cache will not be autowarmed.  
+      -->
+    <documentCache class="solr.LRUCache"
+                   size="512"
+                   initialSize="512"
+                   autowarmCount="0"/>
+    
+    <!-- custom cache currently used by block join --> 
+    <cache name="perSegFilter"
+      class="solr.search.LRUCache"
+      size="10"
+      initialSize="0"
+      autowarmCount="10"
+      regenerator="solr.NoOpRegenerator" />
+
+    <!-- Lazy Field Loading
+
+         If true, stored fields that are not requested will be loaded
+         lazily.  This can result in a significant speed improvement
+         if the usual case is to not load all stored fields,
+         especially if the skipped fields are large compressed text
+         fields.
+    -->
+    <enableLazyFieldLoading>true</enableLazyFieldLoading>
+
+   <!-- Result Window Size
+
+        An optimization for use with the queryResultCache.  When a search
+        is requested, a superset of the requested number of document ids
+        are collected.  For example, if a search for a particular query
+        requests matching documents 10 through 19, and queryWindowSize is 50,
+        then documents 0 through 49 will be collected and cached.  Any further
+        requests in that range can be satisfied via the cache.  
+     -->
+   <queryResultWindowSize>20</queryResultWindowSize>
+
+   <!-- Maximum number of documents to cache for any entry in the
+        queryResultCache. 
+     -->
+   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
+
+    <!-- Use Cold Searcher
+
+         If a search request comes in and there is no current
+         registered searcher, then immediately register the still
+         warming searcher and use it.  If "false" then all requests
+         will block until the first searcher is done warming.
+      -->
+    <useColdSearcher>false</useColdSearcher>
+
+    <!-- Max Warming Searchers
+         
+         Maximum number of searchers that may be warming in the
+         background concurrently.  An error is returned if this limit
+         is exceeded.
+
+         Recommend values of 1-2 for read-only slaves, higher for
+         masters w/o cache warming.
+      -->
+    <maxWarmingSearchers>2</maxWarmingSearchers>
+
+  </query>
+
+
+  <!-- Request Dispatcher
+
+       This section contains instructions for how the SolrDispatchFilter
+       should behave when processing requests for this SolrCore.
+
+       handleSelect is a legacy option that affects the behavior of requests
+       such as /select?qt=XXX
+
+       handleSelect="true" will cause the SolrDispatchFilter to process
+       the request and dispatch the query to a handler specified by the 
+       "qt" param, assuming "/select" isn't already registered.
+
+       handleSelect="false" will cause the SolrDispatchFilter to
+       ignore "/select" requests, resulting in a 404 unless a handler
+       is explicitly registered with the name "/select"
+
+       handleSelect="true" is not recommended for new users, but is the default
+       for backwards compatibility
+    -->
+  <requestDispatcher handleSelect="false" >
+    <!-- Request Parsing
+
+         These settings indicate how Solr Requests may be parsed, and
+         what restrictions may be placed on the ContentStreams from
+         those requests
+
+         enableRemoteStreaming - enables use of the stream.file
+         and stream.url parameters for specifying remote streams.
+
+         multipartUploadLimitInKB - specifies the max size (in KiB) of
+         Multipart File Uploads that Solr will allow in a Request.
+         
+         formdataUploadLimitInKB - specifies the max size (in KiB) of
+         form data (application/x-www-form-urlencoded) sent via
+         POST. You can use POST to pass request parameters not
+         fitting into the URL.
+         
+         addHttpRequestToContext - if set to true, it will instruct
+         the requestParsers to include the original HttpServletRequest
+         object in the context map of the SolrQueryRequest under the 
+         key "httpRequest". It will not be used by any of the existing
+         Solr components, but may be useful when developing custom 
+         plugins.
+         
+         *** WARNING ***
+         The settings below authorize Solr to fetch remote files, You
+         should make sure your system has some authentication before
+         using enableRemoteStreaming="true"
+
+      --> 
+    <requestParsers enableRemoteStreaming="true" 
+                    multipartUploadLimitInKB="2048000"
+                    formdataUploadLimitInKB="2048"
+                    addHttpRequestToContext="false"/>
+
+    <!-- HTTP Caching
+
+         Set HTTP caching related parameters (for proxy caches and clients).
+
+         The options below instruct Solr not to output any HTTP Caching
+         related headers
+      -->
+    <httpCaching never304="true" />
+
+  </requestDispatcher>
+
+  <!-- Request Handlers 
+
+       http://wiki.apache.org/solr/SolrRequestHandler
+
+       Incoming queries will be dispatched to a specific handler by name
+       based on the path specified in the request.
+
+       Legacy behavior: If the request path uses "/select" but no Request
+       Handler has that name, and if handleSelect="true" has been specified in
+       the requestDispatcher, then the Request Handler is dispatched based on
+       the qt parameter.  Handlers without a leading '/' are accessed this way
+       like so: http://host/app/[core/]select?qt=name  If no qt is
+       given, then the requestHandler that declares default="true" will be
+       used or the one named "standard".
+
+       If a Request Handler is declared with startup="lazy", then it will
+       not be initialized until the first request that uses it.
+
+    -->
+  <!-- SearchHandler
+
+       http://wiki.apache.org/solr/SearchHandler
+
+       For processing Search Queries, the primary Request Handler
+       provided with Solr is "SearchHandler". It delegates to a sequence
+       of SearchComponents (see below) and supports distributed
+       queries across multiple shards
+    -->
+  <requestHandler name="/select" class="solr.SearchHandler">
+    <!-- default values for query parameters can be specified, these
+         will be overridden by parameters in the request
+      -->
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <int name="rows">10</int>
+     </lst>
+
+    </requestHandler>
+
+  <!-- A request handler that returns indented JSON by default -->
+  <requestHandler name="/query" class="solr.SearchHandler">
+     <lst name="defaults">
+       <str name="echoParams">explicit</str>
+       <str name="wt">json</str>
+       <str name="indent">true</str>
+       <str name="df">text</str>
+     </lst>
+  </requestHandler>
+
+  <!--
+    The export request handler is used to export full sorted result sets.
+    Do not change these defaults.
+  -->
+  <requestHandler name="/export" class="solr.SearchHandler">
+    <lst name="invariants">
+      <str name="rq">{!xport}</str>
+      <str name="wt">xsort</str>
+      <str name="distrib">false</str>
+    </lst>
+
+    <arr name="components">
+      <str>query</str>
+    </arr>
+  </requestHandler>
+
+
+  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell">
+    <lst name="defaults">
+      <str name="df">text</str>
+    </lst>
+  </initParams>
+
+  <!-- Field Analysis Request Handler
+
+       RequestHandler that provides much the same functionality as
+       analysis.jsp. Provides the ability to specify multiple field
+       types and field names in the same request and outputs
+       index-time and query-time analysis for each of them.
+
+       Request parameters are:
+       analysis.fieldname - field name whose analyzers are to be used
+
+       analysis.fieldtype - field type whose analyzers are to be used
+       analysis.fieldvalue - text for index-time analysis
+       q (or analysis.q) - text for query time analysis
+       analysis.showmatch (true|false) - When set to true and when
+           query analysis is performed, the produced tokens of the
+           field value analysis will be marked as "matched" for every
+           token that is produced by the query analysis
+   -->
+  <requestHandler name="/analysis/field" 
+                  startup="lazy"
+                  class="solr.FieldAnalysisRequestHandler" />
+
+
+  <!-- Document Analysis Handler
+
+       http://wiki.apache.org/solr/AnalysisRequestHandler
+
+       An analysis handler that provides a breakdown of the analysis
+       process of provided documents. This handler expects a (single)
+       content stream with the following format:
+
+       <docs>
+         <doc>
+           <field name="id">1</field>
+           <field name="name">The Name</field>
+           <field name="text">The Text Value</field>
+         </doc>
+         <doc>...</doc>
+         <doc>...</doc>
+         ...
+       </docs>
+
+    Note: Each document must contain a field which serves as the
+    unique key. This key is used in the returned response to associate
+    an analysis breakdown to the analyzed document.
+
+    Like the FieldAnalysisRequestHandler, this handler also supports
+    query analysis by sending either an "analysis.query" or "q"
+    request parameter that holds the query text to be analyzed. It
+    also supports the "analysis.showmatch" parameter which when set to
+    true, all field tokens that match the query tokens will be marked
+    as a "match". 
+  -->
+  <requestHandler name="/analysis/document" 
+                  class="solr.DocumentAnalysisRequestHandler" 
+                  startup="lazy" />
+
+  <!-- Echo the request contents back to the client -->
+  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
+    <lst name="defaults">
+     <str name="echoParams">explicit</str> 
+     <str name="echoHandler">true</str>
+    </lst>
+  </requestHandler>
+  
+
+
+  <!-- Search Components
+
+       Search components are registered to SolrCore and used by 
+       instances of SearchHandler (which can access them by name)
+       
+       By default, the following components are available:
+       
+       <searchComponent name="query"     class="solr.QueryComponent" />
+       <searchComponent name="facet"     class="solr.FacetComponent" />
+       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
+       <searchComponent name="highlight" class="solr.HighlightComponent" />
+       <searchComponent name="stats"     class="solr.StatsComponent" />
+       <searchComponent name="debug"     class="solr.DebugComponent" />
+       
+     -->
+
+  <!-- Terms Component
+
+       http://wiki.apache.org/solr/TermsComponent
+
+       A component to return terms and document frequency of those
+       terms
+    -->
+  <searchComponent name="terms" class="solr.TermsComponent"/>
+
+  <!-- A request handler for demonstrating the terms component -->
+  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
+     <lst name="defaults">
+      <bool name="terms">true</bool>
+      <bool name="distrib">false</bool>
+    </lst>     
+    <arr name="components">
+      <str>terms</str>
+    </arr>
+  </requestHandler>
+
+  <!-- Legacy config for the admin interface -->
+  <admin>
+    <defaultQuery>*:*</defaultQuery>
+  </admin>
+
+</config>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/stopwords.txt
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/stopwords.txt b/metron-platform/metron-solr/src/test/resources/solr/conf/stopwords.txt
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/stopwords.txt
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/synonyms.txt
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/synonyms.txt b/metron-platform/metron-solr/src/test/resources/solr/conf/synonyms.txt
new file mode 100644
index 0000000..7f72128
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/synonyms.txt
@@ -0,0 +1,29 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#-----------------------------------------------------------------------
+#some test synonym mappings unlikely to appear in real input text
+aaafoo => aaabar
+bbbfoo => bbbfoo bbbbar
+cccfoo => cccbar cccbaz
+fooaaa,baraaa,bazaaa
+
+# Some synonym groups specific to this example
+GB,gib,gigabyte,gigabytes
+MB,mib,megabyte,megabytes
+Television, Televisions, TV, TVs
+#notice we use "gib" instead of "GiB" so any WordDelimiterFilter coming
+#after us won't split it into two words.
+
+# Synonym mappings can be used for spelling correction too
+pixima => pixma
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/solr.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/solr.xml b/metron-platform/metron-solr/src/test/resources/solr/solr.xml
new file mode 100644
index 0000000..318ad09
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/solr.xml
@@ -0,0 +1,14 @@
+<solr>
+    <solrcloud>
+        <str name="host">${host:}</str>
+        <int name="hostPort">${jetty.port:8983}</int>
+        <str name="hostContext">${hostContext:solr}</str>
+        <int name="zkClientTimeout">${zkClientTimeout:15000}</int>
+        <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+    </solrcloud>
+    <shardHandlerFactory name="shardHandlerFactory"
+                         class="HttpShardHandlerFactory">
+        <int name="socketTimeout">${socketTimeout:0}</int>
+        <int name="connTimeout">${connTimeout:0}</int>
+    </shardHandlerFactory>
+</solr>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/pom.xml b/metron-platform/metron-test-utilities/pom.xml
new file mode 100644
index 0000000..a4eac92
--- /dev/null
+++ b/metron-platform/metron-test-utilities/pom.xml
@@ -0,0 +1,117 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+  Foundation (ASF) under one or more contributor license agreements. See the 
+  NOTICE file distributed with this work for additional information regarding 
+  copyright ownership. The ASF licenses this file to You under the Apache License, 
+  Version 2.0 (the "License"); you may not use this file except in compliance 
+  with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+  Unless required by applicable law or agreed to in writing, software distributed 
+  under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+  OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.metron</groupId>
+    <artifactId>metron-platform</artifactId>
+    <version>0.1BETA</version>
+  </parent>
+  <artifactId>metron-test-utilities</artifactId>
+  <description>Metron Test Utilities</description>
+  <properties>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>log4j</groupId>
+      <artifactId>log4j</artifactId>
+      <version>1.2.17</version>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>${global_json_simple_version}</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${global_hbase_guava_version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <version>${global_hbase_version}</version>
+      <exclusions>
+        <exclusion>
+          <artifactId>log4j</artifactId>
+          <groupId>log4j</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.storm</groupId>
+      <artifactId>storm-core</artifactId>
+      <version>${global_storm_version}</version>
+      <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <artifactId>servlet-api</artifactId>
+          <groupId>javax.servlet</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>log4j-over-slf4j</artifactId>
+          <groupId>org.slf4j</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.9.2</artifactId>
+      <version>${global_kafka_version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>log4j</groupId>
+          <artifactId>log4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-recipes</artifactId>
+      <version>2.7.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-test</artifactId>
+      <version>2.7.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <version>${global_mockito_version}</version>
+    </dependency>
+    <dependency>
+      <groupId>nl.jqno.equalsverifier</groupId>
+      <artifactId>equalsverifier</artifactId>
+      <version>2.0.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.adrianwalker</groupId>
+      <artifactId>multiline-string</artifactId>
+      <version>0.1.2</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+    </dependency>
+  </dependencies>
+
+  <build>
+  </build>
+  <reporting>
+  </reporting>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/TestConstants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/TestConstants.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/TestConstants.java
new file mode 100644
index 0000000..c798158
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/TestConstants.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron;
+
+/**
+ * Shared filesystem path constants for the sample configuration and data
+ * files used by Metron tests.  Paths are relative to a platform module's
+ * working directory (tests run from within a sibling module).
+ */
+public class TestConstants {
+
+  // Sensor/global sample configuration directory.
+  public final static String SAMPLE_CONFIG_PATH = "../metron-integration-test/src/main/resources/sample/config/";
+  // Raw sample telemetry input files.
+  public final static String SAMPLE_DATA_INPUT_PATH = "../metron-integration-test/src/main/resources/sample/data/SampleInput/";
+  // Expected parser output for the sample input.
+  public final static String SAMPLE_DATA_PARSED_PATH = "../metron-integration-test/src/main/resources/sample/data/SampleParsed/";
+  // Expected indexed output for the sample input.
+  public final static String SAMPLE_DATA_INDEXED_PATH = "../metron-integration-test/src/main/resources/sample/data/SampleIndexed/";
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseBoltTest.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseBoltTest.java
new file mode 100644
index 0000000..a655e14
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseBoltTest.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableSet;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.recipes.cache.TreeCache;
+import org.hamcrest.Description;
+import org.json.simple.JSONObject;
+import org.junit.Before;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Base class for Storm bolt unit tests.  Provides common Mockito mocks for
+ * Storm and Curator collaborators, plus small helpers for matching declared
+ * fields and stripping timing data from JSON messages.
+ */
+public abstract class BaseBoltTest {
+  
+  // Mocked Storm topology context passed to the bolt's prepare method.
+  @Mock
+  protected TopologyContext topologyContext;
+
+  // Mocked collector used to verify emitted/acked/failed tuples.
+  @Mock
+  protected OutputCollector outputCollector;
+
+  // Mocked input tuple handed to the bolt's execute method.
+  @Mock
+  protected Tuple tuple;
+
+  // Mocked declarer used to verify declared output fields and streams.
+  @Mock
+  protected OutputFieldsDeclarer declarer;
+
+  // Mocked Curator/ZooKeeper client for bolts that load configuration.
+  @Mock
+  protected CuratorFramework client;
+
+  // Mocked Curator TreeCache for bolts that watch configuration updates.
+  @Mock
+  protected TreeCache cache;
+
+  /** Initializes all {@code @Mock}-annotated fields before each test. */
+  @Before
+  public void initMocks() {
+    MockitoAnnotations.initMocks(this);
+  }
+
+  /**
+   * Mockito matcher that compares a Storm {@link Fields} argument against an
+   * expected, ordered list of field names.
+   */
+  protected class FieldsMatcher extends ArgumentMatcher<Fields> {
+
+    private List<String> expectedFields;
+
+    public FieldsMatcher(String... fields) {
+      this.expectedFields = Arrays.asList(fields);
+    }
+
+    // Matches only when both the names and their order are identical.
+    @Override
+    public boolean matches(Object o) {
+      Fields fields = (Fields) o;
+      return expectedFields.equals(fields.toList());
+    }
+
+    // Renders the expected field list in Mockito failure messages.
+    @Override
+    public void describeTo(Description description) {
+      description.appendText(String.format("[%s]", Joiner.on(",").join(expectedFields)));
+    }
+
+  }
+
+  /**
+   * Removes every key ending in ".ts" (timing fields) from the message so
+   * tests can compare JSON payloads without nondeterministic timestamps.
+   */
+  public void removeTimingFields(JSONObject message) {
+    // Snapshot the key set first so we can remove entries from the message
+    // without a ConcurrentModificationException during iteration.
+    ImmutableSet keys = ImmutableSet.copyOf(message.keySet());
+    for (Object key : keys) {
+      if (key.toString().endsWith(".ts")) {
+        message.remove(key);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseEnrichmentBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseEnrichmentBoltTest.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseEnrichmentBoltTest.java
new file mode 100644
index 0000000..83bcb92
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/BaseEnrichmentBoltTest.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.bolt;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.TestConstants;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Base class for enrichment bolt tests.  Supplies a sample sensor
+ * configuration path, the common enrichment stream ids, and pre-parsed
+ * sample JSON messages.
+ *
+ * NOTE: the Javadoc blocks on the {@code @Multiline} fields below are
+ * significant -- the multiline-string processor turns each comment's
+ * content into the field's String value at compile time, so those
+ * comments must not be edited as ordinary documentation.
+ */
+public class BaseEnrichmentBoltTest extends BaseBoltTest {
+
+  // Path to the sample "yaf" sensor enrichment configuration.
+  public String sampleSensorEnrichmentConfigPath = TestConstants.SAMPLE_CONFIG_PATH + "sensors/yaf.json";
+  // Stream ids registered in parseBaseMessages (geo, host, hbaseEnrichment).
+  protected Set<String> streamIds = new HashSet<>();
+  protected String key = "someKey";
+  protected String sensorType = "yaf";
+
+  /**
+   * {
+   * "ip_src_addr": "ip1",
+   * "ip_dst_addr": "ip2",
+   * "source.type": "yaf"
+   * }
+   */
+  @Multiline
+  protected String sampleMessageString;
+
+  /**
+   * {
+   * "enrichments.geo.ip_src_addr": "ip1",
+   * "enrichments.geo.ip_dst_addr": "ip2",
+   * "source.type": "yaf"
+   * }
+   */
+  @Multiline
+  protected String geoMessageString;
+
+  /**
+   * {
+   * "enrichments.host.ip_src_addr": "ip1",
+   * "enrichments.host.ip_dst_addr": "ip2",
+   * "source.type": "yaf"
+   * }
+   */
+  @Multiline
+  protected String hostMessageString;
+
+  /**
+   * {
+   * "enrichments.hbaseEnrichment.ip_src_addr": "ip1",
+   * "enrichments.hbaseEnrichment.ip_dst_addr": "ip2",
+   * "source.type": "yaf"
+   * }
+   */
+  @Multiline
+  protected String hbaseEnrichmentMessageString;
+
+  // Parsed forms of the message strings above; populated in parseBaseMessages.
+  protected JSONObject sampleMessage;
+  protected JSONObject geoMessage;
+  protected JSONObject hostMessage;
+  protected JSONObject hbaseEnrichmentMessage;
+
+  /**
+   * Parses the sample message strings into JSONObjects and registers the
+   * enrichment stream ids used by the bolts under test.
+   *
+   * @throws ParseException if any sample message is not valid JSON
+   */
+  @Before
+  public void parseBaseMessages() throws ParseException {
+    JSONParser parser = new JSONParser();
+    sampleMessage = (JSONObject) parser.parse(sampleMessageString);
+    geoMessage = (JSONObject) parser.parse(geoMessageString);
+    hostMessage = (JSONObject) parser.parse(hostMessageString);
+    hbaseEnrichmentMessage = (JSONObject) parser.parse(hbaseEnrichmentMessageString);
+    streamIds.add("geo");
+    streamIds.add("host");
+    streamIds.add("hbaseEnrichment");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/PrintingBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/PrintingBolt.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/PrintingBolt.java
new file mode 100644
index 0000000..83681c7
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/bolt/PrintingBolt.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.test.bolt;
+
+import java.util.Map;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Tuple;
+
+@SuppressWarnings("serial")
+public class PrintingBolt extends BaseRichBolt {
+
+	@SuppressWarnings("rawtypes")
+	public void prepare(Map stormConf, TopologyContext context,
+			OutputCollector collector) {
+		// TODO Auto-generated method stub
+		
+	}
+
+	public void execute(Tuple input) {
+		System.out.println("---------[RECEIVED] " + input);
+		
+	}
+
+	public void declareOutputFields(OutputFieldsDeclarer declarer) {
+		// TODO Auto-generated method stub
+		
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/BinaryConverters.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/BinaryConverters.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/BinaryConverters.java
new file mode 100644
index 0000000..d869b40
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/BinaryConverters.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.converters;
+
+public enum BinaryConverters implements IConverter {
+    DEFAULT(new IConverter() {
+
+        public byte[] convert(String s) {
+            return s.getBytes();
+        }
+    })
+    , FROM_HEX_STRING(new HexStringConverter());
+    IConverter _underlying;
+    BinaryConverters(IConverter i) {
+        _underlying = i;
+    }
+
+    public byte[] convert(String s) {
+        return _underlying.convert(s);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/HexStringConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/HexStringConverter.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/HexStringConverter.java
new file mode 100644
index 0000000..526ffba
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/HexStringConverter.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.converters;
+
+public class HexStringConverter implements IConverter {
+    public byte[] convert(String s) {
+        int len = s.length();
+        byte[] data = new byte[len / 2];
+        for (int i = 0; i < len; i += 2) {
+            data[i / 2] = (byte) ((Character.digit(s.charAt(i), 16) << 4)
+                    + Character.digit(s.charAt(i+1), 16));
+        }
+        return data;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/IConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/IConverter.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/IConverter.java
new file mode 100644
index 0000000..eebf219
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/converters/IConverter.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.converters;
+
/**
 * Strategy interface for converting a textual representation into raw bytes,
 * used by the test utilities to materialize binary test fixtures.
 */
public interface IConverter {
    /** Converts the given string to its byte representation. */
    public byte[] convert(String s);
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/filereaders/FileReader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/filereaders/FileReader.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/filereaders/FileReader.java
new file mode 100644
index 0000000..423e940
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/filereaders/FileReader.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.test.filereaders;
+
+import java.io.*;
+import java.util.LinkedList;
+import java.util.List;
+
public class FileReader {
	/**
	 * Reads all lines from the named input. The name is first resolved against
	 * the local filesystem; if no such file exists it is looked up as a
	 * classpath resource via the context class loader.
	 *
	 * Lines are decoded with the platform-default charset, as before
	 * (NOTE(review): consider pinning UTF-8 -- would change behavior on
	 * non-UTF-8 platforms, so left as-is).
	 *
	 * @param filename filesystem path or classpath resource name
	 * @return the lines of the input, in order
	 * @throws FileNotFoundException if the name matches neither a file nor a
	 *         classpath resource (previously surfaced as a bare NPE)
	 * @throws IOException if the input cannot be read
	 */
	public List<String> readFromFile(String filename) throws IOException
	{
		System.out.println("Reading stream from " + filename);

		InputStream stream;
		if (new File(filename).exists()) {
			stream = new FileInputStream(filename);
		}
		else {
			stream = Thread.currentThread().getContextClassLoader()
					.getResourceAsStream(filename);
			if (stream == null) {
				throw new FileNotFoundException(
						"No file or classpath resource named " + filename);
			}
		}

		List<String> lines = new LinkedList<String>();
		// try-with-resources closes the reader and underlying stream; the
		// original leaked both (its close() call was commented out). The
		// redundant DataInputStream wrapper has also been dropped.
		try (BufferedReader br = new BufferedReader(new InputStreamReader(stream))) {
			String strLine;
			while ((strLine = br.readLine()) != null) {
				lines.add(strLine);
			}
		}

		return lines;
	}
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/mock/MockHTable.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/mock/MockHTable.java b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/mock/MockHTable.java
new file mode 100644
index 0000000..f9f764e
--- /dev/null
+++ b/metron-platform/metron-test-utilities/src/main/java/org/apache/metron/test/mock/MockHTable.java
@@ -0,0 +1,672 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.test.mock;
+
+
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.*;
+
+/**
+ * MockHTable.
+ *
+ * This implementation is a selected excerpt from https://gist.github.com/agaoglu/613217
+ */
public class MockHTable implements HTableInterface {

    /**
     * Serializable handle to a process-wide, static registry of mock tables
     * keyed by table name. Tests populate it with addToCache() and code under
     * test looks tables up via getTable().
     */
    public static class Provider implements Serializable {
        private static Map<String, HTableInterface> _cache = new HashMap<>();
        public HTableInterface getTable(Configuration config, String tableName) throws IOException {
            return _cache.get(tableName);
        }
        public static HTableInterface getFromCache(String tableName) {
            return _cache.get(tableName);
        }
        public static HTableInterface addToCache(String tableName, String... columnFamilies) {
            MockHTable ret =  new MockHTable(tableName, columnFamilies);
            _cache.put(tableName, ret);
            return ret;
        }

        public static void clear() {
            _cache.clear();
        }
    }

    private final String tableName;
    private final List<String> columnFamilies = new ArrayList<>();
    private HColumnDescriptor[] descriptors;

    // In-memory table contents: row -> family -> qualifier -> (timestamp -> value).
    // All byte[] keys are ordered with Bytes.BYTES_COMPARATOR to mimic HBase ordering.
    private NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data
            = new TreeMap<>(Bytes.BYTES_COMPARATOR);

    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions) {
        return toKeyValue(row, rowdata, 0, Long.MAX_VALUE, maxVersions);
    }

    // Flattens one row's nested map into KeyValues, newest timestamp first per
    // qualifier, keeping at most maxVersions versions whose timestamps fall in
    // [timestampStart, timestampEnd].
    private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart, long timestampEnd, int maxVersions) {
        List<KeyValue> ret = new ArrayList<KeyValue>();
        for (byte[] family : rowdata.keySet())
            for (byte[] qualifier : rowdata.get(family).keySet()) {
                int versionsAdded = 0;
                for (Map.Entry<Long, byte[]> tsToVal : rowdata.get(family).get(qualifier).descendingMap().entrySet()) {
                    if (versionsAdded++ == maxVersions)
                        break;
                    Long timestamp = tsToVal.getKey();
                    if (timestamp < timestampStart)
                        continue;
                    if (timestamp > timestampEnd)
                        continue;
                    byte[] value = tsToVal.getValue();
                    ret.add(new KeyValue(row, family, qualifier, timestamp, value));
                }
            }
        return ret;
    }
    public MockHTable(String tableName) {
        this.tableName = tableName;
    }

    public MockHTable(String tableName, String... columnFamilies) {
        this.tableName = tableName;
        for(String cf : columnFamilies) {
            addColumnFamily(cf);
        }
    }

    // Registers a column family and rebuilds the descriptor array from scratch.
    public void addColumnFamily(String columnFamily) {
        this.columnFamilies.add(columnFamily);
        descriptors = new HColumnDescriptor[columnFamilies.size()];
        int i = 0;
        for(String cf : columnFamilies) {
            descriptors[i++] = new HColumnDescriptor(cf);
        }
    }


    @Override
    public byte[] getTableName() {
        return Bytes.toBytes(tableName);
    }

    @Override
    public TableName getName() {
        return TableName.valueOf(tableName);
    }

    @Override
    public Configuration getConfiguration() {
        throw new UnsupportedOperationException();
    }

    @Override
    public HTableDescriptor getTableDescriptor() throws IOException {
        HTableDescriptor ret = new HTableDescriptor(tableName);
        for(HColumnDescriptor c : descriptors) {
            ret.addFamily(c);
        }
        return ret;
    }

    @Override
    public boolean exists(Get get) throws IOException {
        if(get.getFamilyMap() == null || get.getFamilyMap().size() == 0) {
            return data.containsKey(get.getRow());
        } else {
            byte[] row = get.getRow();
            if(!data.containsKey(row)) {
                return false;
            }
            for(byte[] family : get.getFamilyMap().keySet()) {
                // NOTE(review): both branches return on the first family examined,
                // so only one requested family is ever checked -- looks like a bug
                // for multi-family Gets; confirm against callers.
                if(!data.get(row).containsKey(family)) {
                    return false;
                } else {
                    return true;
                }
            }
            return true;
        }
    }

    /**
     * Test for the existence of columns in the table, as specified by the Gets.
     * <p/>
     * <p/>
     * This will return an array of booleans. Each value will be true if the related Get matches
     * one or more keys, false if not.
     * <p/>
     * <p/>
     * This is a server-side call so it prevents any data from being transferred to
     * the client.
     *
     * @param gets the Gets
     * @return Array of boolean.  True if the specified Get matches one or more keys, false if not.
     * @throws IOException e
     */
    @Override
    public boolean[] existsAll(List<Get> gets) throws IOException {
        boolean[] ret = new boolean[gets.size()];
        int i = 0;
        for(boolean b : exists(gets)) {
           ret[i++] = b;
        }
        return ret;
    }

    @Override
    public Boolean[] exists(List<Get> list) throws IOException {
        Boolean[] ret = new Boolean[list.size()];
        int i = 0;
        for(Get g : list) {
           ret[i++] = exists(g);
        }
        return ret;
    }

    @Override
    public void batch(List<? extends Row> list, Object[] objects) throws IOException, InterruptedException {
        throw new UnsupportedOperationException();

    }

    /**
     * Dispatches each action to delete/put/get. Only Get actions contribute to
     * the returned array, so its length may be smaller than actions.size()
     * (unlike real HBase, which returns one slot per action).
     *
     * @param actions
     * @deprecated
     */
    @Override
    public Object[] batch(List<? extends Row> actions) throws IOException, InterruptedException {
        List<Result> results = new ArrayList<Result>();
        for (Row r : actions) {
            if (r instanceof Delete) {
                delete((Delete) r);
                continue;
            }
            if (r instanceof Put) {
                put((Put) r);
                continue;
            }
            if (r instanceof Get) {
                results.add(get((Get) r));
            }
        }
        return results.toArray();
    }

    @Override
    public <R> void batchCallback(List<? extends Row> list, Object[] objects, Batch.Callback<R> callback) throws IOException, InterruptedException {
        throw new UnsupportedOperationException();

    }

    /**
     * @param list
     * @param callback
     * @deprecated
     */
    @Override
    public <R> Object[] batchCallback(List<? extends Row> list, Batch.Callback<R> callback) throws IOException, InterruptedException {
        throw new UnsupportedOperationException();
    }

    // Returns the row's cells honoring families/qualifiers and maxVersions of
    // the Get, then applies the Get's filter (next-key hints are ignored).
    @Override
    public Result get(Get get) throws IOException {
        if (!data.containsKey(get.getRow()))
            // Unknown row: an empty Result (no cells), matching HBase semantics.
            return new Result();
        byte[] row = get.getRow();
        List<KeyValue> kvs = new ArrayList<KeyValue>();
        if (!get.hasFamilies()) {
            kvs = toKeyValue(row, data.get(row), get.getMaxVersions());
        } else {
            for (byte[] family : get.getFamilyMap().keySet()){
                if (data.get(row).get(family) == null)
                    continue;
                NavigableSet<byte[]> qualifiers = get.getFamilyMap().get(family);
                if (qualifiers == null || qualifiers.isEmpty())
                    qualifiers = data.get(row).get(family).navigableKeySet();
                for (byte[] qualifier : qualifiers){
                    if (qualifier == null)
                        qualifier = "".getBytes();
                    if (!data.get(row).containsKey(family) ||
                            !data.get(row).get(family).containsKey(qualifier) ||
                            data.get(row).get(family).get(qualifier).isEmpty())
                        continue;
                    // Only the newest version per qualifier is returned on this
                    // branch, regardless of get.getMaxVersions().
                    Map.Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
                    kvs.add(new KeyValue(row,family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
                }
            }
        }
        Filter filter = get.getFilter();
        if (filter != null) {
            filter.reset();
            List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
            for (KeyValue kv : kvs) {
                if (filter.filterAllRemaining()) {
                    break;
                }
                if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
                    continue;
                }
                if (filter.filterKeyValue(kv) == Filter.ReturnCode.INCLUDE) {
                    nkvs.add(kv);
                }
                // ignoring next key hint which is a optimization to reduce file system IO
            }
            if (filter.hasFilterRow()) {
                filter.filterRow();
            }
            kvs = nkvs;
        }

        return new Result(kvs);
    }

    @Override
    public Result[] get(List<Get> list) throws IOException {
        Result[] ret = new Result[list.size()];
        int i = 0;
        for(Get g : list) {
            ret[i++] = get(g);
        }
        return ret;
    }

    /**
     * @param bytes
     * @param bytes1
     * @deprecated
     */
    @Override
    public Result getRowOrBefore(byte[] bytes, byte[] bytes1) throws IOException {
        throw new UnsupportedOperationException();
    }

    // Materializes the whole scan eagerly into a list, then returns a
    // ResultScanner view over it. Fine for tests; not lazy like real HBase.
    @Override
    public ResultScanner getScanner(Scan scan) throws IOException {
        final List<Result> ret = new ArrayList<Result>();
        byte[] st = scan.getStartRow();
        byte[] sp = scan.getStopRow();
        Filter filter = scan.getFilter();

        for (byte[] row : data.keySet()){
            // if row is equal to startRow emit it. When startRow (inclusive) and
            // stopRow (exclusive) is the same, it should not be excluded which would
            // happen w/o this control.
            if (st != null && st.length > 0 &&
                    Bytes.BYTES_COMPARATOR.compare(st, row) != 0) {
                // if row is before startRow do not emit, pass to next row
                if (st != null && st.length > 0 &&
                        Bytes.BYTES_COMPARATOR.compare(st, row) > 0)
                    continue;
                // if row is equal to stopRow or after it do not emit, stop iteration
                if (sp != null && sp.length > 0 &&
                        Bytes.BYTES_COMPARATOR.compare(sp, row) <= 0)
                    break;
            }

            List<KeyValue> kvs = null;
            if (!scan.hasFamilies()) {
                kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(), scan.getMaxVersions());
            } else {
                kvs = new ArrayList<KeyValue>();
                for (byte[] family : scan.getFamilyMap().keySet()){
                    if (data.get(row).get(family) == null)
                        continue;
                    NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(family);
                    if (qualifiers == null || qualifiers.isEmpty())
                        qualifiers = data.get(row).get(family).navigableKeySet();
                    for (byte[] qualifier : qualifiers){
                        if (data.get(row).get(family).get(qualifier) == null)
                            continue;
                        for (Long timestamp : data.get(row).get(family).get(qualifier).descendingKeySet()){
                            if (timestamp < scan.getTimeRange().getMin())
                                continue;
                            if (timestamp > scan.getTimeRange().getMax())
                                continue;
                            byte[] value = data.get(row).get(family).get(qualifier).get(timestamp);
                            kvs.add(new KeyValue(row, family, qualifier, timestamp, value));
                            // NOTE(review): this caps the TOTAL number of cells
                            // collected for the row, not the versions per
                            // qualifier -- confirm whether that is intended.
                            if(kvs.size() == scan.getMaxVersions()) {
                                break;
                            }
                        }
                    }
                }
            }
            if (filter != null) {
                filter.reset();
                List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
                for (KeyValue kv : kvs) {
                    if (filter.filterAllRemaining()) {
                        break;
                    }
                    if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
                        continue;
                    }
                    Filter.ReturnCode filterResult = filter.filterKeyValue(kv);
                    if (filterResult == Filter.ReturnCode.INCLUDE) {
                        nkvs.add(kv);
                    } else if (filterResult == Filter.ReturnCode.NEXT_ROW) {
                        break;
                    }
                    // ignoring next key hint which is a optimization to reduce file system IO
                }
                if (filter.hasFilterRow()) {
                    filter.filterRow();
                }
                kvs = nkvs;
            }
            if (!kvs.isEmpty()) {
                ret.add(new Result(kvs));
            }
        }

        return new ResultScanner() {
            private final Iterator<Result> iterator = ret.iterator();
            public Iterator<Result> iterator() {
                return iterator;
            }
            public Result[] next(int nbRows) throws IOException {
                ArrayList<Result> resultSets = new ArrayList<Result>(nbRows);
                for(int i = 0; i < nbRows; i++) {
                    Result next = next();
                    if (next != null) {
                        resultSets.add(next);
                    } else {
                        break;
                    }
                }
                return resultSets.toArray(new Result[resultSets.size()]);
            }
            public Result next() throws IOException {
                try {
                    return iterator().next();
                } catch (NoSuchElementException e) {
                    // End of scan: real scanners signal exhaustion with null.
                    return null;
                }
            }
            public void close() {}
        };
    }
    @Override
    public ResultScanner getScanner(byte[] family) throws IOException {
        Scan scan = new Scan();
        scan.addFamily(family);
        return getScanner(scan);
    }

    @Override
    public ResultScanner getScanner(byte[] family, byte[] qualifier)
            throws IOException {
        Scan scan = new Scan();
        scan.addColumn(family, qualifier);
        return getScanner(scan);
    }

    // Every Put is also recorded here so tests can assert on what was written.
    List<Put> putLog = new ArrayList<>();

    public List<Put> getPutLog() {
        return putLog;
    }

    @Override
    public void put(Put put) throws IOException {
        putLog.add(put);
        byte[] row = put.getRow();
        NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row, new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
        for (byte[] family : put.getFamilyMap().keySet()){
            NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
            for (KeyValue kv : put.getFamilyMap().get(family)){
                // Replaces HBase's LATEST_TIMESTAMP placeholder with "now",
                // as the region server would on write.
                kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
                byte[] qualifier = kv.getQualifier();
                NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier, new TreeMap<Long, byte[]>());
                qualifierData.put(kv.getTimestamp(), kv.getValue());
            }
        }
    }

    /**
     * Helper method to find a key in a map. If key is not found, newObject is
     * added to map and returned
     *
     * @param map
     *          map to extract value from
     * @param key
     *          key to look for
     * @param newObject
     *          set key to this if not found
     * @return found value or newObject if not found
     */
    private <K, V> V forceFind(NavigableMap<K, V> map, K key, V newObject){
        V data = map.get(key);
        if (data == null){
            data = newObject;
            map.put(key, data);
        }
        return data;
    }

    @Override
    public void put(List<Put> puts) throws IOException {
        for (Put put : puts)
            put(put);
    }

    @Override
    public boolean checkAndPut(byte[] bytes, byte[] bytes1, byte[] bytes2, byte[] bytes3, Put put) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Atomically checks if a row/family/qualifier value matches the expected
     * value. If it does, it adds the put.  If the passed value is null, the check
     * is for the lack of column (ie: non-existance)
     *
     * NOTE(review): not implemented -- unconditionally reports the put as not
     * applied instead of throwing UnsupportedOperationException like the other
     * overload; callers relying on the return value will silently no-op.
     *
     * @param row       to check
     * @param family    column family to check
     * @param qualifier column qualifier to check
     * @param compareOp comparison operator to use
     * @param value     the expected value
     * @param put       data to put if check succeeds
     * @return true if the new put was executed, false otherwise
     * @throws IOException e
     */
    @Override
    public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, Put put) throws IOException {
        return false;
    }

    @Override
    public void delete(Delete delete) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void delete(List<Delete> list) throws IOException {
        throw new UnsupportedOperationException();

    }

    @Override
    public boolean checkAndDelete(byte[] bytes, byte[] bytes1, byte[] bytes2, byte[] bytes3, Delete delete) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * Atomically checks if a row/family/qualifier value matches the expected
     * value. If it does, it adds the delete.  If the passed value is null, the
     * check is for the lack of column (ie: non-existance)
     *
     * NOTE(review): not implemented -- always returns false (see checkAndPut).
     *
     * @param row       to check
     * @param family    column family to check
     * @param qualifier column qualifier to check
     * @param compareOp comparison operator to use
     * @param value     the expected value
     * @param delete    data to delete if check succeeds
     * @return true if the new delete was executed, false otherwise
     * @throws IOException e
     */
    @Override
    public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, Delete delete) throws IOException {
        return false;
    }

    @Override
    public void mutateRow(RowMutations rowMutations) throws IOException {
        throw new UnsupportedOperationException();

    }

    @Override
    public Result append(Append append) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public Result increment(Increment increment) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public long incrementColumnValue(byte[] bytes, byte[] bytes1, byte[] bytes2, long l) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public long incrementColumnValue(byte[] bytes, byte[] bytes1, byte[] bytes2, long l, Durability durability) throws IOException {
        throw new UnsupportedOperationException();
    }

    /**
     * @param bytes
     * @param bytes1
     * @param bytes2
     * @param l
     * @param b
     * @deprecated
     */
    @Override
    public long incrementColumnValue(byte[] bytes, byte[] bytes1, byte[] bytes2, long l, boolean b) throws IOException {
        throw new UnsupportedOperationException();
    }

    @Override
    public boolean isAutoFlush() {
        return autoflush;
    }

    // No write buffering in the mock: puts are applied immediately, so both
    // flushCommits() and close() are intentional no-ops.
    @Override
    public void flushCommits() throws IOException {

    }

    @Override
    public void close() throws IOException {

    }

    @Override
    public CoprocessorRpcChannel coprocessorService(byte[] bytes) {
        throw new UnsupportedOperationException();
    }

    @Override
    public <T extends Service, R> Map<byte[], R> coprocessorService(Class<T> aClass, byte[] bytes, byte[] bytes1, Batch.Call<T, R> call) throws ServiceException, Throwable {
        throw new UnsupportedOperationException();
    }

    @Override
    public <T extends Service, R> void coprocessorService(Class<T> aClass, byte[] bytes, byte[] bytes1, Batch.Call<T, R> call, Batch.Callback<R> callback) throws ServiceException, Throwable {
        throw new UnsupportedOperationException();
    }

    // Tracked only so isAutoFlush() reflects the setters; has no effect on
    // write behavior in this mock.
    boolean autoflush = true;

    /**
     * @param b
     * @deprecated
     */
    @Override
    public void setAutoFlush(boolean b) {
        autoflush = b;
    }

    @Override
    public void setAutoFlush(boolean b, boolean b1) {
        autoflush = b;
    }

    @Override
    public void setAutoFlushTo(boolean b) {
        autoflush = b;
    }

    // Tracked only so the getter/setter pair round-trips; never consulted.
    long writeBufferSize = 0;
    @Override
    public long getWriteBufferSize() {
        return writeBufferSize;
    }

    @Override
    public void setWriteBufferSize(long l) throws IOException {
        writeBufferSize = l;
    }

    @Override
    public <R extends Message> Map<byte[], R> batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message message, byte[] bytes, byte[] bytes1, R r) throws ServiceException, Throwable {
        throw new UnsupportedOperationException();
    }

    @Override
    public <R extends Message> void batchCoprocessorService(Descriptors.MethodDescriptor methodDescriptor, Message message, byte[] bytes, byte[] bytes1, R r, Batch.Callback<R> callback) throws ServiceException, Throwable {
        throw new UnsupportedOperationException();
    }

    /**
     * Atomically checks if a row/family/qualifier value matches the expected value.
     * If it does, it performs the row mutations.  If the passed value is null, the check
     * is for the lack of column (ie: non-existence)
     *
     * NOTE(review): not implemented -- always returns false (see checkAndPut).
     *
     * @param row       to check
     * @param family    column family to check
     * @param qualifier column qualifier to check
     * @param compareOp the comparison operator
     * @param value     the expected value
     * @param mutation  mutations to perform if check succeeds
     * @return true if the new put was executed, false otherwise
     * @throws IOException e
     */
    @Override
    public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException {
        return false;
    }
}



[41/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/tasks/dependencies.yml b/metron-deployment/roles/pycapa/tasks/dependencies.yml
new file mode 100644
index 0000000..19efdcd
--- /dev/null
+++ b/metron-deployment/roles/pycapa/tasks/dependencies.yml
@@ -0,0 +1,34 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install epel-release repository
+  yum: name=epel-release
+
+- name: Install python and tcpdump
+  yum:
+    name: "{{item}}"
+  with_items:
+    - python
+    - tcpdump
+    - git
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Install pip
+  easy_install: name=pip

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/tasks/main.yml b/metron-deployment/roles/pycapa/tasks/main.yml
new file mode 100644
index 0000000..76bdc1c
--- /dev/null
+++ b/metron-deployment/roles/pycapa/tasks/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: dependencies.yml
+- include: pycapa.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/tasks/pycapa.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/tasks/pycapa.yml b/metron-deployment/roles/pycapa/tasks/pycapa.yml
new file mode 100644
index 0000000..4d1a64e
--- /dev/null
+++ b/metron-deployment/roles/pycapa/tasks/pycapa.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Clone pycapa repo
+  git: repo={{ pycapa_repo }} dest={{ pycapa_home }}
+
+- name: Build pycapa
+  shell: "{{ item }}"
+  args:
+    chdir: "{{ pycapa_home }}"
+  with_items:
+    - pip install -r requirements.txt
+    - pip install argparse
+    - python setup.py install
+
+- name: Turn on promiscuous mode for {{ pycapa_sniff_interface }}
+  shell: "ip link set {{ pycapa_sniff_interface }} promisc on"
+
+- name: Install service script
+  template: src=pycapa dest=/etc/init.d/pycapa mode=0755
+
+- name: Start pycapa
+  service: name=pycapa state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/templates/pycapa
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/templates/pycapa b/metron-deployment/roles/pycapa/templates/pycapa
new file mode 100644
index 0000000..fffa13f
--- /dev/null
+++ b/metron-deployment/roles/pycapa/templates/pycapa
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# metron pycapa service
+# chkconfig: 345 20 80
+# description: Metron Pycapa Packet Capture Daemon
+# processname: pycapa
+#
+NAME=pycapa
+DESC="Metron Pycapa Packet Capture"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+LOGFILE="{{ pycapa_log }}"
+EXTRA_ARGS="${@:2}"
+DAEMON_PATH="{{ pycapa_home }}"
+DAEMON="/usr/bin/python"
+DAEMONOPTS="{{ pycapa_main }} -z {{ zookeeper_url }} -t {{ pycapa_topic }} -i {{ pycapa_sniff_interface }}"
+
+case "$1" in
+  start)
+    printf "%-50s" "Starting $NAME..."
+
+    # kick-off the daemon
+    cd $DAEMON_PATH
+    PID=`$DAEMON $DAEMONOPTS >> $LOGFILE 2>&1 & echo $!`
+    if [ -z $PID ]; then
+        printf "%s\n" "Fail"
+    else
+        echo $PID > $PIDFILE
+        printf "%s\n" "Ok"
+    fi
+  ;;
+
+  status)
+    printf "%-50s" "Checking $NAME..."
+    if [ -f $PIDFILE ]; then
+      PID=`cat $PIDFILE`
+      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
+        printf "%s\n" "Process dead but pidfile exists"
+      else
+        echo "Running"
+      fi
+    else
+      printf "%s\n" "Service not running"
+    fi
+  ;;
+
+  stop)
+    printf "%-50s" "Stopping $NAME"
+    PID=`cat $PIDFILE`
+    cd $DAEMON_PATH
+    if [ -f $PIDFILE ]; then
+        kill -HUP $PID
+        printf "%s\n" "Ok"
+        rm -f $PIDFILE
+    else
+        printf "%s\n" "pidfile not found"
+    fi
+  ;;
+
+  restart)
+    $0 stop
+    $0 start
+  ;;
+
+  *)
+    echo "Usage: $0 {status|start|stop|restart}"
+    exit 1
+esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/pycapa/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/pycapa/vars/main.yml b/metron-deployment/roles/pycapa/vars/main.yml
new file mode 100644
index 0000000..5618a8e
--- /dev/null
+++ b/metron-deployment/roles/pycapa/vars/main.yml
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+pycapa_repo: https://github.com/OpenSOC/pycapa.git
+pycapa_home: /usr/local/pycapa
+pycapa_main: "{{ pycapa_home }}/pycapa/pycapa_cli.py"
+pycapa_log: /var/log/pycapa.log
+pycapa_topic: pcap
+pycapa_sniff_interface: "{{ sniff_interface }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/python-pip/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/python-pip/tasks/main.yml b/metron-deployment/roles/python-pip/tasks/main.yml
new file mode 100644
index 0000000..809aca4
--- /dev/null
+++ b/metron-deployment/roles/python-pip/tasks/main.yml
@@ -0,0 +1,25 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install python-pip
+  yum:
+    name: python-pip
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/sensor-test-mode/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/sensor-test-mode/README.md b/metron-deployment/roles/sensor-test-mode/README.md
new file mode 100644
index 0000000..76a3220
--- /dev/null
+++ b/metron-deployment/roles/sensor-test-mode/README.md
@@ -0,0 +1,27 @@
+Sensor Test Mode
+================
+
+A role that configures each of the sensors to produce the maximum amount of telemetry data.  This role is useful only for testing.  It can be useful to support functional, performance, and load testing of Apache Metron.
+
+The role does the following to maximize the amount of telemetry data produced by each Metron sensor.
+
+- Plays a packet capture file through a network interface to simulate live network traffic.
+- Configures [YAF](https://tools.netsa.cert.org/yaf/yaf.html) with `idle-timeout=0`.  This causes a flow record to be produced for every network packet received.
+- Configures [Snort](https://www.snort.org/) to produce an alert for every network packet received.
+
+Getting Started
+---------------
+
+To enable the `sensor-test-mode` role apply the role to the `sensors` host group in your Ansible playbook.
+
+```
+- hosts: sensors
+  roles:
+    - role: sensor-test-mode
+```
+
+The role has also been added to the default `metron_install.yml` playbook so that it can be turned on/off with a property in both the local Virtualbox and the remote EC2 deployments.
+
+```
+sensor_test_mode: True
+```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/sensor-test-mode/files/example.pcap
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/sensor-test-mode/files/example.pcap b/metron-deployment/roles/sensor-test-mode/files/example.pcap
new file mode 100644
index 0000000..06594ec
Binary files /dev/null and b/metron-deployment/roles/sensor-test-mode/files/example.pcap differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/sensor-test-mode/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/sensor-test-mode/meta/main.yml b/metron-deployment/roles/sensor-test-mode/meta/main.yml
new file mode 100644
index 0000000..0e9e5b3
--- /dev/null
+++ b/metron-deployment/roles/sensor-test-mode/meta/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - pcap_replay

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/sensor-test-mode/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/sensor-test-mode/tasks/main.yml b/metron-deployment/roles/sensor-test-mode/tasks/main.yml
new file mode 100644
index 0000000..26b4e2a
--- /dev/null
+++ b/metron-deployment/roles/sensor-test-mode/tasks/main.yml
@@ -0,0 +1,56 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+#
+# load example pcap data to replay
+#
+- name: Install example pcap file
+  copy: src=example.pcap dest={{ pcap_path }}/
+
+- name: Start the pcap-replay service
+  service: name=pcap-replay state=restarted
+
+#
+# configure yaf to generate a flow record for every packet
+#
+- name: Stop running instances of yaf
+  become: True
+  service: name=yaf state=stopped
+
+- name: Configure yaf to generate a flow record for every network packet
+  become: True
+  service: name=yaf state=started args="--idle-timeout 0"
+
+#
+# configure snort to alert on every packet
+#
+- name: Configure snort to use a set of test rules
+  become: True
+  lineinfile:
+    dest: /etc/snort/snort.conf
+    line: "include $RULE_PATH/test.rules"
+
+- name: Create a snort alert for testing that alerts on every packet
+  become: True
+  lineinfile:
+    dest: /etc/snort/rules/test.rules
+    line: "alert tcp any any -> any any (msg:'snort test alert'; sid:999158; )"
+    create: yes
+
+- name: Restart snort
+  become: True
+  service: name=snortd state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/defaults/main.yml b/metron-deployment/roles/snort/defaults/main.yml
new file mode 100644
index 0000000..6c6c0ea
--- /dev/null
+++ b/metron-deployment/roles/snort/defaults/main.yml
@@ -0,0 +1,25 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+snort_version: 2.9.8.0-1
+daq_version: 2.0.6-1
+snort_topic: snort
+snort_alert_csv_path: /var/log/snort/alert.csv
+snort_src_url: "https://snort.org/downloads/archive/snort/snort-{{ snort_version }}.src.rpm"
+snort_community_rules_url: "https://www.snort.org/downloads/community/community-rules.tar.gz"
+dag_src_url: "https://snort.org/downloads/snort/daq-{{ daq_version }}.src.rpm"
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/files/flume-snort.conf
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/files/flume-snort.conf b/metron-deployment/roles/snort/files/flume-snort.conf
new file mode 100644
index 0000000..7dea516
--- /dev/null
+++ b/metron-deployment/roles/snort/files/flume-snort.conf
@@ -0,0 +1,44 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+snort.sources = exec-source
+snort.channels = memory-channel
+snort.sinks = kafka-sink logger-sink
+
+# snort alerts are logged to a file
+snort.sources.exec-source.type = exec
+snort.sources.exec-source.command = tail -F /var/log/snort/alert
+snort.sources.exec-source.restart = true
+snort.sources.exec-source.logStdErr = true
+
+# snort alerts are sent to kafka
+snort.sinks.kafka-sink.type = org.apache.flume.sink.kafka.KafkaSink
+snort.sinks.kafka-sink.brokerList = localhost:9092
+snort.sinks.kafka-sink.topic = snort
+
+# also log events
+snort.sinks.logger-sink.type = logger
+
+# buffer events in memory
+snort.channels.memory-channel.type = memory
+snort.channels.memory-channel.capacity = 1000
+snort.channels.memory-channel.transactionCapacity = 100
+
+# bind the source and sink to the channel
+snort.sources.exec-source.channels = memory-channel
+snort.sinks.kafka-sink.channel = memory-channel
+snort.sinks.logger-sink.channel = memory-channel

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/files/snort.conf
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/files/snort.conf b/metron-deployment/roles/snort/files/snort.conf
new file mode 100644
index 0000000..8a24e0c
--- /dev/null
+++ b/metron-deployment/roles/snort/files/snort.conf
@@ -0,0 +1,726 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+###################################################
+# This file contains a sample snort configuration.
+# You should take the following steps to create your own custom configuration:
+#
+#  1) Set the network variables.
+#  2) Configure the decoder
+#  3) Configure the base detection engine
+#  4) Configure dynamic loaded libraries
+#  5) Configure preprocessors
+#  6) Configure output plugins
+#  7) Customize your rule set
+#  8) Customize preprocessor and decoder rule set
+#  9) Customize shared object rule set
+###################################################
+
+###################################################
+# Step #1: Set the network variables.  For more information, see README.variables
+###################################################
+
+# Setup the network addresses you are protecting
+ipvar HOME_NET 10.0.0.16
+
+# Set up the external network addresses. Leave as "any" in most situations
+ipvar EXTERNAL_NET any
+
+# List of DNS servers on your network
+ipvar DNS_SERVERS $HOME_NET
+
+# List of SMTP servers on your network
+ipvar SMTP_SERVERS $HOME_NET
+
+# List of web servers on your network
+ipvar HTTP_SERVERS $HOME_NET
+
+# List of sql servers on your network
+ipvar SQL_SERVERS $HOME_NET
+
+# List of telnet servers on your network
+ipvar TELNET_SERVERS $HOME_NET
+
+# List of ssh servers on your network
+ipvar SSH_SERVERS $HOME_NET
+
+# List of ftp servers on your network
+ipvar FTP_SERVERS $HOME_NET
+
+# List of sip servers on your network
+ipvar SIP_SERVERS $HOME_NET
+
+# List of ports you run web servers on
+portvar HTTP_PORTS [36,80,81,82,83,84,85,86,87,88,89,90,311,383,555,591,593,631,801,808,818,901,972,1158,1220,1414,1533,1741,1830,1942,2231,2301,2381,2578,2809,2980,3029,3037,3057,3128,3443,3702,4000,4343,4848,5000,5117,5250,5600,5814,6080,6173,6988,7000,7001,7005,7071,7144,7145,7510,7770,7777,7778,7779,8000,8001,8008,8014,8015,8020,8028,8040,8080,8081,8082,8085,8088,8090,8118,8123,8180,8181,8182,8222,8243,8280,8300,8333,8344,8400,8443,8500,8509,8787,8800,8888,8899,8983,9000,9002,9060,9080,9090,9091,9111,9290,9443,9447,9710,9788,9999,10000,11371,12601,13014,15489,19980,29991,33300,34412,34443,34444,40007,41080,44449,50000,50002,51423,53331,55252,55555,56712]
+
+# List of ports you want to look for SHELLCODE on.
+portvar SHELLCODE_PORTS !80
+
+# List of ports you might see oracle attacks on
+portvar ORACLE_PORTS 1024:
+
+# List of ports you want to look for SSH connections on:
+portvar SSH_PORTS 22
+
+# List of ports you run ftp servers on
+portvar FTP_PORTS [21,2100,3535]
+
+# List of ports you run SIP servers on
+portvar SIP_PORTS [5060,5061,5600]
+
+# List of file data ports for file inspection
+portvar FILE_DATA_PORTS [$HTTP_PORTS,110,143]
+
+# List of GTP ports for GTP preprocessor
+portvar GTP_PORTS [2123,2152,3386]
+
+# other variables, these should not be modified
+ipvar AIM_SERVERS [64.12.24.0/23,64.12.28.0/23,64.12.161.0/24,64.12.163.0/24,64.12.200.0/24,205.188.3.0/24,205.188.5.0/24,205.188.7.0/24,205.188.9.0/24,205.188.153.0/24,205.188.179.0/24,205.188.248.0/24]
+
+# Path to your rules files (this can be a relative path)
+# Note for Windows users:  You are advised to make this an absolute path,
+# such as:  c:\snort\rules
+var RULE_PATH rules
+var SO_RULE_PATH so_rules
+var PREPROC_RULE_PATH preproc_rules
+
+# If you are using reputation preprocessor set these
+var WHITE_LIST_PATH /etc/snort/rules
+var BLACK_LIST_PATH /etc/snort/rules
+
+###################################################
+# Step #2: Configure the decoder.  For more information, see README.decode
+###################################################
+
+# Stop generic decode events:
+config disable_decode_alerts
+
+# Stop Alerts on experimental TCP options
+config disable_tcpopt_experimental_alerts
+
+# Stop Alerts on obsolete TCP options
+config disable_tcpopt_obsolete_alerts
+
+# Stop Alerts on T/TCP alerts
+config disable_tcpopt_ttcp_alerts
+
+# Stop Alerts on all other TCPOption type events:
+config disable_tcpopt_alerts
+
+# Stop Alerts on invalid ip options
+config disable_ipopt_alerts
+
+# Alert if value in length field (IP, TCP, UDP) is greater than the length of the packet
+# config enable_decode_oversized_alerts
+
+# Same as above, but drop packet if in Inline mode (requires enable_decode_oversized_alerts)
+# config enable_decode_oversized_drops
+
+# Configure IP / TCP checksum mode
+config checksum_mode: all
+
+# Configure maximum number of flowbit references.  For more information, see README.flowbits
+# config flowbits_size: 64
+
+# Configure ports to ignore
+# config ignore_ports: tcp 21 6667:6671 1356
+# config ignore_ports: udp 1:17 53
+
+# Configure active response for non inline operation. For more information, see README.active
+# config response: eth0 attempts 2
+
+# Configure DAQ related options for inline operation. For more information, see README.daq
+#
+# config daq: <type>
+# config daq_dir: <dir>
+# config daq_mode: <mode>
+# config daq_var: <var>
+#
+# <type> ::= pcap | afpacket | dump | nfq | ipq | ipfw
+# <mode> ::= read-file | passive | inline
+# <var> ::= arbitrary <name>=<value passed to DAQ
+# <dir> ::= path as to where to look for DAQ module so's
+
+# Configure specific UID and GID to run snort as after dropping privs. For more information see snort -h command line options
+#
+# config set_gid:
+# config set_uid:
+
+# Configure default snaplen. Snort defaults to MTU of in use interface. For more information see README
+#
+# config snaplen:
+#
+
+# Configure default bpf_file to use for filtering what traffic reaches snort. For more information see snort -h command line options (-F)
+#
+# config bpf_file:
+#
+
+# Configure default log directory for snort to log to.  For more information see snort -h command line options (-l)
+#
+# config logdir:
+
+
+###################################################
+# Step #3: Configure the base detection engine.  For more information, see  README.decode
+###################################################
+
+# Configure PCRE match limitations
+config pcre_match_limit: 3500
+config pcre_match_limit_recursion: 1500
+
+# Configure the detection engine  See the Snort Manual, Configuring Snort - Includes - Config
+config detection: search-method ac-split search-optimize max-pattern-len 20
+
+# Configure the event queue.  For more information, see README.event_queue
+config event_queue: max_queue 8 log 5 order_events content_length
+
+###################################################
+## Configure GTP if it is to be used.
+## For more information, see README.GTP
+####################################################
+
+# config enable_gtp
+
+###################################################
+# Per packet and rule latency enforcement
+# For more information see README.ppm
+###################################################
+
+# Per Packet latency configuration
+#config ppm: max-pkt-time 250, \
+#   fastpath-expensive-packets, \
+#   pkt-log
+
+# Per Rule latency configuration
+#config ppm: max-rule-time 200, \
+#   threshold 3, \
+#   suspend-expensive-rules, \
+#   suspend-timeout 20, \
+#   rule-log alert
+
+###################################################
+# Configure Perf Profiling for debugging
+# For more information see README.PerfProfiling
+###################################################
+
+#config profile_rules: print all, sort avg_ticks
+#config profile_preprocs: print all, sort avg_ticks
+
+###################################################
+# Configure protocol aware flushing
+# For more information see README.stream5
+###################################################
+config paf_max: 16000
+
+###################################################
+# Step #4: Configure dynamic loaded libraries.
+# For more information, see Snort Manual, Configuring Snort - Dynamic Modules
+###################################################
+
+# path to dynamic preprocessor libraries
+dynamicpreprocessor directory /usr/lib64/snort-2.9.8.0_dynamicpreprocessor
+
+# path to base preprocessor engine
+dynamicengine /usr/lib64/snort-2.9.8.0_dynamicengine/libsf_engine.so
+
+# path to dynamic rules libraries
+#dynamicdetection directory /usr/local/lib/snort_dynamicrules
+
+###################################################
+# Step #5: Configure preprocessors
+# For more information, see the Snort Manual, Configuring Snort - Preprocessors
+###################################################
+
+# GTP Control Channle Preprocessor. For more information, see README.GTP
+# preprocessor gtp: ports { 2123 3386 2152 }
+
+# Inline packet normalization. For more information, see README.normalize
+# Does nothing in IDS mode
+preprocessor normalize_ip4
+preprocessor normalize_tcp: ips ecn stream
+preprocessor normalize_icmp4
+preprocessor normalize_ip6
+preprocessor normalize_icmp6
+
+# Target-based IP defragmentation.  For more information, see README.frag3
+preprocessor frag3_global: max_frags 65536
+preprocessor frag3_engine: policy windows detect_anomalies overlap_limit 10 min_fragment_length 100 timeout 180
+
+# Target-Based stateful inspection/stream reassembly.  For more information, see README.stream5
+preprocessor stream5_global: track_tcp yes, \
+   track_udp yes, \
+   track_icmp no, \
+   max_tcp 262144, \
+   max_udp 131072, \
+   max_active_responses 2, \
+   min_response_seconds 5
+preprocessor stream5_tcp: policy windows, detect_anomalies, require_3whs 180, \
+   overlap_limit 10, small_segments 3 bytes 150, timeout 180, \
+    ports client 21 22 23 25 42 53 70 79 109 110 111 113 119 135 136 137 139 143 \
+        161 445 513 514 587 593 691 1433 1521 1741 2100 3306 6070 6665 6666 6667 6668 6669 \
+        7000 8181 32770 32771 32772 32773 32774 32775 32776 32777 32778 32779, \
+    ports both 36 80 81 82 83 84 85 86 87 88 89 90 110 311 383 443 465 563 555 591 593 631 636 801 808 818 901 972 989 992 993 994 995 1158 1220 1414 1533 1741 1830 1942 2231 2301 2381 2578 2809 2980 3029 3037 3057 3128 3443 3702 4000 4343 4848 5000 5117 5250 5600 5814 6080 6173 6988 7907 7000 7001 7005 7071 7144 7145 7510 7802 7770 7777 7778 7779 \
+        7801 7900 7901 7902 7903 7904 7905 7906 7908 7909 7910 7911 7912 7913 7914 7915 7916 \
+        7917 7918 7919 7920 8000 8001 8008 8014 8015 8020 8028 8040 8080 8081 8082 8085 8088 8090 8118 8123 8180 8181 8182 8222 8243 8280 8300 8333 8344 8400 8443 8500 8509 8787 8800 8888 8899 8983 9000 9002 9060 9080 9090 9091 9111 9290 9443 9447 9710 9788 9999 10000 11371 12601 13014 15489 19980 29991 33300 34412 34443 34444 40007 41080 44449 50000 50002 51423 53331 55252 55555 56712
+preprocessor stream5_udp: timeout 180
+
+# performance statistics.  For more information, see the Snort Manual, Configuring Snort - Preprocessors - Performance Monitor
+# preprocessor perfmonitor: time 300 file /var/snort/snort.stats pktcnt 10000
+
+# HTTP normalization and anomaly detection.  For more information, see README.http_inspect
+preprocessor http_inspect: global iis_unicode_map unicode.map 1252 compress_depth 65535 decompress_depth 65535
+preprocessor http_inspect_server: server default \
+    http_methods { GET POST PUT SEARCH MKCOL COPY MOVE LOCK UNLOCK NOTIFY POLL BCOPY BDELETE BMOVE LINK UNLINK OPTIONS HEAD DELETE TRACE TRACK CONNECT SOURCE SUBSCRIBE UNSUBSCRIBE PROPFIND PROPPATCH BPROPFIND BPROPPATCH RPC_CONNECT PROXY_SUCCESS BITS_POST CCM_POST SMS_POST RPC_IN_DATA RPC_OUT_DATA RPC_ECHO_DATA } \
+    chunk_length 500000 \
+    server_flow_depth 0 \
+    client_flow_depth 0 \
+    post_depth 65495 \
+    oversize_dir_length 500 \
+    max_header_length 750 \
+    max_headers 100 \
+    max_spaces 200 \
+    small_chunk_length { 10 5 } \
+    ports { 36 80 81 82 83 84 85 86 87 88 89 90 311 383 555 591 593 631 801 808 818 901 972 1158 1220 1414 1533 1741 1830 1942 2231 2301 2381 2578 2809 2980 3029 3037 3057 3128 3443 3702 4000 4343 4848 5000 5117 5250 5600 5814 6080 6173 6988 7000 7001 7005 7071 7144 7145 7510 7770 7777 7778 7779 8000 8001 8008 8014 8015 8020 8028 8040 8080 8081 8082 8085 8088 8090 8118 8123 8180 8181 8182 8222 8243 8280 8300 8333 8344 8400 8443 8500 8509 8787 8800 8888 8899 8983 9000 9002 9060 9080 9090 9091 9111 9290 9443 9447 9710 9788 9999 10000 11371 12601 13014 15489 19980 29991 33300 34412 34443 34444 40007 41080 44449 50000 50002 51423 53331 55252 55555 56712 } \
+    non_rfc_char { 0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 } \
+    enable_cookie \
+    extended_response_inspection \
+    inspect_gzip \
+    normalize_utf \
+    unlimited_decompress \
+    normalize_javascript \
+    apache_whitespace no \
+    ascii no \
+    bare_byte no \
+    directory no \
+    double_decode no \
+    iis_backslash no \
+    iis_delimiter no \
+    iis_unicode no \
+    multi_slash no \
+    utf_8 no \
+    u_encode yes \
+    webroot no
+
+# ONC-RPC normalization and anomaly detection.  For more information, see the Snort Manual, Configuring Snort - Preprocessors - RPC Decode
+preprocessor rpc_decode: 111 32770 32771 32772 32773 32774 32775 32776 32777 32778 32779 no_alert_multiple_requests no_alert_large_fragments no_alert_incomplete
+
+# Back Orifice detection.
+preprocessor bo
+
+# FTP / Telnet normalization and anomaly detection.  For more information, see README.ftptelnet
+preprocessor ftp_telnet: global inspection_type stateful encrypted_traffic no check_encrypted
+preprocessor ftp_telnet_protocol: telnet \
+    ayt_attack_thresh 20 \
+    normalize ports { 23 } \
+    detect_anomalies
+preprocessor ftp_telnet_protocol: ftp server default \
+    def_max_param_len 100 \
+    ports { 21 2100 3535 } \
+    telnet_cmds yes \
+    ignore_telnet_erase_cmds yes \
+    ftp_cmds { ABOR ACCT ADAT ALLO APPE AUTH CCC CDUP } \
+    ftp_cmds { CEL CLNT CMD CONF CWD DELE ENC EPRT } \
+    ftp_cmds { EPSV ESTA ESTP FEAT HELP LANG LIST LPRT } \
+    ftp_cmds { LPSV MACB MAIL MDTM MIC MKD MLSD MLST } \
+    ftp_cmds { MODE NLST NOOP OPTS PASS PASV PBSZ PORT } \
+    ftp_cmds { PROT PWD QUIT REIN REST RETR RMD RNFR } \
+    ftp_cmds { RNTO SDUP SITE SIZE SMNT STAT STOR STOU } \
+    ftp_cmds { STRU SYST TEST TYPE USER XCUP XCRC XCWD } \
+    ftp_cmds { XMAS XMD5 XMKD XPWD XRCP XRMD XRSQ XSEM } \
+    ftp_cmds { XSEN XSHA1 XSHA256 } \
+    alt_max_param_len 0 { ABOR CCC CDUP ESTA FEAT LPSV NOOP PASV PWD QUIT REIN STOU SYST XCUP XPWD } \
+    alt_max_param_len 200 { ALLO APPE CMD HELP NLST RETR RNFR STOR STOU XMKD } \
+    alt_max_param_len 256 { CWD RNTO } \
+    alt_max_param_len 400 { PORT } \
+    alt_max_param_len 512 { SIZE } \
+    chk_str_fmt { ACCT ADAT ALLO APPE AUTH CEL CLNT CMD } \
+    chk_str_fmt { CONF CWD DELE ENC EPRT EPSV ESTP HELP } \
+    chk_str_fmt { LANG LIST LPRT MACB MAIL MDTM MIC MKD } \
+    chk_str_fmt { MLSD MLST MODE NLST OPTS PASS PBSZ PORT } \
+    chk_str_fmt { PROT REST RETR RMD RNFR RNTO SDUP SITE } \
+    chk_str_fmt { SIZE SMNT STAT STOR STRU TEST TYPE USER } \
+    chk_str_fmt { XCRC XCWD XMAS XMD5 XMKD XRCP XRMD XRSQ } \
+    chk_str_fmt { XSEM XSEN XSHA1 XSHA256 } \
+    cmd_validity ALLO < int [ char R int ] > \
+    cmd_validity EPSV < [ { char 12 | char A char L char L } ] > \
+    cmd_validity MACB < string > \
+    cmd_validity MDTM < [ date nnnnnnnnnnnnnn[.n[n[n]]] ] string > \
+    cmd_validity MODE < char ASBCZ > \
+    cmd_validity PORT < host_port > \
+    cmd_validity PROT < char CSEP > \
+    cmd_validity STRU < char FRPO [ string ] > \
+    cmd_validity TYPE < { char AE [ char NTC ] | char I | char L [ number ] } >
+preprocessor ftp_telnet_protocol: ftp client default \
+    max_resp_len 256 \
+    bounce yes \
+    ignore_telnet_erase_cmds yes \
+    telnet_cmds yes
+
+
+# SMTP normalization and anomaly detection.  For more information, see README.SMTP
+preprocessor smtp: ports { 25 465 587 691 } \
+    inspection_type stateful \
+    b64_decode_depth 0 \
+    qp_decode_depth 0 \
+    bitenc_decode_depth 0 \
+    uu_decode_depth 0 \
+    log_mailfrom \
+    log_rcptto \
+    log_filename \
+    log_email_hdrs \
+    normalize cmds \
+    normalize_cmds { ATRN AUTH BDAT CHUNKING DATA DEBUG EHLO EMAL ESAM ESND ESOM ETRN EVFY } \
+    normalize_cmds { EXPN HELO HELP IDENT MAIL NOOP ONEX QUEU QUIT RCPT RSET SAML SEND SOML } \
+    normalize_cmds { STARTTLS TICK TIME TURN TURNME VERB VRFY X-ADAT X-DRCP X-ERCP X-EXCH50 } \
+    normalize_cmds { X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
+    max_command_line_len 512 \
+    max_header_line_len 1000 \
+    max_response_line_len 512 \
+    alt_max_command_line_len 260 { MAIL } \
+    alt_max_command_line_len 300 { RCPT } \
+    alt_max_command_line_len 500 { HELP HELO ETRN EHLO } \
+    alt_max_command_line_len 255 { EXPN VRFY ATRN SIZE BDAT DEBUG EMAL ESAM ESND ESOM EVFY IDENT NOOP RSET } \
+    alt_max_command_line_len 246 { SEND SAML SOML AUTH TURN ETRN DATA RSET QUIT ONEX QUEU STARTTLS TICK TIME TURNME VERB X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
+    valid_cmds { ATRN AUTH BDAT CHUNKING DATA DEBUG EHLO EMAL ESAM ESND ESOM ETRN EVFY } \
+    valid_cmds { EXPN HELO HELP IDENT MAIL NOOP ONEX QUEU QUIT RCPT RSET SAML SEND SOML } \
+    valid_cmds { STARTTLS TICK TIME TURN TURNME VERB VRFY X-ADAT X-DRCP X-ERCP X-EXCH50 } \
+    valid_cmds { X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
+    xlink2state { enabled }
+
+# Portscan detection.  For more information, see README.sfportscan
+preprocessor sfportscan: proto  { all } memcap { 10000000 } sense_level { low }
+
+# ARP spoof detection.  For more information, see the Snort Manual - Configuring Snort - Preprocessors - ARP Spoof Preprocessor
+# preprocessor arpspoof
+# preprocessor arpspoof_detect_host: 192.168.40.1 f0:0f:00:f0:0f:00
+
+# SSH anomaly detection.  For more information, see README.ssh
+preprocessor ssh: server_ports { 22 } \
+                  autodetect \
+                  max_client_bytes 19600 \
+                  max_encrypted_packets 20 \
+                  max_server_version_len 100 \
+                  enable_respoverflow enable_ssh1crc32 \
+                  enable_srvoverflow enable_protomismatch
+
+# SMB / DCE-RPC normalization and anomaly detection.  For more information, see README.dcerpc2
+preprocessor dcerpc2: memcap 102400, events [co ]
+preprocessor dcerpc2_server: default, policy WinXP, \
+    detect [smb [139,445], tcp 135, udp 135, rpc-over-http-server 593], \
+    autodetect [tcp 1025:, udp 1025:, rpc-over-http-server 1025:], \
+    smb_max_chain 3, smb_invalid_shares ["C$", "D$", "ADMIN$"]
+
+# DNS anomaly detection.  For more information, see README.dns
+preprocessor dns: ports { 53 } enable_rdata_overflow
+
+# SSL anomaly detection and traffic bypass.  For more information, see README.ssl
+preprocessor ssl: ports { 443 465 563 636 989 992 993 994 995 5061 7801 7802 7900 7901 7902 7903 7904 7905 7906 7907 7908 7909 7910 7911 7912 7913 7914 7915 7916 7917 7918 7919 7920 }, trustservers, noinspect_encrypted
+
+# SDF sensitive data preprocessor.  For more information see README.sensitive_data
+preprocessor sensitive_data: alert_threshold 25
+
+# SIP Session Initiation Protocol preprocessor.  For more information see README.sip
+preprocessor sip: max_sessions 40000, \
+   ports { 5060 5061 5600 }, \
+   methods { invite \
+             cancel \
+             ack \
+             bye \
+             register \
+             options \
+             refer \
+             subscribe \
+             update \
+             join \
+             info \
+             message \
+             notify \
+             benotify \
+             do \
+             qauth \
+             sprack \
+             publish \
+             service \
+             unsubscribe \
+             prack }, \
+   max_uri_len 512, \
+   max_call_id_len 80, \
+   max_requestName_len 20, \
+   max_from_len 256, \
+   max_to_len 256, \
+   max_via_len 1024, \
+   max_contact_len 512, \
+   max_content_len 2048
+
+# IMAP preprocessor.  For more information see README.imap
+preprocessor imap: \
+   ports { 143 } \
+   b64_decode_depth 0 \
+   qp_decode_depth 0 \
+   bitenc_decode_depth 0 \
+   uu_decode_depth 0
+
+# POP preprocessor. For more information see README.pop
+preprocessor pop: \
+   ports { 110 } \
+   b64_decode_depth 0 \
+   qp_decode_depth 0 \
+   bitenc_decode_depth 0 \
+   uu_decode_depth 0
+
+# Modbus preprocessor. For more information see README.modbus
+preprocessor modbus: ports { 502 }
+
+# DNP3 preprocessor. For more information see README.dnp3
+preprocessor dnp3: ports { 20000 } \
+   memcap 262144 \
+   check_crc
+
+# Reputation preprocessor. For more information see README.reputation
+preprocessor reputation: \
+   memcap 500, \
+   priority whitelist, \
+   nested_ip inner, \
+   whitelist $WHITE_LIST_PATH/white_list.rules, \
+   blacklist $BLACK_LIST_PATH/black_list.rules
+
+###################################################
+# Step #6: Configure output plugins
+# For more information, see Snort Manual, Configuring Snort - Output Modules
+###################################################
+
+# unified2
+# Recommended for most installs
+# output unified2: filename merged.log, limit 128, nostamp, mpls_event_types, vlan_event_types
+
+# Additional configuration for specific types of installs
+# output alert_unified2: filename snort.alert, limit 128, nostamp
+# output log_unified2: filename snort.log, limit 128, nostamp
+
+# syslog
+# output alert_syslog: LOG_AUTH LOG_ALERT
+
+# pcap
+# output log_tcpdump: tcpdump.log
+
+# metadata reference data.  do not modify these lines
+include classification.config
+include reference.config
+
+
+###################################################
+# Step #7: Customize your rule set
+# For more information, see Snort Manual, Writing Snort Rules
+#
+# NOTE: All categories are enabled in this conf file
+###################################################
+
+include $RULE_PATH/community.rules
+
+# site specific rules
+# include $RULE_PATH/local.rules
+# include $RULE_PATH/app-detect.rules
+# include $RULE_PATH/attack-responses.rules
+# include $RULE_PATH/backdoor.rules
+# include $RULE_PATH/bad-traffic.rules
+# include $RULE_PATH/blacklist.rules
+# include $RULE_PATH/botnet-cnc.rules
+# include $RULE_PATH/browser-chrome.rules
+# include $RULE_PATH/browser-firefox.rules
+# include $RULE_PATH/browser-ie.rules
+# include $RULE_PATH/browser-other.rules
+# include $RULE_PATH/browser-plugins.rules
+# include $RULE_PATH/browser-webkit.rules
+# include $RULE_PATH/chat.rules
+# include $RULE_PATH/content-replace.rules
+# include $RULE_PATH/ddos.rules
+# include $RULE_PATH/dns.rules
+# include $RULE_PATH/dos.rules
+# include $RULE_PATH/experimental.rules
+# include $RULE_PATH/exploit-kit.rules
+# include $RULE_PATH/exploit.rules
+# include $RULE_PATH/file-executable.rules
+# include $RULE_PATH/file-flash.rules
+# include $RULE_PATH/file-identify.rules
+# include $RULE_PATH/file-image.rules
+# include $RULE_PATH/file-java.rules
+# include $RULE_PATH/file-multimedia.rules
+# include $RULE_PATH/file-office.rules
+# include $RULE_PATH/file-other.rules
+# include $RULE_PATH/file-pdf.rules
+# include $RULE_PATH/finger.rules
+# include $RULE_PATH/ftp.rules
+# include $RULE_PATH/icmp-info.rules
+# include $RULE_PATH/icmp.rules
+# include $RULE_PATH/imap.rules
+# include $RULE_PATH/indicator-compromise.rules
+# include $RULE_PATH/indicator-obfuscation.rules
+# include $RULE_PATH/indicator-scan.rules
+# include $RULE_PATH/indicator-shellcode.rules
+# include $RULE_PATH/info.rules
+# include $RULE_PATH/malware-backdoor.rules
+# include $RULE_PATH/malware-cnc.rules
+# include $RULE_PATH/malware-other.rules
+# include $RULE_PATH/malware-tools.rules
+# include $RULE_PATH/misc.rules
+# include $RULE_PATH/multimedia.rules
+# include $RULE_PATH/mysql.rules
+# include $RULE_PATH/netbios.rules
+# include $RULE_PATH/nntp.rules
+# include $RULE_PATH/oracle.rules
+# include $RULE_PATH/os-linux.rules
+# include $RULE_PATH/os-mobile.rules
+# include $RULE_PATH/os-other.rules
+# include $RULE_PATH/os-solaris.rules
+# include $RULE_PATH/os-windows.rules
+# include $RULE_PATH/other-ids.rules
+# include $RULE_PATH/p2p.rules
+# include $RULE_PATH/phishing-spam.rules
+# include $RULE_PATH/policy-multimedia.rules
+# include $RULE_PATH/policy-other.rules
+# include $RULE_PATH/policy.rules
+# include $RULE_PATH/policy-social.rules
+# include $RULE_PATH/policy-spam.rules
+# include $RULE_PATH/pop2.rules
+# include $RULE_PATH/pop3.rules
+# include $RULE_PATH/protocol-dns.rules
+# include $RULE_PATH/protocol-finger.rules
+# include $RULE_PATH/protocol-ftp.rules
+# include $RULE_PATH/protocol-icmp.rules
+# include $RULE_PATH/protocol-imap.rules
+# include $RULE_PATH/protocol-nntp.rules
+# include $RULE_PATH/protocol-other.rules
+# include $RULE_PATH/protocol-pop.rules
+# include $RULE_PATH/protocol-rpc.rules
+# include $RULE_PATH/protocol-scada.rules
+# include $RULE_PATH/protocol-services.rules
+# include $RULE_PATH/protocol-snmp.rules
+# include $RULE_PATH/protocol-telnet.rules
+# include $RULE_PATH/protocol-tftp.rules
+# include $RULE_PATH/protocol-voip.rules
+# include $RULE_PATH/pua-adware.rules
+# include $RULE_PATH/pua-other.rules
+# include $RULE_PATH/pua-p2p.rules
+# include $RULE_PATH/pua-toolbars.rules
+# include $RULE_PATH/rpc.rules
+# include $RULE_PATH/rservices.rules
+# include $RULE_PATH/scada.rules
+# include $RULE_PATH/scan.rules
+# include $RULE_PATH/server-apache.rules
+# include $RULE_PATH/server-iis.rules
+# include $RULE_PATH/server-mail.rules
+# include $RULE_PATH/server-mssql.rules
+# include $RULE_PATH/server-mysql.rules
+# include $RULE_PATH/server-oracle.rules
+# include $RULE_PATH/server-other.rules
+# include $RULE_PATH/server-samba.rules
+# include $RULE_PATH/server-webapp.rules
+# include $RULE_PATH/shellcode.rules
+# include $RULE_PATH/smtp.rules
+# include $RULE_PATH/snmp.rules
+# include $RULE_PATH/specific-threats.rules
+# include $RULE_PATH/spyware-put.rules
+# include $RULE_PATH/sql.rules
+# include $RULE_PATH/telnet.rules
+# include $RULE_PATH/tftp.rules
+# include $RULE_PATH/virus.rules
+# include $RULE_PATH/voip.rules
+# include $RULE_PATH/web-activex.rules
+# include $RULE_PATH/web-attacks.rules
+# include $RULE_PATH/web-cgi.rules
+# include $RULE_PATH/web-client.rules
+# include $RULE_PATH/web-coldfusion.rules
+# include $RULE_PATH/web-frontpage.rules
+# include $RULE_PATH/web-iis.rules
+# include $RULE_PATH/web-misc.rules
+# include $RULE_PATH/web-php.rules
+# include $RULE_PATH/x11.rules
+
+###################################################
+# Step #8: Customize your preprocessor and decoder alerts
+# For more information, see README.decoder_preproc_rules
+###################################################
+
+# decoder and preprocessor event rules
+# include $PREPROC_RULE_PATH/preprocessor.rules
+# include $PREPROC_RULE_PATH/decoder.rules
+# include $PREPROC_RULE_PATH/sensitive-data.rules
+
+###################################################
+# Step #9: Customize your Shared Object Snort Rules
+# For more information, see http://vrt-blog.snort.org/2009/01/using-vrt-certified-shared-object-rules.html
+###################################################
+
+# dynamic library rules
+# include $SO_RULE_PATH/browser-ie.rules
+# include $SO_RULE_PATH/browser-other.rules
+# include $SO_RULE_PATH/exploit-kit.rules
+# include $SO_RULE_PATH/file-flash.rules
+# include $SO_RULE_PATH/file-image.rules
+# include $SO_RULE_PATH/file-java.rules
+# include $SO_RULE_PATH/file-multimedia.rules
+# include $SO_RULE_PATH/file-office.rules
+# include $SO_RULE_PATH/file-other.rules
+# include $SO_RULE_PATH/file-pdf.rules
+# include $SO_RULE_PATH/indicator-shellcode.rules
+# include $SO_RULE_PATH/malware-cnc.rules
+# include $SO_RULE_PATH/malware-other.rules
+# include $SO_RULE_PATH/netbios.rules
+# include $SO_RULE_PATH/os-linux.rules
+# include $SO_RULE_PATH/os-other.rules
+# include $SO_RULE_PATH/os-windows.rules
+# include $SO_RULE_PATH/policy-social.rules
+# include $SO_RULE_PATH/protocol-dns.rules
+# include $SO_RULE_PATH/protocol-nntp.rules
+# include $SO_RULE_PATH/protocol-other.rules
+# include $SO_RULE_PATH/protocol-snmp.rules
+# include $SO_RULE_PATH/protocol-voip.rules
+# include $SO_RULE_PATH/pua-p2p.rules
+# include $SO_RULE_PATH/server-apache.rules
+# include $SO_RULE_PATH/server-iis.rules
+# include $SO_RULE_PATH/server-mail.rules
+# include $SO_RULE_PATH/server-mysql.rules
+# include $SO_RULE_PATH/server-oracle.rules
+# include $SO_RULE_PATH/server-other.rules
+# include $SO_RULE_PATH/server-webapp.rules
+
+# legacy dynamic library rule files
+# include $SO_RULE_PATH/bad-traffic.rules
+# include $SO_RULE_PATH/browser-ie.rules
+# include $SO_RULE_PATH/chat.rules
+# include $SO_RULE_PATH/dos.rules
+# include $SO_RULE_PATH/exploit.rules
+# include $SO_RULE_PATH/file-flash.rules
+# include $SO_RULE_PATH/icmp.rules
+# include $SO_RULE_PATH/imap.rules
+# include $SO_RULE_PATH/misc.rules
+# include $SO_RULE_PATH/multimedia.rules
+# include $SO_RULE_PATH/netbios.rules
+# include $SO_RULE_PATH/nntp.rules
+# include $SO_RULE_PATH/p2p.rules
+# include $SO_RULE_PATH/smtp.rules
+# include $SO_RULE_PATH/snmp.rules
+# include $SO_RULE_PATH/specific-threats.rules
+# include $SO_RULE_PATH/web-activex.rules
+# include $SO_RULE_PATH/web-client.rules
+# include $SO_RULE_PATH/web-iis.rules
+# include $SO_RULE_PATH/web-misc.rules
+
+# Event thresholding or suppression commands. See threshold.conf
+include threshold.conf

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/meta/main.yml b/metron-deployment/roles/snort/meta/main.yml
new file mode 100644
index 0000000..f742973
--- /dev/null
+++ b/metron-deployment/roles/snort/meta/main.yml
@@ -0,0 +1,24 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+  - epel
+  - libselinux-python
+  - build-tools
+  - kafka-client
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/tasks/daq.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/tasks/daq.yml b/metron-deployment/roles/snort/tasks/daq.yml
new file mode 100644
index 0000000..c8bd4b0
--- /dev/null
+++ b/metron-deployment/roles/snort/tasks/daq.yml
@@ -0,0 +1,36 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download daq
+  get_url:
+    # NOTE(review): variable is spelled 'dag_src_url' ('dag' vs 'daq') — looks
+    # like a typo; confirm it matches the name declared in this role's
+    # defaults/vars before renaming either side.
+    url: "{{ dag_src_url }}"
+    dest: "/tmp/daq-{{ daq_version }}.src.rpm"
+
+- name: Build daq
+  shell: "rpmbuild --rebuild daq-{{ daq_version }}.src.rpm"
+  args:
+    chdir: /tmp
+    creates: /root/rpmbuild/RPMS/x86_64/daq-{{ daq_version }}.x86_64.rpm
+
+- name: Install daq
+  yum:
+    name: /root/rpmbuild/RPMS/x86_64/daq-{{ daq_version }}.x86_64.rpm
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/tasks/flume.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/tasks/flume.yml b/metron-deployment/roles/snort/tasks/flume.yml
new file mode 100644
index 0000000..e5bd593
--- /dev/null
+++ b/metron-deployment/roles/snort/tasks/flume.yml
@@ -0,0 +1,31 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install flume configurations
+  copy: src={{ item.src }} dest={{ item.dest }}
+  with_items:
+    - { src: flume-snort.conf, dest: /etc/flume/conf/flume-snort.conf }
+
+- name: Configure destination for snort alerts
+  lineinfile: dest=/etc/flume/conf/flume-snort.conf regexp={{ item.regexp }} line={{ item.line }}
+  with_items:
+    - { regexp: '^snort\.sinks\.kafka-sink\.brokerList.*$',
+        line: 'snort.sinks.kafka-sink.brokerList = {{ kafka_broker_url }}' }
+    - { regexp: '^snort\.sinks\.kafka-sink\.topic.*$',
+        line: 'snort.sinks.kafka-sink.topic = {{ snort_topic }}'}
+    - { regexp: '^snort\.sources\.exec-source\.command.*$',
+        line: 'snort.sources.exec-source.command = tail -F {{ snort_alert_csv_path }}' }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/tasks/main.yml b/metron-deployment/roles/snort/tasks/main.yml
new file mode 100644
index 0000000..80755be
--- /dev/null
+++ b/metron-deployment/roles/snort/tasks/main.yml
@@ -0,0 +1,31 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: daq.yml
+
+- include: snort.yml
+
+- include: flume.yml
+
+- name: Turn on promiscuous mode for {{ sniff_interface }}
+  shell: "ip link set {{ sniff_interface }} promisc on"
+
+- name: Start snort
+  service: name=snortd state=restarted
+
+- name: Start flume service to consume snort alerts
+  service: name=flume-agent state=restarted args=snort

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/snort/tasks/snort.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/snort/tasks/snort.yml b/metron-deployment/roles/snort/tasks/snort.yml
new file mode 100644
index 0000000..6bfecc2
--- /dev/null
+++ b/metron-deployment/roles/snort/tasks/snort.yml
@@ -0,0 +1,85 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download snort
+  get_url:
+    url: "{{ snort_src_url }}"
+    dest: "/tmp/snort-{{ snort_version }}.src.rpm"
+
+- name: Build snort
+  shell: "rpmbuild --rebuild snort-{{ snort_version }}.src.rpm"
+  args:
+    chdir: /tmp
+    creates: /root/rpmbuild/RPMS/x86_64/snort-{{ snort_version }}.x86_64.rpm
+
+- name: Install snort
+  yum:
+    name: /root/rpmbuild/RPMS/x86_64/snort-{{ snort_version }}.x86_64.rpm
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Download snort community rules
+  get_url:
+    url: "{{ snort_community_rules_url }}"
+    dest: "/tmp/community-rules.tar.gz"
+
+- name: Extract tarball
+  unarchive:
+    src: "/tmp/community-rules.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: "/tmp/community-rules"
+
+- name: Install snort rules
+  shell: "{{ item }}"
+  args:
+    chdir: /tmp
+  with_items:
+    - cp -r community-rules/community.rules /etc/snort/rules
+    - touch /etc/snort/rules/white_list.rules
+    - touch /etc/snort/rules/black_list.rules
+    - touch /var/log/snort/alerts
+    - chown -R snort:snort /etc/snort
+
+- name: Uncomment all snort community rules
+  shell: sed -i 's/^# alert/alert/' /etc/snort/rules/community.rules
+
+- name: Download snort configuration
+  copy: src=snort.conf dest=/etc/snort/snort.conf
+
+- name: Configure network
+  lineinfile:
+    dest: /etc/snort/snort.conf
+    regexp: "^ipvar HOME_NET.*$"
+    line: "ipvar HOME_NET {{ ansible_eth0.ipv4.address }}"
+
+- name: Configure alerting
+  lineinfile:
+    dest: /etc/snort/snort.conf
+    line: "output alert_csv: {{ snort_alert_csv_path }} default"
+
+- name: Configure sysconfig
+  lineinfile:
+    dest: /etc/sysconfig/snort
+    regexp: "{{ item.regexp }}"
+    line: "{{ item.line }}"
+  with_items:
+    - { regexp: "^ALERTMODE=.*$",     line: "ALERTMODE=" }
+    - { regexp: "^NO_PACKET_LOG=.*$", line: "NO_PACKET_LOG=1" }
+    - { regexp: "^INTERFACE=.*$",     line: "INTERFACE={{ sniff_interface }}" }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/defaults/main.yml b/metron-deployment/roles/solr/defaults/main.yml
new file mode 100644
index 0000000..b40d534
--- /dev/null
+++ b/metron-deployment/roles/solr/defaults/main.yml
@@ -0,0 +1,29 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+rhel_hdp_utils_install_url: http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6/hdp-util.repo
+solr_install_path: /opt/lucidworks-hdpsearch/solr
+solr_user: solr
+solr_collection_name: Metron
+solr_config_dir: "{{ solr_install_path }}/server/solr/configsets/basic_configs/conf"
+solr_bin_dir: "/opt/lucidworks-hdpsearch/solr/bin"
+solr_config_name: "metron_conf"
+solr_number_shards: "{{ groups['search'] | length }}"
+solr_replication_factor: 1
+solr_autoSoftCommit_maxTime: 60
+solr_cmd: "{{ solr_bin_dir }}/solr create_collection -c {{ solr_collection_name }} -d {{ solr_config_dir }} -n {{ solr_config_name }} -shards {{ solr_number_shards }} -replicationFactor {{ solr_replication_factor }}"
+hdp_utils_repo_path: /etc/yum.repos.d/HDP-UTILS.repo
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/files/schema.xml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/files/schema.xml b/metron-deployment/roles/solr/files/schema.xml
new file mode 100644
index 0000000..43452a2
--- /dev/null
+++ b/metron-deployment/roles/solr/files/schema.xml
@@ -0,0 +1,191 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<schema name="metron" version="1.5">
+
+    <field name="_version_" type="long" indexed="true" stored="true"/>
+    <field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false"/>
+    <field name="sensorType" type="string" indexed="true" stored="true" required="true"/>
+
+    <dynamicField name="*_i" type="int" indexed="true" stored="true"/>
+    <dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_s" type="string" indexed="true" stored="true"/>
+    <dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_l" type="long" indexed="true" stored="true"/>
+    <dynamicField name="*_ls" type="long" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
+    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_en" type="text_en" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
+    <dynamicField name="*_bs" type="boolean" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_f" type="float" indexed="true" stored="true"/>
+    <dynamicField name="*_fs" type="float" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_d" type="double" indexed="true" stored="true"/>
+    <dynamicField name="*_ds" type="double" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false"/>
+    <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
+    <dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="*_p" type="location" indexed="true" stored="true"/>
+    <dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
+    <dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
+    <dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
+    <dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
+    <dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
+    <dynamicField name="*_c" type="currency" indexed="true" stored="true"/>
+    <dynamicField name="ignored_*" type="ignored" multiValued="true"/>
+    <dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>
+    <dynamicField name="random_*" type="random"/>
+
+    <uniqueKey>id</uniqueKey>
+
+    <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
+    <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
+    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
+    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
+    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
+    <fieldType name="binary" class="solr.BinaryField"/>
+    <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
+    <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
+        <analyzer>
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.EnglishPossessiveFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.EnglishPossessiveFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
+        <analyzer type="index">
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory"
+                    ignoreCase="true"
+                    words="lang/stopwords_en.txt"
+            />
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.PorterStemFilterFactory"/>
+        </analyzer>
+    </fieldType>
+
+    <fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
+        <analyzer>
+            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_en.txt"/>
+            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
+            <filter class="solr.EnglishMinimalStemFilterFactory"/>
+            <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
+        <analyzer type="index">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.ReversedWildcardFilterFactory" withOriginal="true"
+                    maxPosAsterisk="3" maxPosQuestion="2" maxFractionAsterisk="0.33"/>
+        </analyzer>
+        <analyzer type="query">
+            <tokenizer class="solr.StandardTokenizerFactory"/>
+            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
+            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
+        <analyzer>
+            <tokenizer class="solr.KeywordTokenizerFactory"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+            <filter class="solr.TrimFilterFactory"/>
+            <filter class="solr.PatternReplaceFilterFactory"
+                    pattern="([^a-z])" replacement="" replace="all"
+            />
+        </analyzer>
+    </fieldType>
+    <fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
+        <analyzer>
+            <tokenizer class="solr.KeywordTokenizerFactory"/>
+            <filter class="solr.LowerCaseFilterFactory"/>
+        </analyzer>
+    </fieldType>
+    <fieldType name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField"/>
+    <fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
+    <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
+    <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
+               geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers"/>
+    <fieldType name="bbox" class="solr.BBoxField"
+               geo="true" distanceUnits="kilometers" numberType="_bbox_coord"/>
+    <fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
+    <fieldType name="currency" class="solr.CurrencyField" precisionStep="8" defaultCurrency="USD" currencyConfig="currency.xml"/>
+</schema>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/meta/main.yml b/metron-deployment/roles/solr/meta/main.yml
new file mode 100644
index 0000000..454dd37
--- /dev/null
+++ b/metron-deployment/roles/solr/meta/main.yml
@@ -0,0 +1,21 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+  - java_jdk
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/tasks/main.yml b/metron-deployment/roles/solr/tasks/main.yml
new file mode 100644
index 0000000..cfbb6b5
--- /dev/null
+++ b/metron-deployment/roles/solr/tasks/main.yml
@@ -0,0 +1,74 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Check for HDP-UTILS repo file
+  stat: path={{ hdp_utils_repo_path }}
+  register: hdp_utils
+
+
+- name: Install HDP-UTILs Repo
+  get_url:
+    url: "{{ rhel_hdp_utils_install_url }}"
+    dest: /etc/yum.repos.d/HDP-UTILS.repo
+  when: hdp_utils.stat.exists == False
+
+- name: Install HDP-UTIL gpg key
+  rpm_key:
+    state: present
+    key: http://pgp.mit.edu/pks/lookup?op=get&search=0xB9733A7A07513CAD
+  when: hdp_utils.stat.exists == False
+
+- name: Install Solr
+  yum:
+    name: lucidworks-hdpsearch
+    state: present
+
+- name: Create solr.xml from template
+  template:
+    src: solr.xml
+    dest: "{{ solr_install_path }}/server/solr"
+    mode: 0644
+    owner: "{{ solr_user }}"
+    group: "{{ solr_user }}"
+
+- name: Copy schema.xml to {{ inventory_hostname }}
+  copy:
+    src: schema.xml
+    dest: "{{ solr_config_dir }}"
+    mode: 0644
+    owner: "{{ solr_user }}"
+    group: "{{ solr_user }}"
+
+- name: Create solrconfig.xml from template
+  template:
+    src: solrconfig.xml
+    dest: "{{ solr_config_dir }}"
+    mode: 0644
+    owner: "{{ solr_user }}"
+    group: "{{ solr_user }}"
+
+- name: Start Solr
+  service:
+    name: solr
+    state: restarted
+    enabled: yes
+
+- name: Create Collection {{ solr_collection_name }} with {{ solr_number_shards }} shard(s) and replication factor {{ solr_replication_factor }}
+  shell: "{{ solr_cmd }}"
+  ignore_errors: yes
+  register: result
+  failed_when: result.rc == 1 and result.stderr.find("already exists!") == -1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/solr/templates/solr.xml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/solr/templates/solr.xml b/metron-deployment/roles/solr/templates/solr.xml
new file mode 100644
index 0000000..407df13
--- /dev/null
+++ b/metron-deployment/roles/solr/templates/solr.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+   This is an example of a simple "solr.xml" file for configuring one or 
+   more Solr Cores, as well as allowing Cores to be added, removed, and 
+   reloaded via HTTP requests.
+
+   More information about options available in this configuration file, 
+   and Solr Core administration can be found online:
+   http://wiki.apache.org/solr/CoreAdmin
+-->
+
+<solr>
+
+  <solrcloud>
+
+    <str name="host">${host:}</str>
+    <int name="hostPort">${jetty.port:8983}</int>
+    <str name="hostContext">${hostContext:solr}</str>
+
+    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
+
+    <str name="zkHost">{{ zookeeper_url }}</str>
+    <int name="zkClientTimeout">${zkClientTimeout:30000}</int>
+    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:600000}</int>
+    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:60000}</int>
+
+  </solrcloud>
+
+  <shardHandlerFactory name="shardHandlerFactory"
+    class="HttpShardHandlerFactory">
+    <int name="socketTimeout">${socketTimeout:600000}</int>
+    <int name="connTimeout">${connTimeout:60000}</int>
+  </shardHandlerFactory>
+
+</solr>


[36/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationsTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationsTest.java
new file mode 100644
index 0000000..18e6ee8
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationsTest.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import junit.framework.Assert;
+import nl.jqno.equalsverifier.EqualsVerifier;
+import nl.jqno.equalsverifier.Warning;
+import org.apache.metron.common.configuration.Configurations;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class ConfigurationsTest {
+
+  @Test
+  public void test() throws IOException {
+    EqualsVerifier.forClass(Configurations.class).suppress(Warning.NONFINAL_FIELDS, Warning.NULL_FIELDS).usingGetClass().verify();
+    Configurations configurations = new Configurations();
+    try {
+      configurations.updateConfig("someConfig", (byte[]) null);
+      Assert.fail("Updating a config with null should throw an IllegalStateException");
+    } catch(IllegalStateException e) {}
+    Assert.assertTrue(configurations.toString() != null && configurations.toString().length() > 0);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/EnrichmentConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/EnrichmentConfigTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/EnrichmentConfigTest.java
new file mode 100644
index 0000000..b7e3e02
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/EnrichmentConfigTest.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.utils.JSONUtils;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class EnrichmentConfigTest {
+  /**
+   {
+      "index": "bro",
+      "batchSize": 5,
+      "enrichmentFieldMap": {
+        "geo": ["ip_dst_addr", "ip_src_addr"],
+        "host": ["host"]
+                            },
+      "threatIntelFieldMap": {
+        "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+                             },
+      "fieldToThreatIntelTypeMap": {
+        "ip_dst_addr" : [ "malicious_ip" ]
+       ,"ip_src_addr" : [ "malicious_ip" ]
+                                   }
+    }
+   */
+  @Multiline
+  public static String sourceConfigStr;
+
+  /**
+{
+  "zkQuorum" : "localhost:2181"
+ ,"sensorToFieldList" : {
+  "bro" : {
+           "type" : "THREAT_INTEL"
+          ,"fieldToEnrichmentTypes" : {
+            "ip_src_addr" : [ "playful" ]
+           ,"ip_dst_addr" : [ "playful" ]
+                                      }
+          }
+                        }
+}
+     */
+  @Multiline
+  public static String threatIntelConfigStr;
+
+  @Test
+  public void testThreatIntel() throws Exception {
+
+    SensorEnrichmentConfig broSc = JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+
+
+    EnrichmentConfig config = JSONUtils.INSTANCE.load(threatIntelConfigStr, EnrichmentConfig.class);
+    final Map<String, SensorEnrichmentConfig> outputScs = new HashMap<>();
+    EnrichmentConfig.SourceConfigHandler scHandler = new EnrichmentConfig.SourceConfigHandler() {
+      @Override
+      public SensorEnrichmentConfig readConfig(String sensor) throws Exception {
+        if(sensor.equals("bro")) {
+          return JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+        }
+        else {
+          throw new IllegalStateException("Tried to retrieve an unexpected sensor: " + sensor);
+        }
+      }
+
+      @Override
+      public void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception {
+        outputScs.put(sensor, config);
+      }
+    };
+    EnrichmentConfig.updateSensorConfigs(scHandler, config.getSensorToFieldList());
+    Assert.assertNotNull(outputScs.get("bro"));
+    Assert.assertNotSame(outputScs.get("bro"), broSc);
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getThreatIntelFieldMap().get(Constants.SIMPLE_HBASE_THREAT_INTEL).size()
+                       , 2
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getThreatIntelFieldMap()
+                                  .get(Constants.SIMPLE_HBASE_THREAT_INTEL)
+                                  .contains("ip_src_addr")
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getThreatIntelFieldMap()
+                                  .get(Constants.SIMPLE_HBASE_THREAT_INTEL)
+                                  .contains("ip_dst_addr")
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().keySet().size()
+                       , 2
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_src_addr").size()
+                       , 2
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_src_addr").contains("playful")
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_src_addr").contains("malicious_ip")
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_dst_addr").size()
+                       , 2
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_dst_addr").contains("playful")
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToThreatIntelTypeMap().get("ip_dst_addr").contains("malicious_ip")
+                       );
+  }
+
+  /**
+   {
+  "zkQuorum" : "localhost:2181"
+ ,"sensorToFieldList" : {
+  "bro" : {
+           "type" : "ENRICHMENT"
+          ,"fieldToEnrichmentTypes" : {
+            "ip_src_addr" : [ "playful" ]
+           ,"ip_dst_addr" : [ "playful" ]
+                                      }
+          }
+                        }
+   }
+   */
+  @Multiline
+  public static String enrichmentConfigStr;
+  @Test
+  public void testEnrichment() throws Exception {
+
+    SensorEnrichmentConfig broSc = JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+
+    EnrichmentConfig config = JSONUtils.INSTANCE.load(enrichmentConfigStr, EnrichmentConfig.class);
+    final Map<String, SensorEnrichmentConfig> outputScs = new HashMap<>();
+    EnrichmentConfig.SourceConfigHandler scHandler = new EnrichmentConfig.SourceConfigHandler() {
+      @Override
+      public SensorEnrichmentConfig readConfig(String sensor) throws Exception {
+        if(sensor.equals("bro")) {
+          return JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+        }
+        else {
+          throw new IllegalStateException("Tried to retrieve an unexpected sensor: " + sensor);
+        }
+      }
+
+      @Override
+      public void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception {
+        outputScs.put(sensor, config);
+      }
+    };
+    EnrichmentConfig.updateSensorConfigs(scHandler, config.getSensorToFieldList());
+    Assert.assertNotNull(outputScs.get("bro"));
+    Assert.assertNotSame(outputScs.get("bro"), broSc);
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getEnrichmentFieldMap().get(Constants.SIMPLE_HBASE_ENRICHMENT).size()
+                       , 2
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getEnrichmentFieldMap()
+                                  .get(Constants.SIMPLE_HBASE_ENRICHMENT)
+                                  .contains("ip_src_addr")
+                       );
+    Assert.assertTrue( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getEnrichmentFieldMap()
+                                  .get(Constants.SIMPLE_HBASE_ENRICHMENT)
+                                  .contains("ip_dst_addr")
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToEnrichmentTypeMap().keySet().size()
+                       , 2
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToEnrichmentTypeMap().get("ip_src_addr").size()
+                       , 1
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToEnrichmentTypeMap().get("ip_src_addr").get(0)
+                       , "playful"
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToEnrichmentTypeMap().get("ip_dst_addr").size()
+                       , 1
+                       );
+    Assert.assertEquals( outputScs.get("bro").toJSON()
+                       , outputScs.get("bro").getFieldToEnrichmentTypeMap().get("ip_dst_addr").get(0)
+                       , "playful"
+                       );
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentConfigTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentConfigTest.java
new file mode 100644
index 0000000..93a3d54
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentConfigTest.java
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import junit.framework.Assert;
+import nl.jqno.equalsverifier.EqualsVerifier;
+import nl.jqno.equalsverifier.Warning;
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class SensorEnrichmentConfigTest {
+
+  @Test
+  public void test() throws IOException {
+    EqualsVerifier.forClass(SensorEnrichmentConfig.class).suppress(Warning.NONFINAL_FIELDS).usingGetClass().verify();
+    Map<String, byte[]> testSensorConfigMap = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    byte[] sensorConfigBytes = testSensorConfigMap.get("yaf");
+    SensorEnrichmentConfig sensorEnrichmentConfig = SensorEnrichmentConfig.fromBytes(sensorConfigBytes);
+    Assert.assertNotNull(sensorEnrichmentConfig);
+    Assert.assertTrue(sensorEnrichmentConfig.toString() != null && sensorEnrichmentConfig.toString().length() > 0);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/resources/config/BasicTldExtractorTest.config
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/resources/config/BasicTldExtractorTest.config b/metron-platform/metron-common/src/test/resources/config/BasicTldExtractorTest.config
new file mode 100644
index 0000000..317add3
--- /dev/null
+++ b/metron-platform/metron-common/src/test/resources/config/BasicTldExtractorTest.config
@@ -0,0 +1,20 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#BasicTldExtractorConfig
+logFile=effective_tld_names.dat

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/resources/config/global.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/resources/config/global.json b/metron-platform/metron-common/src/test/resources/config/global.json
new file mode 100644
index 0000000..44ce6b1
--- /dev/null
+++ b/metron-platform/metron-common/src/test/resources/config/global.json
@@ -0,0 +1,3 @@
+{
+  "configuration.class.test.property": "Configuration"
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/resources/config/sensors/bro.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/resources/config/sensors/bro.json b/metron-platform/metron-common/src/test/resources/config/sensors/bro.json
new file mode 100644
index 0000000..8886495
--- /dev/null
+++ b/metron-platform/metron-common/src/test/resources/config/sensors/bro.json
@@ -0,0 +1,19 @@
+{
+  "index": "bro",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_dst_addr" : [ "malicious_ip" ]
+    ,"ip_src_addr" : [ "malicious_ip" ]
+  }
+}
+


[27/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/HBaseBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/HBaseBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/HBaseBolt.java
new file mode 100644
index 0000000..1eff028
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/HBaseBolt.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+
+
+import java.io.IOException;
+import java.util.Map;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.log4j.Logger;
+import org.apache.metron.hbase.Connector;
+import org.apache.metron.hbase.HTableConnector;
+import org.apache.metron.hbase.TupleTableConfig;
+import org.json.simple.JSONObject;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+
+import org.apache.metron.common.utils.ErrorUtils;
+
+/**
+ * A Storm bolt for putting data into HBase.
+ * <p>
+ * By default works in batch mode by enabling HBase's client-side write buffer. Enabling batch mode
+ * is recommended for high throughput, but it can be disabled in {@link TupleTableConfig}.
+ * <p>
+ * The HBase configuration is picked up from the first <tt>hbase-site.xml</tt> encountered in the
+ * classpath
+ * @see TupleTableConfig
+ * @see HTableConnector
+ */
@SuppressWarnings("serial")
public class HBaseBolt implements IRichBolt {
  private static final Logger LOG = Logger.getLogger(HBaseBolt.class);
  // Port assumed when a zookeeper connect-string entry carries no explicit ":port".
  private static final String DEFAULT_ZK_PORT = "2181";

  protected OutputCollector collector;
  // Table configuration: table name, column families/qualifiers, batching settings.
  protected TupleTableConfig conf;
  // When true (the default) the tuple is acked immediately after a successful put.
  protected boolean autoAck = true;
  // HBase connection; created in prepare() unless one was injected beforehand.
  protected Connector connector;
  private String _quorum;
  private String _port;

  /**
   * Constructs the bolt from an already-split quorum host list and port.
   *
   * @param conf   table configuration
   * @param quorum comma-separated zookeeper host names, without ports
   * @param port   zookeeper client port shared by every quorum member
   */
  public HBaseBolt(TupleTableConfig conf, String quorum, String port) {
    this.conf = conf;
    _quorum = quorum;
    _port = port;
  }
  /**
   * Convenience constructor taking a full zookeeper connect string of the
   * form {@code host1:port1,host2:port2,...}.
   */
  public HBaseBolt(final TupleTableConfig conf, String zkConnectString) throws IOException {
    this(conf, zkConnectStringToHosts(zkConnectString), zkConnectStringToPort(zkConnectString));
  }
  /**
   * Extracts just the host names from a {@code host:port,host:port} connect
   * string and rejoins them with commas.
   */
  public static String zkConnectStringToHosts(String connString) {
    Iterable<String> hostPortPairs = Splitter.on(',').split(connString);
    return Joiner.on(',').join(Iterables.transform(hostPortPairs, new Function<String, String>() {

      @Override
      public String apply(String hostPortPair) {
        return Iterables.getFirst(Splitter.on(':').split(hostPortPair), "");
      }
    }));
  }
  /**
   * Extracts the client port from the FIRST entry of the connect string,
   * falling back to DEFAULT_ZK_PORT when no port is present.  All quorum
   * members are assumed to share a single port.
   */
  public static String zkConnectStringToPort(String connString) {
    String hostPortPair = Iterables.getFirst(Splitter.on(",").split(connString), "");
    return Iterables.getLast(Splitter.on(":").split(hostPortPair),DEFAULT_ZK_PORT);
  }


  /** Registers the configured columns, then builds a fresh HTableConnector. */
  public Connector createConnector() throws IOException{
    initialize();
    return new HTableConnector(conf, _quorum, _port);
  }

  /**
   * Parses {@code conf.getFields()} — a spec shaped like
   * {@code cf1:cq1,cq2|cf2:cq3} — and registers every column family /
   * qualifier pair on the table config.
   * NOTE(review): StringUtils.split treats EACH character of "\\|" as a
   * separator, so a literal backslash would also split; this only behaves as
   * intended because field specs contain '|' and never '\'.
   */
  public void initialize() {
    TupleTableConfig hbaseBoltConfig = conf;
    String allColumnFamiliesColumnQualifiers = conf.getFields();
    String[] tokenizedColumnFamiliesWithColumnQualifiers = StringUtils
            .split(allColumnFamiliesColumnQualifiers, "\\|");
    for (String tokenizedColumnFamilyWithColumnQualifiers : tokenizedColumnFamiliesWithColumnQualifiers) {
      String[] cfCqTokens = StringUtils.split( tokenizedColumnFamilyWithColumnQualifiers, ":");
      String columnFamily = cfCqTokens[0];
      String[] columnQualifiers = StringUtils.split(cfCqTokens[1], ",");
      for (String columnQualifier : columnQualifiers) {
        hbaseBoltConfig.addColumn(columnFamily, columnQualifier);
      }
      // NOTE(review): re-enables auto-ack once per family; redundant inside the loop.
      setAutoAck(true);
    }
  }

  /** {@inheritDoc} */
  @SuppressWarnings("rawtypes")
  
  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
    this.collector = collector;

    try {
      // Keep a connector that was injected before prepare() (e.g. a test double).
      if(connector == null) {
        this.connector = createConnector();
      }
		
    } catch (IOException e) {
      throw new RuntimeException(e);
    }

    LOG.info("Preparing HBaseBolt for table: " + this.conf.getTableName());
  }

  /** {@inheritDoc} */
  
  public void execute(Tuple input) {
    try {
      Put p = conf.getPutFromTuple(input);
      this.connector.put(p);
    } catch (IOException ex) {

      // Report the failure on the "error" stream, then rethrow.  The tuple is
      // never acked on this path, so presumably Storm replays it — TODO confirm
      // that double-reporting (error stream + replay) is intended.
  		JSONObject error = ErrorUtils.generateErrorMessage(
  				"Alerts problem: " + input.toString(), ex);
  		collector.emit("error", new Values(error));
  		
      throw new RuntimeException(ex);
    }

    if (this.autoAck) {
      this.collector.ack(input);
    }
  }

  /** {@inheritDoc} */
  
  public void cleanup() {
    this.connector.close();
  }

  /** Declares only the "error" stream; successful puts emit nothing downstream. */
  
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
	  declarer.declareStream("error", new Fields("HBase"));
  }

  /** {@inheritDoc} */
  
  public Map<String, Object> getComponentConfiguration() {
    return null;
  }

  /**
   * @return the autoAck
   */
  public boolean isAutoAck() {
    return autoAck;
  }

  /**
   * @param autoAck the autoAck to set
   */
  public void setAutoAck(boolean autoAck) {
    this.autoAck = autoAck;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/JoinBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/JoinBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/JoinBolt.java
new file mode 100644
index 0000000..68e56ed
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/JoinBolt.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import com.google.common.base.Joiner;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import com.google.common.collect.Sets;
+import org.apache.metron.common.bolt.ConfiguredBolt;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+
public abstract class JoinBolt<V> extends ConfiguredBolt {

  private static final Logger LOG = LoggerFactory
          .getLogger(JoinBolt.class);
  protected OutputCollector collector;

  // Cache of message key -> (stream id -> partial message).  A key's entry
  // accumulates until every expected stream has contributed, then the joined
  // message is emitted and the entry invalidated.
  protected transient CacheLoader<String, Map<String, V>> loader;
  protected transient LoadingCache<String, Map<String, V>> cache;
  // Maximum number of in-flight joins retained; must be set before prepare().
  protected Long maxCacheSize;
  // Minutes a partial join may wait for its remaining streams; must be set
  // before prepare().  Expired entries are silently dropped by the cache.
  protected Long maxTimeRetain;

  public JoinBolt(String zookeeperUrl) {
    super(zookeeperUrl);
  }

  // Fluent setter; call before the topology submits the bolt.
  public JoinBolt withMaxCacheSize(long maxCacheSize) {
    this.maxCacheSize = maxCacheSize;
    return this;
  }

  // Fluent setter; call before the topology submits the bolt.
  public JoinBolt withMaxTimeRetain(long maxTimeRetain) {
    this.maxTimeRetain = maxTimeRetain;
    return this;
  }

  /**
   * Validates configuration, builds the join cache, then delegates to the
   * subclass {@link #prepare(Map, TopologyContext)} hook.
   */
  @Override
  public void prepare(Map map, TopologyContext topologyContext, OutputCollector outputCollector) {
    super.prepare(map, topologyContext, outputCollector);
    this.collector = outputCollector;
    if (this.maxCacheSize == null)
      throw new IllegalStateException("maxCacheSize must be specified");
    if (this.maxTimeRetain == null)
      throw new IllegalStateException("maxTimeRetain must be specified");
    loader = new CacheLoader<String, Map<String, V>>() {
      public Map<String, V> load(String key) throws Exception {
        return new HashMap<>();
      }
    };
    cache = CacheBuilder.newBuilder().maximumSize(maxCacheSize)
            .expireAfterWrite(maxTimeRetain, TimeUnit.MINUTES)
            .build(loader);
    prepare(map, topologyContext);
  }

  /**
   * Records the incoming (streamId, message) pair under the tuple's key.
   * When messages from all streams reported by {@link #getStreamIds(Object)}
   * have arrived, emits the joined message on the "message" stream (anchored
   * to this tuple), acks, and evicts the cache entry; otherwise the partial
   * state is written back to the cache.
   * NOTE(review): tuples for incomplete joins are not acked here — presumably
   * they are acked when the final stream's tuple completes the join, or they
   * time out and replay; verify against the topology's anchoring scheme.
   */
  @SuppressWarnings("unchecked")
  @Override
  public void execute(Tuple tuple) {
    String streamId = tuple.getSourceStreamId();
    String key = (String) tuple.getValueByField("key");
    V message = (V) tuple.getValueByField("message");
    try {
      Map<String, V> streamMessageMap = cache.get(key);
      if (streamMessageMap.containsKey(streamId)) {
        // Duplicate contribution: the new message overwrites the old one below.
        LOG.warn(String.format("Received key %s twice for " +
                "stream %s", key, streamId));
      }
      streamMessageMap.put(streamId, message);
      Set<String> streamIds = getStreamIds(message);
      Set<String> streamMessageKeys = streamMessageMap.keySet();
      if (streamMessageKeys.size() == streamIds.size() && Sets.symmetricDifference
              (streamMessageKeys, streamIds)
              .isEmpty()) {
        collector.emit("message", tuple, new Values(key, joinMessages
                (streamMessageMap)));
        collector.ack(tuple);
        cache.invalidate(key);
      } else {
        cache.put(key, streamMessageMap);
        if(LOG.isDebugEnabled()) {
          LOG.debug(getClass().getSimpleName() + ": Missed joining portions for "+ key + ". Expected " + Joiner.on(",").join(streamIds)
                  + " != " + Joiner.on(",").join(streamMessageKeys)
                   );
        }
      }
    } catch (ExecutionException e) {
      collector.reportError(e);
      LOG.error(e.getMessage(), e);
    }
  }

  /** Single output stream "message" carrying (key, joined message). */
  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    declarer.declareStream("message", new Fields("key", "message"));
  }

  // Subclass hook invoked at the end of prepare().
  public abstract void prepare(Map map, TopologyContext topologyContext);

  // Returns the set of stream ids that must all contribute before a join completes.
  public abstract Set<String> getStreamIds(V value);

  // Merges the per-stream partial messages into the single joined message.
  public abstract V joinMessages(Map<String, V> streamMessageMap);
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/SplitBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/SplitBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/SplitBolt.java
new file mode 100644
index 0000000..4ff387c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/SplitBolt.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import org.apache.metron.common.bolt.ConfiguredBolt;
+
+import java.util.Map;
+import java.util.Set;
+
// Base bolt that fans a generated message out to multiple streams: the whole
// message goes to "message", and each piece from splitMessage() goes to its
// own stream, so downstream enrichment bolts can work in parallel and a
// JoinBolt can reassemble the results.
public abstract class SplitBolt<T> extends
        ConfiguredBolt {

  protected OutputCollector collector;

  public SplitBolt(String zookeeperUrl) {
    super(zookeeperUrl);
  }

  // final: subclasses customize via the abstract prepare(map, context) hook.
  @Override
  public final void prepare(Map map, TopologyContext topologyContext,
                       OutputCollector outputCollector) {
    super.prepare(map, topologyContext, outputCollector);
    collector = outputCollector;
    prepare(map, topologyContext);
  }

  // final: subclasses customize via generateMessage()/splitMessage().
  @Override
  public final void execute(Tuple tuple) {
    emit(tuple, generateMessage(tuple));
  }

  // Declares "message", one stream per id from getStreamIds(), an "error"
  // stream, plus any extra streams the subclass adds in declareOther().
  @Override
  public final void declareOutputFields(OutputFieldsDeclarer declarer) {
    declarer.declareStream("message", new Fields("key", "message"));
    for (String streamId : getStreamIds()) {
      declarer.declareStream(streamId, new Fields("key", "message"));
    }
    declarer.declareStream("error", new Fields("message"));
    declareOther(declarer);
  }

  /**
   * Emits the full message on "message" (anchored to the input tuple), then
   * each split part on its own stream (unanchored), acks the tuple, and
   * finally gives the subclass a chance to emit extras via emitOther().
   * A null message is silently dropped without an ack — NOTE(review): confirm
   * the un-acked tuple is handled upstream (replay vs. timeout).
   */
  public void emit(Tuple tuple, T message) {
    if (message == null) return;
    String key = getKey(tuple, message);
    collector.emit("message", tuple, new Values(key, message));
    Map<String, T> streamMessageMap = splitMessage(message);
    for (String streamId : streamMessageMap.keySet()) {
      T streamMessage = streamMessageMap.get(streamId);
      if (streamMessage == null) {
        streamMessage = getDefaultMessage(streamId);
      }
      collector.emit(streamId, new Values(key, streamMessage));
    }
    collector.ack(tuple);
    emitOther(tuple, message);
  }

  // Default behavior when a split produced null for a stream: fail fast.
  // Subclasses may override to supply a placeholder message instead.
  protected T getDefaultMessage(String streamId) {
    throw new IllegalArgumentException("Could not find a message for" +
            " stream: " + streamId);
  }

  // Subclass hook invoked at the end of prepare().
  public abstract void prepare(Map map, TopologyContext topologyContext);

  // Stream ids this bolt will declare and emit split parts on.
  public abstract Set<String> getStreamIds();

  // Correlation key joining the split parts back together downstream.
  public abstract String getKey(Tuple tuple, T message);

  // Builds the message for a tuple; returning null skips emission entirely.
  public abstract T generateMessage(Tuple tuple);

  // Partitions the message into per-stream pieces (stream id -> piece).
  public abstract Map<String, T> splitMessage(T message);

  // Hook for declaring any additional output streams.
  public abstract void declareOther(OutputFieldsDeclarer declarer);

  // Hook for emitting any additional tuples after the standard emission.
  public abstract void emitOther(Tuple tuple, T message);


}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
new file mode 100644
index 0000000..a2b0e78
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.Map;
+
+public class ThreatIntelJoinBolt extends EnrichmentJoinBolt {
+
+  protected static final Logger LOG = LoggerFactory
+          .getLogger(ThreatIntelJoinBolt.class);
+
+  public ThreatIntelJoinBolt(String zookeeperUrl) {
+    super(zookeeperUrl);
+  }
+
+  @Override
+  public Map<String, List<String>> getFieldMap(String sourceType) {
+    SensorEnrichmentConfig config = configurations.getSensorEnrichmentConfig(sourceType);
+    if(config != null) {
+      return config.getThreatIntelFieldMap();
+    }
+    else {
+      LOG.error("Unable to retrieve sensor config: " + sourceType);
+      return null;
+    }
+  }
+
+  @Override
+  public JSONObject joinMessages(Map<String, JSONObject> streamMessageMap) {
+    JSONObject ret = super.joinMessages(streamMessageMap);
+    for(Object key : ret.keySet()) {
+      if(key.toString().startsWith("threatintels") && !key.toString().endsWith(".ts")) {
+        ret.put("is_alert" , "true");
+        break;
+      }
+    }
+    return ret;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBolt.java
new file mode 100644
index 0000000..692c327
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBolt.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import org.apache.metron.enrichment.utils.ThreatIntelUtils;
+
+import java.util.List;
+import java.util.Map;
+
// Splitter for the threat-intel phase.  Reuses the enrichment splitting
// machinery, swapping in the threat-intel field map and key-naming scheme.
public class ThreatIntelSplitterBolt extends EnrichmentSplitterBolt {

  public ThreatIntelSplitterBolt(String zookeeperUrl) {
    super(zookeeperUrl);
  }

  // Field map comes from the sensor's threatIntelFieldMap rather than its
  // enrichment field map.  NOTE(review): unlike ThreatIntelJoinBolt, a missing
  // sensor config here would NPE instead of being logged — confirm intended.
  @Override
  protected Map<String, List<String>> getFieldMap(String sensorType) {
    return configurations.getSensorEnrichmentConfig(sensorType).getThreatIntelFieldMap();
  }

  // Keys are named via the threat-intel convention instead of the enrichment one.
  @Override
  protected String getKeyName(String type, String field) {
    return ThreatIntelUtils.getThreatIntelKey(type, field);
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/cli/LatencySummarizer.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/cli/LatencySummarizer.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/cli/LatencySummarizer.java
new file mode 100644
index 0000000..b40f2ad
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/cli/LatencySummarizer.java
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.cli;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.commons.cli.*;
+import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.metron.common.utils.JSONUtils;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.util.*;
+
/**
 * Command-line tool that reads one JSON document per line from stdin, pulls
 * out every field ending in ".ts" (a per-stage timestamp), and summarizes the
 * latency between each pair of stages, grouped by how far apart the stages
 * are in the pipeline.
 */
public class LatencySummarizer {
  // Ordered (fromStage, toStage) pair identifying a latency measurement.
  public static class Pair extends AbstractMap.SimpleEntry<String, String> {
    public Pair(String key, String value) {
      super(key, value);
    }
  }

  public static class LatencyStats {
    // depth (distance between stages) -> (stage pair -> accumulated latencies)
    private NavigableMap<Integer, Map<Pair, DescriptiveStatistics>> depthMap = new TreeMap<>();
    // Pipeline stage ordering from the most recent record; used only for display.
    private List<String> metrics;
    public void updateMetrics(List<String> metrics) {
      this.metrics = metrics;
    }
    // Lazily creates the per-depth stats map.
    public Map<Pair, DescriptiveStatistics> getStatsMap(int depth) {
      Map<Pair, DescriptiveStatistics> statsMap = depthMap.get(depth);
      if(statsMap == null) {
        statsMap = new HashMap<>();
        depthMap.put(depth, statsMap);
      }
      return statsMap;
    }
    // Lazily creates the stats accumulator for a (depth, pair) combination.
    public DescriptiveStatistics getStats( int depth, Pair p) {
      Map<Pair, DescriptiveStatistics> statsMap = getStatsMap(depth);
      DescriptiveStatistics stats = statsMap.get(p);
      if(stats == null) {
        stats = new DescriptiveStatistics();
        statsMap.put(p, stats);
      }
      return stats;
    }
    // Records one latency sample for the given stage pair at the given depth.
    public void put(int depth, Pair p, double val) {
      getStats(depth, p).addValue(val);
    }

    // Prints either just the mean or the full percentile breakdown for one pair.
    public static void summary(String title, DescriptiveStatistics statistics, PrintStream pw, boolean meanOnly) {
      if(meanOnly) {
        pw.println(title + ": "
                + "\n\tMean: " + statistics.getMean()
        );
      }
      else {
        pw.println(title + ": "
                + "\n\tMean: " + statistics.getMean()
                + "\n\tMin: " + statistics.getMin()
                + "\n\t1th: " + statistics.getPercentile(1)
                + "\n\t5th: " + statistics.getPercentile(5)
                + "\n\t10th: " + statistics.getPercentile(10)
                + "\n\t25th: " + statistics.getPercentile(25)
                + "\n\t50th: " + statistics.getPercentile(50)
                + "\n\t90th: " + statistics.getPercentile(90)
                + "\n\t95th: " + statistics.getPercentile(95)
                + "\n\t99th: " + statistics.getPercentile(99)
                + "\n\tMax: " + statistics.getMax()
                + "\n\tStdDev: " + statistics.getStandardDeviation()
        );
      }
    }
    // Prints every stage pair at one depth, sorted by descending mean latency.
    public void printDepthSummary(int depth, boolean meanOnly) {
      Map<Pair, DescriptiveStatistics> statsMap = depthMap.get(depth);
      System.out.println("\nDistance " + depth);
      System.out.println("----------------\n");
      List<Map.Entry<Pair, DescriptiveStatistics>> sortedStats = new ArrayList<>();
      for(Map.Entry<Pair, DescriptiveStatistics> stats : statsMap.entrySet()) {
        sortedStats.add(stats);
      }
      Collections.sort(sortedStats, new Comparator<Map.Entry<Pair, DescriptiveStatistics>>() {
        @Override
        public int compare(Map.Entry<Pair, DescriptiveStatistics> o1, Map.Entry<Pair, DescriptiveStatistics> o2) {
          // Negated: largest mean latency first.
          return -1*Double.compare(o1.getValue().getMean(), o2.getValue().getMean());
        }
      });
      for(Map.Entry<Pair, DescriptiveStatistics> stats : sortedStats) {
        summary(stats.getKey().getKey() + " -> " + stats.getKey().getValue(), stats.getValue(), System.out, meanOnly);
      }
    }
    // Prints the flow ordering followed by per-depth summaries, shallowest first.
    public void printSummary(boolean meanOnly) {
      System.out.println("Flow:");
      System.out.println("\t" + Joiner.on(" -> ").join(metrics));
      System.out.println("\nSUMMARY BY DISTANCE\n--------------------------");
      for(int depth : depthMap.keySet()) {
        printDepthSummary(depth, meanOnly);
      }
    }

  }

  // Strips the final dot-separated component, e.g. "enrichment.geo.ts" -> "enrichment.geo".
  public static String getBaseMetric(String s) {
    Iterable<String> tokenIt = Splitter.on('.').split(s);
    int num = Iterables.size(tokenIt);
    return Joiner.on('.').join(Iterables.limit(tokenIt, num-1));
  }

  /**
   * Folds one JSON record into the stats: collects its ".ts" fields, orders
   * the stages by timestamp, and records the latency for every stage pair at
   * depth (j - i).
   * NOTE(review): latencyInvMap is keyed by the timestamp value, so two stages
   * with an identical timestamp collide and one is silently dropped from the
   * ordering — confirm this is acceptable for the expected data.
   */
  public static void updateStats(LatencyStats stats, Map<String, Object> doc) {
    Map<String, Long> latencyMap = new HashMap<>();
    NavigableMap<Long, String> latencyInvMap = new TreeMap<>();
    for(Map.Entry<String, Object> kv : doc.entrySet()) {
      if(kv.getKey().endsWith(".ts")) {
        String base = getBaseMetric(kv.getKey());
        long latency = Long.parseLong(kv.getValue().toString());
        latencyInvMap.put(latency, base);
        latencyMap.put( base, latency);
      }
    }
    // Stage names in ascending timestamp order (TreeMap iteration order).
    List<String> metrics = new ArrayList<>();
    for(Map.Entry<Long, String> kv : latencyInvMap.entrySet()) {
      metrics.add(kv.getValue());
    }
    stats.updateMetrics(metrics);
    for(int i = 0;i < metrics.size();++i) {
      for(int j = i+1;j < metrics.size();++j) {
        Pair p = new Pair(metrics.get(i), metrics.get(j));
        long ms = latencyMap.get(metrics.get(j)) - latencyMap.get(metrics.get(i));
        stats.put(j-i, p, ms);
      }
    }
  }



  /**
   * Entry point: parses flags (-h help, -m mean-only), consumes JSON lines
   * from stdin, and prints the latency summary once the stream ends.
   */
  public static void main(String... argv) throws IOException {
    Options options = new Options();
    {
      Option o = new Option("h", "help", false, "This screen");
      o.setRequired(false);
      options.addOption(o);
    }
    {
      Option o = new Option("m", "mean_only", false, "Print the mean only when we summarize");
      o.setRequired(false);
      options.addOption(o);
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
      cmd = parser.parse(options, argv);
    }
    catch(ParseException pe) {
      pe.printStackTrace();
      final HelpFormatter usageFormatter = new HelpFormatter();
      usageFormatter.printHelp(LatencySummarizer.class.getSimpleName().toLowerCase(), null, options, null, true);
      System.exit(-1);
    }
    if( cmd.hasOption("h") ){
      final HelpFormatter usageFormatter = new HelpFormatter();
      usageFormatter.printHelp(LatencySummarizer.class.getSimpleName().toLowerCase(), null, options, null, true);
      System.exit(0);
    }
    LatencyStats statsMap = new LatencyStats();
    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
    for(String line = null;(line = reader.readLine()) != null;) {
      Map<String, Object> doc = JSONUtils.INSTANCE.load(line, new TypeReference<HashMap<String, Object>>() {});
      updateStats(statsMap, doc);
    }
    statsMap.printSummary(cmd.hasOption('m'));
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/configuration/Enrichment.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/configuration/Enrichment.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/configuration/Enrichment.java
new file mode 100644
index 0000000..736a911
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/configuration/Enrichment.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.configuration;
+
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+
+import java.io.Serializable;
+import java.util.List;
+
+/**
+ * Describes a single configured enrichment: a named type, the message fields it
+ * applies to, and the adapter implementation that performs the lookup.
+ *
+ * @param <T> the adapter implementation used to perform this enrichment
+ */
+public class Enrichment<T extends EnrichmentAdapter> implements Serializable {
+
+  private String type;
+  private List<String> fields;
+  private T adapter;
+
+  /** No-arg constructor for serialization/deserialization frameworks. */
+  public Enrichment() {}
+
+  /**
+   * @param type    the enrichment type name
+   * @param adapter the adapter performing this enrichment
+   */
+  public Enrichment(String type, T adapter) {
+    this.type = type;
+    this.adapter = adapter;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public List<String> getFields() {
+    return fields;
+  }
+
+  public void setFields(List<String> fields) {
+    this.fields = fields;
+  }
+
+  public T getAdapter() {
+    return adapter;
+  }
+
+  public void setAdapter(T adapter) {
+    this.adapter = adapter;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/AbstractConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/AbstractConverter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/AbstractConverter.java
new file mode 100644
index 0000000..4b57677
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/AbstractConverter.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.converter;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.LookupValue;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.util.*;
+
+
+/**
+ * Base implementation of {@link HbaseConverter}: translates a {@link LookupKey} /
+ * {@link LookupValue} pair to and from the HBase client objects (Put, Get, Result).
+ * The key supplies the row bytes; the value supplies the column qualifier/value pairs.
+ */
+public abstract class AbstractConverter<KEY_T extends LookupKey, VALUE_T extends LookupValue> implements HbaseConverter<KEY_T,VALUE_T> {
+  // Adapts an HBase Cell to a (qualifier, value) entry so LookupValue.fromColumns can consume it.
+  public static Function<Cell, Map.Entry<byte[], byte[]>> CELL_TO_ENTRY  = new Function<Cell, Map.Entry<byte[], byte[]>>() {
+
+    @Nullable
+    @Override
+    public Map.Entry<byte[], byte[]> apply(@Nullable Cell cell) {
+      return new AbstractMap.SimpleEntry<>(cell.getQualifier(), cell.getValue());
+    }
+  };
+  /**
+   * Builds a Put whose row is the serialized key and whose columns (under
+   * {@code columnFamily}) are the serialized value.
+   */
+  @Override
+  public Put toPut(String columnFamily, KEY_T key, VALUE_T values) throws IOException {
+    Put put = new Put(key.toBytes());
+    byte[] cf = Bytes.toBytes(columnFamily);
+    for(Map.Entry<byte[], byte[]> kv : values.toColumns()) {
+      put.add(cf, kv.getKey(), kv.getValue());
+    }
+    return put;
+  }
+
+  /**
+   * Inverse of {@link #toPut}: populates the supplied (mutable) key and value
+   * objects from the Put's row and column-family cells, then wraps them in a LookupKV.
+   */
+  public LookupKV<KEY_T, VALUE_T> fromPut(Put put, String columnFamily, KEY_T key, VALUE_T value) throws IOException {
+    key.fromBytes(put.getRow());
+    byte[] cf = Bytes.toBytes(columnFamily);
+    value.fromColumns(Iterables.transform(put.getFamilyCellMap().get(cf), CELL_TO_ENTRY));
+    return new LookupKV<>(key, value);
+  }
+
+  /**
+   * Builds a client-side Result from the key/value pair by reusing the cells of
+   * the equivalent Put; useful for tests and mocking reads.
+   */
+  @Override
+  public Result toResult(String columnFamily, KEY_T key, VALUE_T values) throws IOException {
+    Put put = toPut(columnFamily, key, values);
+    return Result.create(put.getFamilyCellMap().get(Bytes.toBytes(columnFamily)));
+  }
+
+  /**
+   * Populates the supplied key and value objects from a scan/get Result.
+   * Returns null for an empty Result (no row), e.g. a missed Get.
+   */
+  public LookupKV<KEY_T, VALUE_T> fromResult(Result result, String columnFamily, KEY_T key, VALUE_T value) throws IOException {
+    if(result == null || result.getRow() == null) {
+      return null;
+    }
+    key.fromBytes(result.getRow());
+    byte[] cf = Bytes.toBytes(columnFamily);
+    NavigableMap<byte[], byte[]> cols = result.getFamilyMap(cf);
+    value.fromColumns(cols.entrySet());
+    return new LookupKV<>(key, value);
+  }
+  /** Builds a Get for the key's row restricted to the given column family. */
+  @Override
+  public Get toGet(String columnFamily, KEY_T key) {
+    Get ret = new Get(key.toBytes());
+    ret.addFamily(Bytes.toBytes(columnFamily));
+    return ret;
+  }
+
+  /**
+   * Pairs up a flat varargs list of byte arrays into (qualifier, value) entries.
+   *
+   * @throws IllegalStateException if an odd number of arrays is supplied
+   */
+  public static Iterable<Map.Entry<byte[], byte[]>> toEntries(byte[]... kvs) {
+    if(kvs.length % 2 != 0)  {
+      throw new IllegalStateException("Must be an even size");
+    }
+    List<Map.Entry<byte[], byte[]>> ret = new ArrayList<>(kvs.length/2);
+    for(int i = 0;i < kvs.length;i += 2) {
+      ret.add(new AbstractMap.SimpleImmutableEntry<>(kvs[i], kvs[i+1])) ;
+    }
+    return ret;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentConverter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentConverter.java
new file mode 100644
index 0000000..6f19781
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentConverter.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.converter;
+
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.metron.enrichment.lookup.LookupKV;
+
+import java.io.IOException;
+
+/**
+ * Concrete converter for enrichment rows: binds {@link AbstractConverter} to
+ * {@link EnrichmentKey}/{@link EnrichmentValue} by supplying fresh instances
+ * for deserialization.
+ */
+public class EnrichmentConverter extends AbstractConverter<EnrichmentKey, EnrichmentValue> {
+
+  @Override
+  public LookupKV<EnrichmentKey, EnrichmentValue> fromPut(Put put, String columnFamily) throws IOException {
+    return fromPut(put, columnFamily, new EnrichmentKey(), new EnrichmentValue());
+  }
+
+  @Override
+  public LookupKV<EnrichmentKey, EnrichmentValue> fromResult(Result result, String columnFamily) throws IOException {
+    return fromResult(result, columnFamily, new EnrichmentKey(), new EnrichmentValue());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentHelper.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentHelper.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentHelper.java
new file mode 100644
index 0000000..475ee8c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentHelper.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.converter;
+
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.metron.enrichment.lookup.LookupKV;
+
+import java.io.IOException;
+
+/**
+ * Singleton helper (enum singleton pattern) for bulk-loading enrichment
+ * key/value pairs into an HBase table.
+ */
+public enum EnrichmentHelper {
+    INSTANCE;
+    EnrichmentConverter converter = new EnrichmentConverter();
+
+    /**
+     * Writes each key/value pair to the table as one Put, one row at a time.
+     *
+     * @param table   destination HBase table
+     * @param cf      column family to write the value columns under
+     * @param results key/value pairs to load
+     * @throws IOException if the conversion or the table write fails
+     */
+    public void load(HTableInterface table, String cf, Iterable<LookupKV<EnrichmentKey, EnrichmentValue>> results) throws IOException {
+        for(LookupKV<EnrichmentKey, EnrichmentValue> result : results) {
+            Put put = converter.toPut(cf, result.getKey(), result.getValue());
+            table.put(put);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentKey.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentKey.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentKey.java
new file mode 100644
index 0000000..6201ad1
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentKey.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.converter;
+
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.*;
+
+/**
+ * Row key for an enrichment entry, identified by (type, indicator).
+ *
+ * Serialized row layout:
+ *   [16-byte murmur3_128(indicator) prefix][writeUTF(type)][writeUTF(indicator)]
+ * The hash prefix spreads rows across regions; note it is computed from the
+ * indicator only, so the same indicator under different types shares a prefix.
+ */
+public class EnrichmentKey implements LookupKey {
+  private static final int SEED = 0xDEADBEEF;
+  // murmur3_128 produces a 16-byte digest; fromBytes skips exactly this many bytes.
+  private static final int HASH_PREFIX_SIZE=16;
+  // HashFunction instances are kept per-thread; one per thread avoids repeated construction.
+  ThreadLocal<HashFunction> hFunction= new ThreadLocal<HashFunction>() {
+    @Override
+    protected HashFunction initialValue() {
+      return Hashing.murmur3_128(SEED);
+    }
+  };
+
+  public String indicator;
+  public String type;
+
+  // No-arg constructor so the key can be populated later via fromBytes().
+  public EnrichmentKey() {
+
+  }
+  public EnrichmentKey(String type, String indicator) {
+    this.indicator = indicator;
+    this.type = type;
+  }
+
+  // Serializes type then indicator with DataOutputStream.writeUTF (length-prefixed
+  // modified UTF-8), which fromBytes() reads back in the same order.
+  private byte[] typedIndicatorToBytes() throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream w = new DataOutputStream(baos);
+    w.writeUTF(type);
+    w.writeUTF(indicator);
+    w.flush();
+    return baos.toByteArray();
+  }
+
+  /**
+   * Builds the HBase row key: hash prefix followed by the typed indicator bytes.
+   *
+   * @throws RuntimeException if serialization of type/indicator fails
+   */
+  @Override
+  public byte[] toBytes() {
+    byte[] indicatorBytes = new byte[0];
+    try {
+      indicatorBytes = typedIndicatorToBytes();
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to convert type and indicator to bytes", e);
+    }
+    Hasher hasher = hFunction.get().newHasher();
+    hasher.putBytes(Bytes.toBytes(indicator));
+    byte[] prefix = hasher.hash().asBytes();
+    byte[] val = new byte[indicatorBytes.length + prefix.length];
+    int offset = 0;
+    System.arraycopy(prefix, 0, val, offset, prefix.length);
+    offset += prefix.length;
+    System.arraycopy(indicatorBytes, 0, val, offset, indicatorBytes.length);
+    return val;
+  }
+
+  /**
+   * Inverse of {@link #toBytes()}: skips the hash prefix and reads back
+   * type then indicator, populating this instance's fields.
+   *
+   * @throws RuntimeException if the row bytes cannot be parsed
+   */
+  @Override
+  public void fromBytes(byte[] row) {
+    ByteArrayInputStream baos = new ByteArrayInputStream(row);
+    baos.skip(HASH_PREFIX_SIZE);
+    DataInputStream w = new DataInputStream(baos);
+    try {
+      type = w.readUTF();
+      indicator = w.readUTF();
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to convert type and indicator from bytes", e);
+    }
+  }
+
+  // Equality is over (indicator, type) only; the thread-local hasher is excluded.
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    EnrichmentKey that = (EnrichmentKey) o;
+
+    if (indicator != null ? !indicator.equals(that.indicator) : that.indicator != null) return false;
+    return type != null ? type.equals(that.type) : that.type == null;
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = indicator != null ? indicator.hashCode() : 0;
+    result = 31 * result + (type != null ? type.hashCode() : 0);
+    return result;
+  }
+
+  @Override
+  public String toString() {
+    return "EnrichmentKey{" +
+            "indicator='" + indicator + '\'' +
+            ", type='" + type + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentValue.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentValue.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentValue.java
new file mode 100644
index 0000000..d9b7b38
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/EnrichmentValue.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.converter;
+
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.metron.enrichment.lookup.LookupValue;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.type.TypeReference;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class EnrichmentValue implements LookupValue {
+   private static final ThreadLocal<ObjectMapper> _mapper = new ThreadLocal<ObjectMapper>() {
+             @Override
+             protected ObjectMapper initialValue() {
+                return new ObjectMapper();
+             }
+    };
+    public static final String VALUE_COLUMN_NAME = "v";
+    public static final byte[] VALUE_COLUMN_NAME_B = Bytes.toBytes(VALUE_COLUMN_NAME);
+
+    private Map<String, String> metadata = null;
+
+    public EnrichmentValue()
+    {
+
+    }
+
+    public EnrichmentValue(Map<String, String> metadata) {
+        this.metadata = metadata;
+    }
+
+
+
+    public Map<String, String> getMetadata() {
+        return metadata;
+    }
+
+    @Override
+    public Iterable<Map.Entry<byte[], byte[]>> toColumns() {
+        return AbstractConverter.toEntries( VALUE_COLUMN_NAME_B, Bytes.toBytes(valueToString(metadata))
+                                  );
+    }
+
+    @Override
+    public void fromColumns(Iterable<Map.Entry<byte[], byte[]>> values) {
+        for(Map.Entry<byte[], byte[]> cell : values) {
+            if(Bytes.equals(cell.getKey(), VALUE_COLUMN_NAME_B)) {
+                metadata = stringToValue(Bytes.toString(cell.getValue()));
+            }
+        }
+    }
+    public Map<String, String> stringToValue(String s){
+        try {
+            return _mapper.get().readValue(s, new TypeReference<Map<String, String>>(){});
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to convert string to metadata: " + s);
+        }
+    }
+    public String valueToString(Map<String, String> value) {
+        try {
+            return _mapper.get().writeValueAsString(value);
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to convert metadata to string: " + value);
+        }
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        EnrichmentValue that = (EnrichmentValue) o;
+
+        return getMetadata() != null ? getMetadata().equals(that.getMetadata()) : that.getMetadata() == null;
+
+    }
+
+    @Override
+    public int hashCode() {
+        return getMetadata() != null ? getMetadata().hashCode() : 0;
+    }
+
+    @Override
+    public String toString() {
+        return "EnrichmentValue{" +
+                "metadata=" + metadata +
+                '}';
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/HbaseConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/HbaseConverter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/HbaseConverter.java
new file mode 100644
index 0000000..7300b76
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/converter/HbaseConverter.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.converter;
+
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.LookupValue;
+
+import java.io.IOException;
+
+/**
+ * Converts a LookupKey/LookupValue pair to and from the HBase client objects
+ * used to read and write it (Put, Get, Result).
+ *
+ * @param <KEY_T>   the row-key type
+ * @param <VALUE_T> the column-value type
+ */
+public interface HbaseConverter<KEY_T extends LookupKey, VALUE_T extends LookupValue> {
+    /** Builds the Put that writes this key/value pair under the column family. */
+    Put toPut(String columnFamily, KEY_T key, VALUE_T values) throws IOException;
+
+    /** Reconstructs the key/value pair from a previously built Put. */
+    LookupKV<KEY_T, VALUE_T> fromPut(Put put, String columnFamily) throws IOException;
+
+    /** Builds a client-side Result equivalent to the key/value pair. */
+    Result toResult(String columnFamily, KEY_T key, VALUE_T values) throws IOException;
+
+    /** Reconstructs the key/value pair from a read Result. */
+    LookupKV<KEY_T, VALUE_T> fromResult(Result result, String columnFamily) throws IOException;
+
+    /** Builds the Get that reads this key's row restricted to the column family. */
+    Get toGet(String columnFamily, KEY_T key);
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
new file mode 100644
index 0000000..28f9956
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/interfaces/EnrichmentAdapter.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.interfaces;
+
+import org.json.simple.JSONObject;
+
+/**
+ * Contract for an enrichment source: given a value extracted from a message,
+ * produce the JSON enrichment data for it.
+ *
+ * @param <T> the type of value to enrich
+ */
+public interface EnrichmentAdapter<T>
+{
+	/** Records that the given value was looked up (e.g. for access tracking). */
+	void logAccess(T value);
+	/** Returns the enrichment data for the value as a JSON object. */
+	JSONObject enrich(T value);
+	/** One-time setup; returns true when the adapter is ready for use. */
+	boolean initializeAdapter();
+	/** Releases any resources held by the adapter. */
+	void cleanup();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/EnrichmentLookup.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/EnrichmentLookup.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/EnrichmentLookup.java
new file mode 100644
index 0000000..f43f854
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/EnrichmentLookup.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup;
+
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.accesstracker.AccessTracker;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * A {@link Lookup} over enrichment rows stored in a single HBase table/column
+ * family. Owns the table handle and closes it when the lookup is closed.
+ */
+public class EnrichmentLookup extends Lookup<HTableInterface, EnrichmentKey, LookupKV<EnrichmentKey,EnrichmentValue>> implements AutoCloseable {
+
+  /**
+   * Performs the actual HBase reads: translates EnrichmentKeys to Gets via an
+   * EnrichmentConverter and executes them against the supplied table.
+   */
+  public static class Handler implements org.apache.metron.enrichment.lookup.handler.Handler<HTableInterface,EnrichmentKey,LookupKV<EnrichmentKey,EnrichmentValue>> {
+    String columnFamily;
+    HbaseConverter<EnrichmentKey, EnrichmentValue> converter = new EnrichmentConverter();
+    public Handler(String columnFamily) {
+      this.columnFamily = columnFamily;
+    }
+    // NOTE: logAccess is ignored here; access tracking is handled by the Lookup wrapper.
+    @Override
+    public boolean exists(EnrichmentKey key, HTableInterface table, boolean logAccess) throws IOException {
+      return table.exists(converter.toGet(columnFamily, key));
+    }
+
+    @Override
+    public LookupKV<EnrichmentKey, EnrichmentValue> get(EnrichmentKey key, HTableInterface table, boolean logAccess) throws IOException {
+      return converter.fromResult(table.get(converter.toGet(columnFamily, key)), columnFamily);
+    }
+
+    // Translates each key into a Get restricted to this handler's column family.
+    private List<Get> keysToGets(Iterable<EnrichmentKey> keys) {
+      List<Get> ret = new ArrayList<>();
+      for(EnrichmentKey key : keys) {
+        ret.add(converter.toGet(columnFamily, key));
+      }
+      return ret;
+    }
+
+    /** Batch existence check; result order matches the input key order. */
+    @Override
+    public Iterable<Boolean> exists(Iterable<EnrichmentKey> key, HTableInterface table, boolean logAccess) throws IOException {
+      List<Boolean> ret = new ArrayList<>();
+      for(boolean b : table.existsAll(keysToGets(key))) {
+        ret.add(b);
+      }
+      return ret;
+    }
+
+    /** Batch get; result order matches the input key order. */
+    @Override
+    public Iterable<LookupKV<EnrichmentKey, EnrichmentValue>> get( Iterable<EnrichmentKey> keys
+                                                                 , HTableInterface table
+                                                                 , boolean logAccess
+                                                                 ) throws IOException
+    {
+      List<LookupKV<EnrichmentKey, EnrichmentValue>> ret = new ArrayList<>();
+      for(Result result : table.get(keysToGets(keys))) {
+        ret.add(converter.fromResult(result, columnFamily));
+      }
+      return ret;
+    }
+
+
+    // No resources to release; the table is owned by the enclosing EnrichmentLookup.
+    @Override
+    public void close() throws Exception {
+
+    }
+  }
+  private HTableInterface table;
+  public EnrichmentLookup(HTableInterface table, String columnFamily, AccessTracker tracker) {
+    this.table = table;
+    this.setLookupHandler(new Handler(columnFamily));
+    this.setAccessTracker(tracker);
+  }
+
+  public HTableInterface getTable() {
+    return table;
+  }
+
+  /** Closes the access tracker/handler (via super) and then the table handle. */
+  @Override
+  public void close() throws Exception {
+    super.close();
+    table.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/Lookup.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/Lookup.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/Lookup.java
new file mode 100644
index 0000000..265fccd
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/Lookup.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup;
+
+import org.apache.metron.enrichment.lookup.accesstracker.AccessTracker;
+import org.apache.metron.enrichment.lookup.handler.Handler;
+
+import java.io.IOException;
+
+/**
+ * Decorator around a {@link Handler}: records each looked-up key in an
+ * {@link AccessTracker} (when logAccess is true) before delegating the
+ * actual lookup to the configured handler.
+ *
+ * NOTE(review): assumes both lookupHandler and accessTracker have been set via
+ * the setters before any lookup is performed; there is no null guard here.
+ *
+ * @param <CONTEXT_T> the backing-store context (e.g. an HBase table)
+ * @param <KEY_T>     the lookup key type
+ * @param <RESULT_T>  the lookup result type
+ */
+public class Lookup<CONTEXT_T, KEY_T extends LookupKey, RESULT_T> implements Handler<CONTEXT_T, KEY_T, RESULT_T> {
+  private String name;
+  private AccessTracker accessTracker;
+  private Handler<CONTEXT_T, KEY_T, RESULT_T> lookupHandler;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public AccessTracker getAccessTracker() {
+    return accessTracker;
+  }
+
+  public void setAccessTracker(AccessTracker accessTracker) {
+    this.accessTracker = accessTracker;
+  }
+
+  public Handler< CONTEXT_T, KEY_T, RESULT_T > getLookupHandler() {
+    return lookupHandler;
+  }
+
+  public void setLookupHandler(Handler< CONTEXT_T, KEY_T, RESULT_T > lookupHandler) {
+    this.lookupHandler = lookupHandler;
+  }
+
+  /** Logs the access (if requested) and delegates the existence check. */
+  @Override
+  public boolean exists(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException {
+    if(logAccess) {
+      accessTracker.logAccess(key);
+    }
+    return lookupHandler.exists(key, context, logAccess);
+  }
+
+  /** Logs the access (if requested) and delegates the lookup. */
+  @Override
+  public RESULT_T get(KEY_T key, CONTEXT_T context, boolean logAccess) throws IOException {
+    if(logAccess) {
+      accessTracker.logAccess(key);
+    }
+    return lookupHandler.get(key, context, logAccess);
+  }
+
+  /** Logs each key (if requested) and delegates the batch existence check. */
+  @Override
+  public Iterable<Boolean> exists(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException {
+    if(logAccess) {
+      for (KEY_T k : key) {
+        accessTracker.logAccess(k);
+      }
+    }
+    return lookupHandler.exists(key, context, logAccess);
+  }
+
+
+  /** Logs each key (if requested) and delegates the batch lookup. */
+  @Override
+  public Iterable<RESULT_T> get(Iterable<KEY_T> key, CONTEXT_T context, boolean logAccess) throws IOException {
+    if(logAccess) {
+      for (KEY_T k : key) {
+        accessTracker.logAccess(k);
+      }
+    }
+    return lookupHandler.get(key, context, logAccess);
+  }
+
+  /** Flushes/cleans up the tracker, then closes the underlying handler. */
+  @Override
+  public void close() throws Exception {
+    accessTracker.cleanup();
+    lookupHandler.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKV.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKV.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKV.java
new file mode 100644
index 0000000..3538aab
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKV.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.enrichment.lookup;
+
+import java.io.Serializable;
+
+public class LookupKV<KEY_T extends LookupKey, VALUE_T extends LookupValue> implements Serializable {
+    private KEY_T key;
+    private VALUE_T value;
+    public LookupKV(KEY_T key, VALUE_T value) {
+        this.key = key;
+        this.value = value;
+    }
+
+    public KEY_T getKey() {
+        return key;
+    }
+
+    public VALUE_T getValue() {
+        return value;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        LookupKV<?, ?> lookupKV = (LookupKV<?, ?>) o;
+
+        if (key != null ? !key.equals(lookupKV.key) : lookupKV.key != null) return false;
+        return value != null ? value.equals(lookupKV.value) : lookupKV.value == null;
+
+    }
+
+    @Override
+    public int hashCode() {
+        int result = key != null ? key.hashCode() : 0;
+        result = 31 * result + (value != null ? value.hashCode() : 0);
+        return result;
+    }
+
+    @Override
+    public String toString() {
+        return "LookupKV{" +
+                "key=" + key +
+                ", value=" + value +
+                '}';
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKey.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKey.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKey.java
new file mode 100644
index 0000000..b7ea00c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupKey.java
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup;
+
+/**
+ * A key into a lookup store.  Implementations provide a round-trippable
+ * byte-level serialization of the key.
+ */
+public interface LookupKey {
+    /** Serializes this key to bytes. */
+    byte[] toBytes();
+    /** Populates this key from bytes produced by {@link #toBytes()}. */
+    void fromBytes(byte[] in);
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupValue.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupValue.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupValue.java
new file mode 100644
index 0000000..24fbffd
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/LookupValue.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.metron.enrichment.lookup;
+
+import java.util.Map;
+import java.util.NavigableMap;
+
+/**
+ * A value in a lookup store, convertible to and from a set of
+ * column/column-value byte pairs.
+ */
+public interface LookupValue {
+    /** Serializes this value as column/value byte pairs. */
+    Iterable<Map.Entry<byte[], byte[]>> toColumns();
+    /** Populates this value from column/value byte pairs produced by {@link #toColumns()}. */
+    void fromColumns(Iterable<Map.Entry<byte[], byte[]>> values);
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTracker.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTracker.java
new file mode 100644
index 0000000..bde6604
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTracker.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.accesstracker;
+
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * Tracks which lookup keys have been accessed.  Serializable so tracker
+ * state can be persisted and later merged via {@link #union(AccessTracker)}.
+ */
+public interface AccessTracker extends Serializable{
+    /** Records an access of the given key. */
+    void logAccess(LookupKey key);
+    /** Configures the tracker from a key/value configuration map. */
+    void configure(Map<String, Object> config);
+    /** @return true if the key appears to have been accessed before. */
+    boolean hasSeen(LookupKey key);
+    /** @return the logical name of this tracker. */
+    String getName();
+    /** Merges another tracker's state into this one and returns the union. */
+    AccessTracker union(AccessTracker tracker);
+    /** Clears all recorded accesses. */
+    void reset();
+    /** @return true when the tracker has reached capacity. */
+    boolean isFull();
+    /** Releases any resources held by the tracker. */
+    void cleanup() throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTrackerUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTrackerUtil.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTrackerUtil.java
new file mode 100644
index 0000000..5d880f2
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/AccessTrackerUtil.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.accesstracker;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import javax.annotation.Nullable;
+import java.io.*;
+
+public enum AccessTrackerUtil {
+    INSTANCE;
+
+    public static byte[] COLUMN = Bytes.toBytes("v");
+
+    public AccessTracker deserializeTracker(byte[] bytes) throws IOException, ClassNotFoundException {
+        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes));
+        return (AccessTracker) ois.readObject();
+    }
+    public byte[] serializeTracker(AccessTracker tracker) throws IOException {
+        ByteArrayOutputStream bos = new ByteArrayOutputStream();
+        ObjectOutputStream oos = new ObjectOutputStream(bos);
+        oos.writeObject(tracker);
+        oos.flush();
+        oos.close();
+        return bos.toByteArray();
+    }
+
+
+    public void persistTracker(HTableInterface accessTrackerTable, String columnFamily, PersistentAccessTracker.AccessTrackerKey key, AccessTracker underlyingTracker) throws IOException {
+        Put put = new Put(key.toRowKey());
+        put.add(Bytes.toBytes(columnFamily), COLUMN, serializeTracker(underlyingTracker));
+        accessTrackerTable.put(put);
+    }
+
+    public Iterable<AccessTracker> loadAll(HTableInterface accessTrackerTable, final String columnFamily, final String name, final long earliest) throws IOException {
+        Scan scan = new Scan(PersistentAccessTracker.AccessTrackerKey.getTimestampScanKey(name, earliest));
+        ResultScanner scanner = accessTrackerTable.getScanner(scan);
+        return Iterables.transform(scanner, new Function<Result, AccessTracker>() {
+
+            @Nullable
+            @Override
+            public AccessTracker apply(@Nullable Result result) {
+                try {
+                    return deserializeTracker(result.getValue(Bytes.toBytes(columnFamily), COLUMN));
+                } catch (Exception e) {
+                    throw new RuntimeException("Unable to deserialize " + name + " @ " + earliest);
+                }
+            }
+        });
+    }
+
+
+    public AccessTracker loadAll(Iterable<AccessTracker> trackers) throws IOException, ClassNotFoundException {
+        AccessTracker tracker = null;
+        for(AccessTracker t : trackers) {
+            if(tracker == null) {
+                tracker = t;
+            }
+            else {
+                tracker = tracker.union(t);
+            }
+        }
+        return tracker;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/BloomAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/BloomAccessTracker.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/BloomAccessTracker.java
new file mode 100644
index 0000000..763ba59
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/BloomAccessTracker.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.accesstracker;
+
+import com.google.common.hash.BloomFilter;
+import com.google.common.hash.Funnel;
+import com.google.common.hash.PrimitiveSink;
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.*;
+import java.util.Map;
+
+public class BloomAccessTracker implements AccessTracker {
+    private static final long serialVersionUID = 1L;
+    public static final String EXPECTED_INSERTIONS_KEY = "expected_insertions";
+    public static final String FALSE_POSITIVE_RATE_KEY = "false_positive_rate";
+    public static final String NAME_KEY = "name";
+
+    private static class LookupKeyFunnel implements Funnel<LookupKey> {
+        @Override
+        public void funnel(LookupKey lookupKey, PrimitiveSink primitiveSink) {
+            primitiveSink.putBytes(lookupKey.toBytes());
+        }
+
+
+        @Override
+        public boolean equals(Object obj) {
+            return this.getClass().equals(obj.getClass());
+        }
+
+    }
+
+    private static Funnel<LookupKey> LOOKUPKEY_FUNNEL = new LookupKeyFunnel();
+
+    BloomFilter<LookupKey> filter;
+    String name;
+    int expectedInsertions;
+    double falsePositiveRate;
+    int numInsertions = 0;
+
+    public BloomAccessTracker(String name, int expectedInsertions, double falsePositiveRate) {
+        this.name = name;
+        this.expectedInsertions = expectedInsertions;
+        this.falsePositiveRate = falsePositiveRate;
+        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
+    }
+    public BloomAccessTracker() {}
+    public BloomAccessTracker(Map<String, Object> config) {
+        configure(config);
+    }
+
+    protected BloomFilter<LookupKey> getFilter() {
+        return filter;
+    }
+    @Override
+    public void logAccess(LookupKey key) {
+        numInsertions++;
+        filter.put(key);
+    }
+
+    @Override
+    public void configure(Map<String, Object> config) {
+        expectedInsertions = toInt(config.get(EXPECTED_INSERTIONS_KEY));
+        falsePositiveRate = toDouble(config.get(FALSE_POSITIVE_RATE_KEY));
+        name = config.get(NAME_KEY).toString();
+        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
+    }
+
+    @Override
+    public boolean hasSeen(LookupKey key) {
+        return filter.mightContain(key);
+    }
+
+    @Override
+    public void reset() {
+        filter = BloomFilter.create(LOOKUPKEY_FUNNEL, expectedInsertions, falsePositiveRate);
+    }
+
+    private static double toDouble(Object o) {
+        if(o instanceof String) {
+            return Double.parseDouble((String)o);
+        }
+        else if(o instanceof Number) {
+            return ((Number) o).doubleValue();
+        }
+        else {
+            throw new IllegalStateException("Unable to convert " + o + " to a double.");
+        }
+    }
+    private static int toInt(Object o) {
+        if(o instanceof String) {
+            return Integer.parseInt((String)o);
+        }
+        else if(o instanceof Number) {
+            return ((Number) o).intValue();
+        }
+        else {
+            throw new IllegalStateException("Unable to convert " + o + " to a double.");
+        }
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public AccessTracker union(AccessTracker tracker) {
+        if(filter == null) {
+            throw new IllegalStateException("Unable to union access tracker, because this tracker is not initialized.");
+        }
+        if(tracker instanceof BloomAccessTracker ) {
+            filter.putAll(((BloomAccessTracker)tracker).getFilter());
+            return this;
+        }
+        else {
+            throw new IllegalStateException("Unable to union access tracker, because it's not of the right type (BloomAccessTracker)");
+        }
+    }
+
+    @Override
+    public boolean isFull() {
+        return numInsertions >= expectedInsertions;
+    }
+
+    @Override
+    public void cleanup() throws IOException {
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/NoopAccessTracker.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/NoopAccessTracker.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/NoopAccessTracker.java
new file mode 100644
index 0000000..18cad3c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/lookup/accesstracker/NoopAccessTracker.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.lookup.accesstracker;
+
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class NoopAccessTracker implements AccessTracker {
+  @Override
+  public void logAccess(LookupKey key) {
+
+  }
+
+  @Override
+  public void configure(Map<String, Object> config) {
+
+  }
+
+  @Override
+  public boolean hasSeen(LookupKey key) {
+    return false;
+  }
+
+  @Override
+  public String getName() {
+    return "noop";
+  }
+
+  @Override
+  public AccessTracker union(AccessTracker tracker) {
+    return null;
+  }
+
+  @Override
+  public void reset() {
+
+  }
+
+  @Override
+  public boolean isFull() {
+    return false;
+  }
+
+  @Override
+  public void cleanup() throws IOException {
+
+  }
+}


[12/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/resources/effective_tld_names.dat
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/resources/effective_tld_names.dat b/metron-platform/metron-parsers/src/test/resources/effective_tld_names.dat
new file mode 100644
index 0000000..36e5d4c
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/resources/effective_tld_names.dat
@@ -0,0 +1,9719 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// ===BEGIN ICANN DOMAINS===
+
+// ac : http://en.wikipedia.org/wiki/.ac
+ac
+com.ac
+edu.ac
+gov.ac
+net.ac
+mil.ac
+org.ac
+
+// ad : http://en.wikipedia.org/wiki/.ad
+ad
+nom.ad
+
+// ae : http://en.wikipedia.org/wiki/.ae
+// see also: "Domain Name Eligibility Policy" at http://www.aeda.ae/eng/aepolicy.php
+ae
+co.ae
+net.ae
+org.ae
+sch.ae
+ac.ae
+gov.ae
+mil.ae
+
+// aero : see http://www.information.aero/index.php?id=66
+aero
+accident-investigation.aero
+accident-prevention.aero
+aerobatic.aero
+aeroclub.aero
+aerodrome.aero
+agents.aero
+aircraft.aero
+airline.aero
+airport.aero
+air-surveillance.aero
+airtraffic.aero
+air-traffic-control.aero
+ambulance.aero
+amusement.aero
+association.aero
+author.aero
+ballooning.aero
+broker.aero
+caa.aero
+cargo.aero
+catering.aero
+certification.aero
+championship.aero
+charter.aero
+civilaviation.aero
+club.aero
+conference.aero
+consultant.aero
+consulting.aero
+control.aero
+council.aero
+crew.aero
+design.aero
+dgca.aero
+educator.aero
+emergency.aero
+engine.aero
+engineer.aero
+entertainment.aero
+equipment.aero
+exchange.aero
+express.aero
+federation.aero
+flight.aero
+freight.aero
+fuel.aero
+gliding.aero
+government.aero
+groundhandling.aero
+group.aero
+hanggliding.aero
+homebuilt.aero
+insurance.aero
+journal.aero
+journalist.aero
+leasing.aero
+logistics.aero
+magazine.aero
+maintenance.aero
+marketplace.aero
+media.aero
+microlight.aero
+modelling.aero
+navigation.aero
+parachuting.aero
+paragliding.aero
+passenger-association.aero
+pilot.aero
+press.aero
+production.aero
+recreation.aero
+repbody.aero
+res.aero
+research.aero
+rotorcraft.aero
+safety.aero
+scientist.aero
+services.aero
+show.aero
+skydiving.aero
+software.aero
+student.aero
+taxi.aero
+trader.aero
+trading.aero
+trainer.aero
+union.aero
+workinggroup.aero
+works.aero
+
+// af : http://www.nic.af/help.jsp
+af
+gov.af
+com.af
+org.af
+net.af
+edu.af
+
+// ag : http://www.nic.ag/prices.htm
+ag
+com.ag
+org.ag
+net.ag
+co.ag
+nom.ag
+
+// ai : http://nic.com.ai/
+ai
+off.ai
+com.ai
+net.ai
+org.ai
+
+// al : http://www.ert.gov.al/ert_alb/faq_det.html?Id=31
+al
+com.al
+edu.al
+gov.al
+mil.al
+net.al
+org.al
+
+// am : http://en.wikipedia.org/wiki/.am
+am
+
+// an : http://www.una.an/an_domreg/default.asp
+an
+com.an
+net.an
+org.an
+edu.an
+
+// ao : http://en.wikipedia.org/wiki/.ao
+// http://www.dns.ao/REGISTR.DOC
+ao
+ed.ao
+gv.ao
+og.ao
+co.ao
+pb.ao
+it.ao
+
+// aq : http://en.wikipedia.org/wiki/.aq
+aq
+
+// ar : https://nic.ar/normativa-vigente.xhtml
+ar
+com.ar
+edu.ar
+gob.ar
+gov.ar
+int.ar
+mil.ar
+net.ar
+org.ar
+tur.ar
+
+// arpa : http://en.wikipedia.org/wiki/.arpa
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+arpa
+e164.arpa
+in-addr.arpa
+ip6.arpa
+iris.arpa
+uri.arpa
+urn.arpa
+
+// as : http://en.wikipedia.org/wiki/.as
+as
+gov.as
+
+// asia : http://en.wikipedia.org/wiki/.asia
+asia
+
+// at : http://en.wikipedia.org/wiki/.at
+// Confirmed by registry <it...@nic.at> 2008-06-17
+at
+ac.at
+co.at
+gv.at
+or.at
+
+// au : http://en.wikipedia.org/wiki/.au
+// http://www.auda.org.au/
+au
+// 2LDs
+com.au
+net.au
+org.au
+edu.au
+gov.au
+asn.au
+id.au
+// Historic 2LDs (closed to new registration, but sites still exist)
+info.au
+conf.au
+oz.au
+// CGDNs - http://www.cgdn.org.au/
+act.au
+nsw.au
+nt.au
+qld.au
+sa.au
+tas.au
+vic.au
+wa.au
+// 3LDs
+act.edu.au
+nsw.edu.au
+nt.edu.au
+qld.edu.au
+sa.edu.au
+tas.edu.au
+vic.edu.au
+wa.edu.au
+// act.gov.au  Bug 984824 - Removed at request of Greg Tankard
+// nsw.gov.au  Bug 547985 - Removed at request of <Sh...@services.nsw.gov.au>
+// nt.gov.au  Bug 940478 - Removed at request of Greg Connors <Gr...@nt.gov.au>
+qld.gov.au
+sa.gov.au
+tas.gov.au
+vic.gov.au
+wa.gov.au
+
+// aw : http://en.wikipedia.org/wiki/.aw
+aw
+com.aw
+
+// ax : http://en.wikipedia.org/wiki/.ax
+ax
+
+// az : http://en.wikipedia.org/wiki/.az
+az
+com.az
+net.az
+int.az
+gov.az
+org.az
+edu.az
+info.az
+pp.az
+mil.az
+name.az
+pro.az
+biz.az
+
+// ba : http://en.wikipedia.org/wiki/.ba
+ba
+org.ba
+net.ba
+edu.ba
+gov.ba
+mil.ba
+unsa.ba
+unbi.ba
+co.ba
+com.ba
+rs.ba
+
+// bb : http://en.wikipedia.org/wiki/.bb
+bb
+biz.bb
+co.bb
+com.bb
+edu.bb
+gov.bb
+info.bb
+net.bb
+org.bb
+store.bb
+tv.bb
+
+// bd : http://en.wikipedia.org/wiki/.bd
+*.bd
+
+// be : http://en.wikipedia.org/wiki/.be
+// Confirmed by registry <te...@dns.be> 2008-06-08
+be
+ac.be
+
+// bf : http://en.wikipedia.org/wiki/.bf
+bf
+gov.bf
+
+// bg : http://en.wikipedia.org/wiki/.bg
+// https://www.register.bg/user/static/rules/en/index.html
+bg
+a.bg
+b.bg
+c.bg
+d.bg
+e.bg
+f.bg
+g.bg
+h.bg
+i.bg
+j.bg
+k.bg
+l.bg
+m.bg
+n.bg
+o.bg
+p.bg
+q.bg
+r.bg
+s.bg
+t.bg
+u.bg
+v.bg
+w.bg
+x.bg
+y.bg
+z.bg
+0.bg
+1.bg
+2.bg
+3.bg
+4.bg
+5.bg
+6.bg
+7.bg
+8.bg
+9.bg
+
+// bh : http://en.wikipedia.org/wiki/.bh
+bh
+com.bh
+edu.bh
+net.bh
+org.bh
+gov.bh
+
+// bi : http://en.wikipedia.org/wiki/.bi
+// http://whois.nic.bi/
+bi
+co.bi
+com.bi
+edu.bi
+or.bi
+org.bi
+
+// biz : http://en.wikipedia.org/wiki/.biz
+biz
+
+// bj : http://en.wikipedia.org/wiki/.bj
+bj
+asso.bj
+barreau.bj
+gouv.bj
+
+// bm : http://www.bermudanic.bm/dnr-text.txt
+bm
+com.bm
+edu.bm
+gov.bm
+net.bm
+org.bm
+
+// bn : http://en.wikipedia.org/wiki/.bn
+*.bn
+
+// bo : http://www.nic.bo/
+bo
+com.bo
+edu.bo
+gov.bo
+gob.bo
+int.bo
+org.bo
+net.bo
+mil.bo
+tv.bo
+
+// br : http://registro.br/dominio/categoria.html
+// Submitted by registry <fn...@registro.br> 2014-08-11
+br
+adm.br
+adv.br
+agr.br
+am.br
+arq.br
+art.br
+ato.br
+b.br
+bio.br
+blog.br
+bmd.br
+cim.br
+cng.br
+cnt.br
+com.br
+coop.br
+ecn.br
+eco.br
+edu.br
+emp.br
+eng.br
+esp.br
+etc.br
+eti.br
+far.br
+flog.br
+fm.br
+fnd.br
+fot.br
+fst.br
+g12.br
+ggf.br
+gov.br
+imb.br
+ind.br
+inf.br
+jor.br
+jus.br
+leg.br
+lel.br
+mat.br
+med.br
+mil.br
+mp.br
+mus.br
+net.br
+*.nom.br
+not.br
+ntr.br
+odo.br
+org.br
+ppg.br
+pro.br
+psc.br
+psi.br
+qsl.br
+radio.br
+rec.br
+slg.br
+srv.br
+taxi.br
+teo.br
+tmp.br
+trd.br
+tur.br
+tv.br
+vet.br
+vlog.br
+wiki.br
+zlg.br
+
+// bs : http://www.nic.bs/rules.html
+bs
+com.bs
+net.bs
+org.bs
+edu.bs
+gov.bs
+
+// bt : http://en.wikipedia.org/wiki/.bt
+bt
+com.bt
+edu.bt
+gov.bt
+net.bt
+org.bt
+
+// bv : No registrations at this time.
+// Submitted by registry <ja...@uninett.no> 2006-06-16
+bv
+
+// bw : http://en.wikipedia.org/wiki/.bw
+// http://www.gobin.info/domainname/bw.doc
+// list of other 2nd level tlds ?
+bw
+co.bw
+org.bw
+
+// by : http://en.wikipedia.org/wiki/.by
+// http://tld.by/rules_2006_en.html
+// list of other 2nd level tlds ?
+by
+gov.by
+mil.by
+// Official information does not indicate that com.by is a reserved
+// second-level domain, but it's being used as one (see www.google.com.by and
+// www.yahoo.com.by, for example), so we list it here for safety's sake.
+com.by
+
+// http://hoster.by/
+of.by
+
+// bz : http://en.wikipedia.org/wiki/.bz
+// http://www.belizenic.bz/
+bz
+com.bz
+net.bz
+org.bz
+edu.bz
+gov.bz
+
+// ca : http://en.wikipedia.org/wiki/.ca
+ca
+// ca geographical names
+ab.ca
+bc.ca
+mb.ca
+nb.ca
+nf.ca
+nl.ca
+ns.ca
+nt.ca
+nu.ca
+on.ca
+pe.ca
+qc.ca
+sk.ca
+yk.ca
+// gc.ca: http://en.wikipedia.org/wiki/.gc.ca
+// see also: http://registry.gc.ca/en/SubdomainFAQ
+gc.ca
+
+// cat : http://en.wikipedia.org/wiki/.cat
+cat
+
+// cc : http://en.wikipedia.org/wiki/.cc
+cc
+
+// cd : http://en.wikipedia.org/wiki/.cd
+// see also: https://www.nic.cd/domain/insertDomain_2.jsp?act=1
+cd
+gov.cd
+
+// cf : http://en.wikipedia.org/wiki/.cf
+cf
+
+// cg : http://en.wikipedia.org/wiki/.cg
+cg
+
+// ch : http://en.wikipedia.org/wiki/.ch
+ch
+
+// ci : http://en.wikipedia.org/wiki/.ci
+// http://www.nic.ci/index.php?page=charte
+ci
+org.ci
+or.ci
+com.ci
+co.ci
+edu.ci
+ed.ci
+ac.ci
+net.ci
+go.ci
+asso.ci
+aéroport.ci
+int.ci
+presse.ci
+md.ci
+gouv.ci
+
+// ck : http://en.wikipedia.org/wiki/.ck
+*.ck
+!www.ck
+
+// cl : http://en.wikipedia.org/wiki/.cl
+cl
+gov.cl
+gob.cl
+co.cl
+mil.cl
+
+// cm : http://en.wikipedia.org/wiki/.cm plus bug 981927
+cm
+co.cm
+com.cm
+gov.cm
+net.cm
+
+// cn : http://en.wikipedia.org/wiki/.cn
+// Submitted by registry <ta...@cnnic.cn> 2008-06-11
+cn
+ac.cn
+com.cn
+edu.cn
+gov.cn
+net.cn
+org.cn
+mil.cn
+公司.cn
+网络.cn
+網絡.cn
+// cn geographic names
+ah.cn
+bj.cn
+cq.cn
+fj.cn
+gd.cn
+gs.cn
+gz.cn
+gx.cn
+ha.cn
+hb.cn
+he.cn
+hi.cn
+hl.cn
+hn.cn
+jl.cn
+js.cn
+jx.cn
+ln.cn
+nm.cn
+nx.cn
+qh.cn
+sc.cn
+sd.cn
+sh.cn
+sn.cn
+sx.cn
+tj.cn
+xj.cn
+xz.cn
+yn.cn
+zj.cn
+hk.cn
+mo.cn
+tw.cn
+
+// co : http://en.wikipedia.org/wiki/.co
+// Submitted by registry <te...@uniandes.edu.co> 2008-06-11
+co
+arts.co
+com.co
+edu.co
+firm.co
+gov.co
+info.co
+int.co
+mil.co
+net.co
+nom.co
+org.co
+rec.co
+web.co
+
+// com : http://en.wikipedia.org/wiki/.com
+com
+
+// coop : http://en.wikipedia.org/wiki/.coop
+coop
+
+// cr : http://www.nic.cr/niccr_publico/showRegistroDominiosScreen.do
+cr
+ac.cr
+co.cr
+ed.cr
+fi.cr
+go.cr
+or.cr
+sa.cr
+
+// cu : http://en.wikipedia.org/wiki/.cu
+cu
+com.cu
+edu.cu
+org.cu
+net.cu
+gov.cu
+inf.cu
+
+// cv : http://en.wikipedia.org/wiki/.cv
+cv
+
+// cw : http://www.una.cw/cw_registry/
+// Confirmed by registry <re...@una.net> 2013-03-26
+cw
+com.cw
+edu.cw
+net.cw
+org.cw
+
+// cx : http://en.wikipedia.org/wiki/.cx
+// list of other 2nd level tlds ?
+cx
+gov.cx
+
+// cy : http://en.wikipedia.org/wiki/.cy
+*.cy
+
+// cz : http://en.wikipedia.org/wiki/.cz
+cz
+
+// de : http://en.wikipedia.org/wiki/.de
+// Confirmed by registry <op...@denic.de> (with technical
+// reservations) 2008-07-01
+de
+
+// dj : http://en.wikipedia.org/wiki/.dj
+dj
+
+// dk : http://en.wikipedia.org/wiki/.dk
+// Confirmed by registry <ro...@dk-hostmaster.dk> 2008-06-17
+dk
+
+// dm : http://en.wikipedia.org/wiki/.dm
+dm
+com.dm
+net.dm
+org.dm
+edu.dm
+gov.dm
+
+// do : http://en.wikipedia.org/wiki/.do
+do
+art.do
+com.do
+edu.do
+gob.do
+gov.do
+mil.do
+net.do
+org.do
+sld.do
+web.do
+
+// dz : http://en.wikipedia.org/wiki/.dz
+dz
+com.dz
+org.dz
+net.dz
+gov.dz
+edu.dz
+asso.dz
+pol.dz
+art.dz
+
+// ec : http://www.nic.ec/reg/paso1.asp
+// Submitted by registry <va...@nic.ec> 2008-07-04
+ec
+com.ec
+info.ec
+net.ec
+fin.ec
+k12.ec
+med.ec
+pro.ec
+org.ec
+edu.ec
+gov.ec
+gob.ec
+mil.ec
+
+// edu : http://en.wikipedia.org/wiki/.edu
+edu
+
+// ee : http://www.eenet.ee/EENet/dom_reeglid.html#lisa_B
+ee
+edu.ee
+gov.ee
+riik.ee
+lib.ee
+med.ee
+com.ee
+pri.ee
+aip.ee
+org.ee
+fie.ee
+
+// eg : http://en.wikipedia.org/wiki/.eg
+eg
+com.eg
+edu.eg
+eun.eg
+gov.eg
+mil.eg
+name.eg
+net.eg
+org.eg
+sci.eg
+
+// er : http://en.wikipedia.org/wiki/.er
+*.er
+
+// es : https://www.nic.es/site_ingles/ingles/dominios/index.html
+es
+com.es
+nom.es
+org.es
+gob.es
+edu.es
+
+// et : http://en.wikipedia.org/wiki/.et
+et
+com.et
+gov.et
+org.et
+edu.et
+biz.et
+name.et
+info.et
+
+// eu : http://en.wikipedia.org/wiki/.eu
+eu
+
+// fi : http://en.wikipedia.org/wiki/.fi
+fi
+// aland.fi : http://en.wikipedia.org/wiki/.ax
+// This domain is being phased out in favor of .ax. As there are still many
+// domains under aland.fi, we still keep it on the list until aland.fi is
+// completely removed.
+// TODO: Check for updates (expected to be phased out around Q1/2009)
+aland.fi
+
+// fj : http://en.wikipedia.org/wiki/.fj
+*.fj
+
+// fk : http://en.wikipedia.org/wiki/.fk
+*.fk
+
+// fm : http://en.wikipedia.org/wiki/.fm
+fm
+
+// fo : http://en.wikipedia.org/wiki/.fo
+fo
+
+// fr : http://www.afnic.fr/
+// domaines descriptifs : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-descriptifs
+fr
+com.fr
+asso.fr
+nom.fr
+prd.fr
+presse.fr
+tm.fr
+// domaines sectoriels : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-sectoriels
+aeroport.fr
+assedic.fr
+avocat.fr
+avoues.fr
+cci.fr
+chambagri.fr
+chirurgiens-dentistes.fr
+experts-comptables.fr
+geometre-expert.fr
+gouv.fr
+greta.fr
+huissier-justice.fr
+medecin.fr
+notaires.fr
+pharmacien.fr
+port.fr
+veterinaire.fr
+
+// ga : http://en.wikipedia.org/wiki/.ga
+ga
+
+// gb : This registry is effectively dormant
+// Submitted by registry <Da...@ja.net> 2008-06-12
+gb
+
+// gd : http://en.wikipedia.org/wiki/.gd
+gd
+
+// ge : http://www.nic.net.ge/policy_en.pdf
+ge
+com.ge
+edu.ge
+gov.ge
+org.ge
+mil.ge
+net.ge
+pvt.ge
+
+// gf : http://en.wikipedia.org/wiki/.gf
+gf
+
+// gg : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+gg
+co.gg
+net.gg
+org.gg
+
+// gh : http://en.wikipedia.org/wiki/.gh
+// see also: http://www.nic.gh/reg_now.php
+// Although domains directly at second level are not possible at the moment,
+// they have been possible for some time and may come back.
+gh
+com.gh
+edu.gh
+gov.gh
+org.gh
+mil.gh
+
+// gi : http://www.nic.gi/rules.html
+gi
+com.gi
+ltd.gi
+gov.gi
+mod.gi
+edu.gi
+org.gi
+
+// gl : http://en.wikipedia.org/wiki/.gl
+// http://nic.gl
+gl
+
+// gm : http://www.nic.gm/htmlpages%5Cgm-policy.htm
+gm
+
+// gn : http://psg.com/dns/gn/gn.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+gn
+ac.gn
+com.gn
+edu.gn
+gov.gn
+org.gn
+net.gn
+
+// gov : http://en.wikipedia.org/wiki/.gov
+gov
+
+// gp : http://www.nic.gp/index.php?lang=en
+gp
+com.gp
+net.gp
+mobi.gp
+edu.gp
+org.gp
+asso.gp
+
+// gq : http://en.wikipedia.org/wiki/.gq
+gq
+
+// gr : https://grweb.ics.forth.gr/english/1617-B-2005.html
+// Submitted by registry <se...@ics.forth.gr> 2008-06-09
+gr
+com.gr
+edu.gr
+net.gr
+org.gr
+gov.gr
+
+// gs : http://en.wikipedia.org/wiki/.gs
+gs
+
+// gt : http://www.gt/politicas_de_registro.html
+gt
+com.gt
+edu.gt
+gob.gt
+ind.gt
+mil.gt
+net.gt
+org.gt
+
+// gu : http://gadao.gov.gu/registration.txt
+*.gu
+
+// gw : http://en.wikipedia.org/wiki/.gw
+gw
+
+// gy : http://en.wikipedia.org/wiki/.gy
+// http://registry.gy/
+gy
+co.gy
+com.gy
+net.gy
+
+// hk : https://www.hkdnr.hk
+// Submitted by registry <hk...@hkirc.hk> 2008-06-11
+hk
+com.hk
+edu.hk
+gov.hk
+idv.hk
+net.hk
+org.hk
+公司.hk
+教育.hk
+敎育.hk
+政府.hk
+個人.hk
+个人.hk
+箇人.hk
+網络.hk
+网络.hk
+组織.hk
+網絡.hk
+网絡.hk
+组织.hk
+組織.hk
+組织.hk
+
+// hm : http://en.wikipedia.org/wiki/.hm
+hm
+
+// hn : http://www.nic.hn/politicas/ps02,,05.html
+hn
+com.hn
+edu.hn
+org.hn
+net.hn
+mil.hn
+gob.hn
+
+// hr : http://www.dns.hr/documents/pdf/HRTLD-regulations.pdf
+hr
+iz.hr
+from.hr
+name.hr
+com.hr
+
+// ht : http://www.nic.ht/info/charte.cfm
+ht
+com.ht
+shop.ht
+firm.ht
+info.ht
+adult.ht
+net.ht
+pro.ht
+org.ht
+med.ht
+art.ht
+coop.ht
+pol.ht
+asso.ht
+edu.ht
+rel.ht
+gouv.ht
+perso.ht
+
+// hu : http://www.domain.hu/domain/English/sld.html
+// Confirmed by registry <pa...@iszt.hu> 2008-06-12
+hu
+co.hu
+info.hu
+org.hu
+priv.hu
+sport.hu
+tm.hu
+2000.hu
+agrar.hu
+bolt.hu
+casino.hu
+city.hu
+erotica.hu
+erotika.hu
+film.hu
+forum.hu
+games.hu
+hotel.hu
+ingatlan.hu
+jogasz.hu
+konyvelo.hu
+lakas.hu
+media.hu
+news.hu
+reklam.hu
+sex.hu
+shop.hu
+suli.hu
+szex.hu
+tozsde.hu
+utazas.hu
+video.hu
+
+// id : https://register.pandi.or.id/
+id
+ac.id
+biz.id
+co.id
+desa.id
+go.id
+mil.id
+my.id
+net.id
+or.id
+sch.id
+web.id
+
+// ie : http://en.wikipedia.org/wiki/.ie
+ie
+gov.ie
+
+// il : http://en.wikipedia.org/wiki/.il
+*.il
+
+// im : https://www.nic.im/
+// Submitted by registry <in...@nic.im> 2013-11-15
+im
+ac.im
+co.im
+com.im
+ltd.co.im
+net.im
+org.im
+plc.co.im
+tt.im
+tv.im
+
+// in : http://en.wikipedia.org/wiki/.in
+// see also: https://registry.in/Policies
+// Please note that nic.in is not an official eTLD, but it is used by most
+// government institutions.
+in
+co.in
+firm.in
+net.in
+org.in
+gen.in
+ind.in
+nic.in
+ac.in
+edu.in
+res.in
+gov.in
+mil.in
+
+// info : http://en.wikipedia.org/wiki/.info
+info
+
+// int : http://en.wikipedia.org/wiki/.int
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+int
+eu.int
+
+// io : http://www.nic.io/rules.html
+// List of other 2nd-level TLDs?
+io
+com.io
+
+// iq : http://www.cmc.iq/english/iq/iqregister1.htm
+iq
+gov.iq
+edu.iq
+mil.iq
+com.iq
+org.iq
+net.iq
+
+// ir : http://www.nic.ir/Terms_and_Conditions_ir,_Appendix_1_Domain_Rules
+// Also see http://www.nic.ir/Internationalized_Domain_Names
+// Two <iran>.ir entries added at request of <te...@nic.ir>, 2010-04-16
+ir
+ac.ir
+co.ir
+gov.ir
+id.ir
+net.ir
+org.ir
+sch.ir
+// xn--mgba3a4f16a.ir (<iran>.ir, Persian YEH)
+ایران.ir
+// xn--mgba3a4fra.ir (<iran>.ir, Arabic YEH)
+ايران.ir
+
+// is : http://www.isnic.is/domain/rules.php
+// Confirmed by registry <ma...@isgate.is> 2008-12-06
+is
+net.is
+com.is
+edu.is
+gov.is
+org.is
+int.is
+
+// it : http://en.wikipedia.org/wiki/.it
+it
+gov.it
+edu.it
+// Reserved geo-names:
+// http://www.nic.it/documenti/regolamenti-e-linee-guida/regolamento-assegnazione-versione-6.0.pdf
+// There is also a list of reserved geo-names corresponding to Italian municipalities
+// http://www.nic.it/documenti/appendice-c.pdf, but it is not included here.
+// Regions
+abr.it
+abruzzo.it
+aosta-valley.it
+aostavalley.it
+bas.it
+basilicata.it
+cal.it
+calabria.it
+cam.it
+campania.it
+emilia-romagna.it
+emiliaromagna.it
+emr.it
+friuli-v-giulia.it
+friuli-ve-giulia.it
+friuli-vegiulia.it
+friuli-venezia-giulia.it
+friuli-veneziagiulia.it
+friuli-vgiulia.it
+friuliv-giulia.it
+friulive-giulia.it
+friulivegiulia.it
+friulivenezia-giulia.it
+friuliveneziagiulia.it
+friulivgiulia.it
+fvg.it
+laz.it
+lazio.it
+lig.it
+liguria.it
+lom.it
+lombardia.it
+lombardy.it
+lucania.it
+mar.it
+marche.it
+mol.it
+molise.it
+piedmont.it
+piemonte.it
+pmn.it
+pug.it
+puglia.it
+sar.it
+sardegna.it
+sardinia.it
+sic.it
+sicilia.it
+sicily.it
+taa.it
+tos.it
+toscana.it
+trentino-a-adige.it
+trentino-aadige.it
+trentino-alto-adige.it
+trentino-altoadige.it
+trentino-s-tirol.it
+trentino-stirol.it
+trentino-sud-tirol.it
+trentino-sudtirol.it
+trentino-sued-tirol.it
+trentino-suedtirol.it
+trentinoa-adige.it
+trentinoaadige.it
+trentinoalto-adige.it
+trentinoaltoadige.it
+trentinos-tirol.it
+trentinostirol.it
+trentinosud-tirol.it
+trentinosudtirol.it
+trentinosued-tirol.it
+trentinosuedtirol.it
+tuscany.it
+umb.it
+umbria.it
+val-d-aosta.it
+val-daosta.it
+vald-aosta.it
+valdaosta.it
+valle-aosta.it
+valle-d-aosta.it
+valle-daosta.it
+valleaosta.it
+valled-aosta.it
+valledaosta.it
+vallee-aoste.it
+valleeaoste.it
+vao.it
+vda.it
+ven.it
+veneto.it
+// Provinces
+ag.it
+agrigento.it
+al.it
+alessandria.it
+alto-adige.it
+altoadige.it
+an.it
+ancona.it
+andria-barletta-trani.it
+andria-trani-barletta.it
+andriabarlettatrani.it
+andriatranibarletta.it
+ao.it
+aosta.it
+aoste.it
+ap.it
+aq.it
+aquila.it
+ar.it
+arezzo.it
+ascoli-piceno.it
+ascolipiceno.it
+asti.it
+at.it
+av.it
+avellino.it
+ba.it
+balsan.it
+bari.it
+barletta-trani-andria.it
+barlettatraniandria.it
+belluno.it
+benevento.it
+bergamo.it
+bg.it
+bi.it
+biella.it
+bl.it
+bn.it
+bo.it
+bologna.it
+bolzano.it
+bozen.it
+br.it
+brescia.it
+brindisi.it
+bs.it
+bt.it
+bz.it
+ca.it
+cagliari.it
+caltanissetta.it
+campidano-medio.it
+campidanomedio.it
+campobasso.it
+carbonia-iglesias.it
+carboniaiglesias.it
+carrara-massa.it
+carraramassa.it
+caserta.it
+catania.it
+catanzaro.it
+cb.it
+ce.it
+cesena-forli.it
+cesenaforli.it
+ch.it
+chieti.it
+ci.it
+cl.it
+cn.it
+co.it
+como.it
+cosenza.it
+cr.it
+cremona.it
+crotone.it
+cs.it
+ct.it
+cuneo.it
+cz.it
+dell-ogliastra.it
+dellogliastra.it
+en.it
+enna.it
+fc.it
+fe.it
+fermo.it
+ferrara.it
+fg.it
+fi.it
+firenze.it
+florence.it
+fm.it
+foggia.it
+forli-cesena.it
+forlicesena.it
+fr.it
+frosinone.it
+ge.it
+genoa.it
+genova.it
+go.it
+gorizia.it
+gr.it
+grosseto.it
+iglesias-carbonia.it
+iglesiascarbonia.it
+im.it
+imperia.it
+is.it
+isernia.it
+kr.it
+la-spezia.it
+laquila.it
+laspezia.it
+latina.it
+lc.it
+le.it
+lecce.it
+lecco.it
+li.it
+livorno.it
+lo.it
+lodi.it
+lt.it
+lu.it
+lucca.it
+macerata.it
+mantova.it
+massa-carrara.it
+massacarrara.it
+matera.it
+mb.it
+mc.it
+me.it
+medio-campidano.it
+mediocampidano.it
+messina.it
+mi.it
+milan.it
+milano.it
+mn.it
+mo.it
+modena.it
+monza-brianza.it
+monza-e-della-brianza.it
+monza.it
+monzabrianza.it
+monzaebrianza.it
+monzaedellabrianza.it
+ms.it
+mt.it
+na.it
+naples.it
+napoli.it
+no.it
+novara.it
+nu.it
+nuoro.it
+og.it
+ogliastra.it
+olbia-tempio.it
+olbiatempio.it
+or.it
+oristano.it
+ot.it
+pa.it
+padova.it
+padua.it
+palermo.it
+parma.it
+pavia.it
+pc.it
+pd.it
+pe.it
+perugia.it
+pesaro-urbino.it
+pesarourbino.it
+pescara.it
+pg.it
+pi.it
+piacenza.it
+pisa.it
+pistoia.it
+pn.it
+po.it
+pordenone.it
+potenza.it
+pr.it
+prato.it
+pt.it
+pu.it
+pv.it
+pz.it
+ra.it
+ragusa.it
+ravenna.it
+rc.it
+re.it
+reggio-calabria.it
+reggio-emilia.it
+reggiocalabria.it
+reggioemilia.it
+rg.it
+ri.it
+rieti.it
+rimini.it
+rm.it
+rn.it
+ro.it
+roma.it
+rome.it
+rovigo.it
+sa.it
+salerno.it
+sassari.it
+savona.it
+si.it
+siena.it
+siracusa.it
+so.it
+sondrio.it
+sp.it
+sr.it
+ss.it
+suedtirol.it
+sv.it
+ta.it
+taranto.it
+te.it
+tempio-olbia.it
+tempioolbia.it
+teramo.it
+terni.it
+tn.it
+to.it
+torino.it
+tp.it
+tr.it
+trani-andria-barletta.it
+trani-barletta-andria.it
+traniandriabarletta.it
+tranibarlettaandria.it
+trapani.it
+trentino.it
+trento.it
+treviso.it
+trieste.it
+ts.it
+turin.it
+tv.it
+ud.it
+udine.it
+urbino-pesaro.it
+urbinopesaro.it
+va.it
+varese.it
+vb.it
+vc.it
+ve.it
+venezia.it
+venice.it
+verbania.it
+vercelli.it
+verona.it
+vi.it
+vibo-valentia.it
+vibovalentia.it
+vicenza.it
+viterbo.it
+vr.it
+vs.it
+vt.it
+vv.it
+
+// je : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+je
+co.je
+net.je
+org.je
+
+// jm : http://www.com.jm/register.html
+*.jm
+
+// jo : http://www.dns.jo/Registration_policy.aspx
+jo
+com.jo
+org.jo
+net.jo
+edu.jo
+sch.jo
+gov.jo
+mil.jo
+name.jo
+
+// jobs : http://en.wikipedia.org/wiki/.jobs
+jobs
+
+// jp : http://en.wikipedia.org/wiki/.jp
+// http://jprs.co.jp/en/jpdomain.html
+// Submitted by registry <in...@jprs.jp> 2014-10-30
+jp
+// jp organizational type names
+ac.jp
+ad.jp
+co.jp
+ed.jp
+go.jp
+gr.jp
+lg.jp
+ne.jp
+or.jp
+// jp prefecture type names
+aichi.jp
+akita.jp
+aomori.jp
+chiba.jp
+ehime.jp
+fukui.jp
+fukuoka.jp
+fukushima.jp
+gifu.jp
+gunma.jp
+hiroshima.jp
+hokkaido.jp
+hyogo.jp
+ibaraki.jp
+ishikawa.jp
+iwate.jp
+kagawa.jp
+kagoshima.jp
+kanagawa.jp
+kochi.jp
+kumamoto.jp
+kyoto.jp
+mie.jp
+miyagi.jp
+miyazaki.jp
+nagano.jp
+nagasaki.jp
+nara.jp
+niigata.jp
+oita.jp
+okayama.jp
+okinawa.jp
+osaka.jp
+saga.jp
+saitama.jp
+shiga.jp
+shimane.jp
+shizuoka.jp
+tochigi.jp
+tokushima.jp
+tokyo.jp
+tottori.jp
+toyama.jp
+wakayama.jp
+yamagata.jp
+yamaguchi.jp
+yamanashi.jp
+栃木.jp
+愛知.jp
+愛媛.jp
+兵庫.jp
+熊本.jp
+茨城.jp
+北海道.jp
+千葉.jp
+和歌山.jp
+長崎.jp
+長野.jp
+新潟.jp
+青森.jp
+静岡.jp
+東京.jp
+石川.jp
+埼玉.jp
+三重.jp
+京都.jp
+佐賀.jp
+大分.jp
+大阪.jp
+奈良.jp
+宮城.jp
+宮崎.jp
+富山.jp
+山口.jp
+山形.jp
+山梨.jp
+岩手.jp
+岐阜.jp
+岡山.jp
+島根.jp
+広島.jp
+徳島.jp
+沖縄.jp
+滋賀.jp
+神奈川.jp
+福井.jp
+福岡.jp
+福島.jp
+秋田.jp
+群馬.jp
+香川.jp
+高知.jp
+鳥取.jp
+鹿児島.jp
+// jp geographic type names
+// http://jprs.jp/doc/rule/saisoku-1.html
+*.kawasaki.jp
+*.kitakyushu.jp
+*.kobe.jp
+*.nagoya.jp
+*.sapporo.jp
+*.sendai.jp
+*.yokohama.jp
+!city.kawasaki.jp
+!city.kitakyushu.jp
+!city.kobe.jp
+!city.nagoya.jp
+!city.sapporo.jp
+!city.sendai.jp
+!city.yokohama.jp
+// 4th level registration
+aisai.aichi.jp
+ama.aichi.jp
+anjo.aichi.jp
+asuke.aichi.jp
+chiryu.aichi.jp
+chita.aichi.jp
+fuso.aichi.jp
+gamagori.aichi.jp
+handa.aichi.jp
+hazu.aichi.jp
+hekinan.aichi.jp
+higashiura.aichi.jp
+ichinomiya.aichi.jp
+inazawa.aichi.jp
+inuyama.aichi.jp
+isshiki.aichi.jp
+iwakura.aichi.jp
+kanie.aichi.jp
+kariya.aichi.jp
+kasugai.aichi.jp
+kira.aichi.jp
+kiyosu.aichi.jp
+komaki.aichi.jp
+konan.aichi.jp
+kota.aichi.jp
+mihama.aichi.jp
+miyoshi.aichi.jp
+nishio.aichi.jp
+nisshin.aichi.jp
+obu.aichi.jp
+oguchi.aichi.jp
+oharu.aichi.jp
+okazaki.aichi.jp
+owariasahi.aichi.jp
+seto.aichi.jp
+shikatsu.aichi.jp
+shinshiro.aichi.jp
+shitara.aichi.jp
+tahara.aichi.jp
+takahama.aichi.jp
+tobishima.aichi.jp
+toei.aichi.jp
+togo.aichi.jp
+tokai.aichi.jp
+tokoname.aichi.jp
+toyoake.aichi.jp
+toyohashi.aichi.jp
+toyokawa.aichi.jp
+toyone.aichi.jp
+toyota.aichi.jp
+tsushima.aichi.jp
+yatomi.aichi.jp
+akita.akita.jp
+daisen.akita.jp
+fujisato.akita.jp
+gojome.akita.jp
+hachirogata.akita.jp
+happou.akita.jp
+higashinaruse.akita.jp
+honjo.akita.jp
+honjyo.akita.jp
+ikawa.akita.jp
+kamikoani.akita.jp
+kamioka.akita.jp
+katagami.akita.jp
+kazuno.akita.jp
+kitaakita.akita.jp
+kosaka.akita.jp
+kyowa.akita.jp
+misato.akita.jp
+mitane.akita.jp
+moriyoshi.akita.jp
+nikaho.akita.jp
+noshiro.akita.jp
+odate.akita.jp
+oga.akita.jp
+ogata.akita.jp
+semboku.akita.jp
+yokote.akita.jp
+yurihonjo.akita.jp
+aomori.aomori.jp
+gonohe.aomori.jp
+hachinohe.aomori.jp
+hashikami.aomori.jp
+hiranai.aomori.jp
+hirosaki.aomori.jp
+itayanagi.aomori.jp
+kuroishi.aomori.jp
+misawa.aomori.jp
+mutsu.aomori.jp
+nakadomari.aomori.jp
+noheji.aomori.jp
+oirase.aomori.jp
+owani.aomori.jp
+rokunohe.aomori.jp
+sannohe.aomori.jp
+shichinohe.aomori.jp
+shingo.aomori.jp
+takko.aomori.jp
+towada.aomori.jp
+tsugaru.aomori.jp
+tsuruta.aomori.jp
+abiko.chiba.jp
+asahi.chiba.jp
+chonan.chiba.jp
+chosei.chiba.jp
+choshi.chiba.jp
+chuo.chiba.jp
+funabashi.chiba.jp
+futtsu.chiba.jp
+hanamigawa.chiba.jp
+ichihara.chiba.jp
+ichikawa.chiba.jp
+ichinomiya.chiba.jp
+inzai.chiba.jp
+isumi.chiba.jp
+kamagaya.chiba.jp
+kamogawa.chiba.jp
+kashiwa.chiba.jp
+katori.chiba.jp
+katsuura.chiba.jp
+kimitsu.chiba.jp
+kisarazu.chiba.jp
+kozaki.chiba.jp
+kujukuri.chiba.jp
+kyonan.chiba.jp
+matsudo.chiba.jp
+midori.chiba.jp
+mihama.chiba.jp
+minamiboso.chiba.jp
+mobara.chiba.jp
+mutsuzawa.chiba.jp
+nagara.chiba.jp
+nagareyama.chiba.jp
+narashino.chiba.jp
+narita.chiba.jp
+noda.chiba.jp
+oamishirasato.chiba.jp
+omigawa.chiba.jp
+onjuku.chiba.jp
+otaki.chiba.jp
+sakae.chiba.jp
+sakura.chiba.jp
+shimofusa.chiba.jp
+shirako.chiba.jp
+shiroi.chiba.jp
+shisui.chiba.jp
+sodegaura.chiba.jp
+sosa.chiba.jp
+tako.chiba.jp
+tateyama.chiba.jp
+togane.chiba.jp
+tohnosho.chiba.jp
+tomisato.chiba.jp
+urayasu.chiba.jp
+yachimata.chiba.jp
+yachiyo.chiba.jp
+yokaichiba.chiba.jp
+yokoshibahikari.chiba.jp
+yotsukaido.chiba.jp
+ainan.ehime.jp
+honai.ehime.jp
+ikata.ehime.jp
+imabari.ehime.jp
+iyo.ehime.jp
+kamijima.ehime.jp
+kihoku.ehime.jp
+kumakogen.ehime.jp
+masaki.ehime.jp
+matsuno.ehime.jp
+matsuyama.ehime.jp
+namikata.ehime.jp
+niihama.ehime.jp
+ozu.ehime.jp
+saijo.ehime.jp
+seiyo.ehime.jp
+shikokuchuo.ehime.jp
+tobe.ehime.jp
+toon.ehime.jp
+uchiko.ehime.jp
+uwajima.ehime.jp
+yawatahama.ehime.jp
+echizen.fukui.jp
+eiheiji.fukui.jp
+fukui.fukui.jp
+ikeda.fukui.jp
+katsuyama.fukui.jp
+mihama.fukui.jp
+minamiechizen.fukui.jp
+obama.fukui.jp
+ohi.fukui.jp
+ono.fukui.jp
+sabae.fukui.jp
+sakai.fukui.jp
+takahama.fukui.jp
+tsuruga.fukui.jp
+wakasa.fukui.jp
+ashiya.fukuoka.jp
+buzen.fukuoka.jp
+chikugo.fukuoka.jp
+chikuho.fukuoka.jp
+chikujo.fukuoka.jp
+chikushino.fukuoka.jp
+chikuzen.fukuoka.jp
+chuo.fukuoka.jp
+dazaifu.fukuoka.jp
+fukuchi.fukuoka.jp
+hakata.fukuoka.jp
+higashi.fukuoka.jp
+hirokawa.fukuoka.jp
+hisayama.fukuoka.jp
+iizuka.fukuoka.jp
+inatsuki.fukuoka.jp
+kaho.fukuoka.jp
+kasuga.fukuoka.jp
+kasuya.fukuoka.jp
+kawara.fukuoka.jp
+keisen.fukuoka.jp
+koga.fukuoka.jp
+kurate.fukuoka.jp
+kurogi.fukuoka.jp
+kurume.fukuoka.jp
+minami.fukuoka.jp
+miyako.fukuoka.jp
+miyama.fukuoka.jp
+miyawaka.fukuoka.jp
+mizumaki.fukuoka.jp
+munakata.fukuoka.jp
+nakagawa.fukuoka.jp
+nakama.fukuoka.jp
+nishi.fukuoka.jp
+nogata.fukuoka.jp
+ogori.fukuoka.jp
+okagaki.fukuoka.jp
+okawa.fukuoka.jp
+oki.fukuoka.jp
+omuta.fukuoka.jp
+onga.fukuoka.jp
+onojo.fukuoka.jp
+oto.fukuoka.jp
+saigawa.fukuoka.jp
+sasaguri.fukuoka.jp
+shingu.fukuoka.jp
+shinyoshitomi.fukuoka.jp
+shonai.fukuoka.jp
+soeda.fukuoka.jp
+sue.fukuoka.jp
+tachiarai.fukuoka.jp
+tagawa.fukuoka.jp
+takata.fukuoka.jp
+toho.fukuoka.jp
+toyotsu.fukuoka.jp
+tsuiki.fukuoka.jp
+ukiha.fukuoka.jp
+umi.fukuoka.jp
+usui.fukuoka.jp
+yamada.fukuoka.jp
+yame.fukuoka.jp
+yanagawa.fukuoka.jp
+yukuhashi.fukuoka.jp
+aizubange.fukushima.jp
+aizumisato.fukushima.jp
+aizuwakamatsu.fukushima.jp
+asakawa.fukushima.jp
+bandai.fukushima.jp
+date.fukushima.jp
+fukushima.fukushima.jp
+furudono.fukushima.jp
+futaba.fukushima.jp
+hanawa.fukushima.jp
+higashi.fukushima.jp
+hirata.fukushima.jp
+hirono.fukushima.jp
+iitate.fukushima.jp
+inawashiro.fukushima.jp
+ishikawa.fukushima.jp
+iwaki.fukushima.jp
+izumizaki.fukushima.jp
+kagamiishi.fukushima.jp
+kaneyama.fukushima.jp
+kawamata.fukushima.jp
+kitakata.fukushima.jp
+kitashiobara.fukushima.jp
+koori.fukushima.jp
+koriyama.fukushima.jp
+kunimi.fukushima.jp
+miharu.fukushima.jp
+mishima.fukushima.jp
+namie.fukushima.jp
+nango.fukushima.jp
+nishiaizu.fukushima.jp
+nishigo.fukushima.jp
+okuma.fukushima.jp
+omotego.fukushima.jp
+ono.fukushima.jp
+otama.fukushima.jp
+samegawa.fukushima.jp
+shimogo.fukushima.jp
+shirakawa.fukushima.jp
+showa.fukushima.jp
+soma.fukushima.jp
+sukagawa.fukushima.jp
+taishin.fukushima.jp
+tamakawa.fukushima.jp
+tanagura.fukushima.jp
+tenei.fukushima.jp
+yabuki.fukushima.jp
+yamato.fukushima.jp
+yamatsuri.fukushima.jp
+yanaizu.fukushima.jp
+yugawa.fukushima.jp
+anpachi.gifu.jp
+ena.gifu.jp
+gifu.gifu.jp
+ginan.gifu.jp
+godo.gifu.jp
+gujo.gifu.jp
+hashima.gifu.jp
+hichiso.gifu.jp
+hida.gifu.jp
+higashishirakawa.gifu.jp
+ibigawa.gifu.jp
+ikeda.gifu.jp
+kakamigahara.gifu.jp
+kani.gifu.jp
+kasahara.gifu.jp
+kasamatsu.gifu.jp
+kawaue.gifu.jp
+kitagata.gifu.jp
+mino.gifu.jp
+minokamo.gifu.jp
+mitake.gifu.jp
+mizunami.gifu.jp
+motosu.gifu.jp
+nakatsugawa.gifu.jp
+ogaki.gifu.jp
+sakahogi.gifu.jp
+seki.gifu.jp
+sekigahara.gifu.jp
+shirakawa.gifu.jp
+tajimi.gifu.jp
+takayama.gifu.jp
+tarui.gifu.jp
+toki.gifu.jp
+tomika.gifu.jp
+wanouchi.gifu.jp
+yamagata.gifu.jp
+yaotsu.gifu.jp
+yoro.gifu.jp
+annaka.gunma.jp
+chiyoda.gunma.jp
+fujioka.gunma.jp
+higashiagatsuma.gunma.jp
+isesaki.gunma.jp
+itakura.gunma.jp
+kanna.gunma.jp
+kanra.gunma.jp
+katashina.gunma.jp
+kawaba.gunma.jp
+kiryu.gunma.jp
+kusatsu.gunma.jp
+maebashi.gunma.jp
+meiwa.gunma.jp
+midori.gunma.jp
+minakami.gunma.jp
+naganohara.gunma.jp
+nakanojo.gunma.jp
+nanmoku.gunma.jp
+numata.gunma.jp
+oizumi.gunma.jp
+ora.gunma.jp
+ota.gunma.jp
+shibukawa.gunma.jp
+shimonita.gunma.jp
+shinto.gunma.jp
+showa.gunma.jp
+takasaki.gunma.jp
+takayama.gunma.jp
+tamamura.gunma.jp
+tatebayashi.gunma.jp
+tomioka.gunma.jp
+tsukiyono.gunma.jp
+tsumagoi.gunma.jp
+ueno.gunma.jp
+yoshioka.gunma.jp
+asaminami.hiroshima.jp
+daiwa.hiroshima.jp
+etajima.hiroshima.jp
+fuchu.hiroshima.jp
+fukuyama.hiroshima.jp
+hatsukaichi.hiroshima.jp
+higashihiroshima.hiroshima.jp
+hongo.hiroshima.jp
+jinsekikogen.hiroshima.jp
+kaita.hiroshima.jp
+kui.hiroshima.jp
+kumano.hiroshima.jp
+kure.hiroshima.jp
+mihara.hiroshima.jp
+miyoshi.hiroshima.jp
+naka.hiroshima.jp
+onomichi.hiroshima.jp
+osakikamijima.hiroshima.jp
+otake.hiroshima.jp
+saka.hiroshima.jp
+sera.hiroshima.jp
+seranishi.hiroshima.jp
+shinichi.hiroshima.jp
+shobara.hiroshima.jp
+takehara.hiroshima.jp
+abashiri.hokkaido.jp
+abira.hokkaido.jp
+aibetsu.hokkaido.jp
+akabira.hokkaido.jp
+akkeshi.hokkaido.jp
+asahikawa.hokkaido.jp
+ashibetsu.hokkaido.jp
+ashoro.hokkaido.jp
+assabu.hokkaido.jp
+atsuma.hokkaido.jp
+bibai.hokkaido.jp
+biei.hokkaido.jp
+bifuka.hokkaido.jp
+bihoro.hokkaido.jp
+biratori.hokkaido.jp
+chippubetsu.hokkaido.jp
+chitose.hokkaido.jp
+date.hokkaido.jp
+ebetsu.hokkaido.jp
+embetsu.hokkaido.jp
+eniwa.hokkaido.jp
+erimo.hokkaido.jp
+esan.hokkaido.jp
+esashi.hokkaido.jp
+fukagawa.hokkaido.jp
+fukushima.hokkaido.jp
+furano.hokkaido.jp
+furubira.hokkaido.jp
+haboro.hokkaido.jp
+hakodate.hokkaido.jp
+hamatonbetsu.hokkaido.jp
+hidaka.hokkaido.jp
+higashikagura.hokkaido.jp
+higashikawa.hokkaido.jp
+hiroo.hokkaido.jp
+hokuryu.hokkaido.jp
+hokuto.hokkaido.jp
+honbetsu.hokkaido.jp
+horokanai.hokkaido.jp
+horonobe.hokkaido.jp
+ikeda.hokkaido.jp
+imakane.hokkaido.jp
+ishikari.hokkaido.jp
+iwamizawa.hokkaido.jp
+iwanai.hokkaido.jp
+kamifurano.hokkaido.jp
+kamikawa.hokkaido.jp
+kamishihoro.hokkaido.jp
+kamisunagawa.hokkaido.jp
+kamoenai.hokkaido.jp
+kayabe.hokkaido.jp
+kembuchi.hokkaido.jp
+kikonai.hokkaido.jp
+kimobetsu.hokkaido.jp
+kitahiroshima.hokkaido.jp
+kitami.hokkaido.jp
+kiyosato.hokkaido.jp
+koshimizu.hokkaido.jp
+kunneppu.hokkaido.jp
+kuriyama.hokkaido.jp
+kuromatsunai.hokkaido.jp
+kushiro.hokkaido.jp
+kutchan.hokkaido.jp
+kyowa.hokkaido.jp
+mashike.hokkaido.jp
+matsumae.hokkaido.jp
+mikasa.hokkaido.jp
+minamifurano.hokkaido.jp
+mombetsu.hokkaido.jp
+moseushi.hokkaido.jp
+mukawa.hokkaido.jp
+muroran.hokkaido.jp
+naie.hokkaido.jp
+nakagawa.hokkaido.jp
+nakasatsunai.hokkaido.jp
+nakatombetsu.hokkaido.jp
+nanae.hokkaido.jp
+nanporo.hokkaido.jp
+nayoro.hokkaido.jp
+nemuro.hokkaido.jp
+niikappu.hokkaido.jp
+niki.hokkaido.jp
+nishiokoppe.hokkaido.jp
+noboribetsu.hokkaido.jp
+numata.hokkaido.jp
+obihiro.hokkaido.jp
+obira.hokkaido.jp
+oketo.hokkaido.jp
+okoppe.hokkaido.jp
+otaru.hokkaido.jp
+otobe.hokkaido.jp
+otofuke.hokkaido.jp
+otoineppu.hokkaido.jp
+oumu.hokkaido.jp
+ozora.hokkaido.jp
+pippu.hokkaido.jp
+rankoshi.hokkaido.jp
+rebun.hokkaido.jp
+rikubetsu.hokkaido.jp
+rishiri.hokkaido.jp
+rishirifuji.hokkaido.jp
+saroma.hokkaido.jp
+sarufutsu.hokkaido.jp
+shakotan.hokkaido.jp
+shari.hokkaido.jp
+shibecha.hokkaido.jp
+shibetsu.hokkaido.jp
+shikabe.hokkaido.jp
+shikaoi.hokkaido.jp
+shimamaki.hokkaido.jp
+shimizu.hokkaido.jp
+shimokawa.hokkaido.jp
+shinshinotsu.hokkaido.jp
+shintoku.hokkaido.jp
+shiranuka.hokkaido.jp
+shiraoi.hokkaido.jp
+shiriuchi.hokkaido.jp
+sobetsu.hokkaido.jp
+sunagawa.hokkaido.jp
+taiki.hokkaido.jp
+takasu.hokkaido.jp
+takikawa.hokkaido.jp
+takinoue.hokkaido.jp
+teshikaga.hokkaido.jp
+tobetsu.hokkaido.jp
+tohma.hokkaido.jp
+tomakomai.hokkaido.jp
+tomari.hokkaido.jp
+toya.hokkaido.jp
+toyako.hokkaido.jp
+toyotomi.hokkaido.jp
+toyoura.hokkaido.jp
+tsubetsu.hokkaido.jp
+tsukigata.hokkaido.jp
+urakawa.hokkaido.jp
+urausu.hokkaido.jp
+uryu.hokkaido.jp
+utashinai.hokkaido.jp
+wakkanai.hokkaido.jp
+wassamu.hokkaido.jp
+yakumo.hokkaido.jp
+yoichi.hokkaido.jp
+aioi.hyogo.jp
+akashi.hyogo.jp
+ako.hyogo.jp
+amagasaki.hyogo.jp
+aogaki.hyogo.jp
+asago.hyogo.jp
+ashiya.hyogo.jp
+awaji.hyogo.jp
+fukusaki.hyogo.jp
+goshiki.hyogo.jp
+harima.hyogo.jp
+himeji.hyogo.jp
+ichikawa.hyogo.jp
+inagawa.hyogo.jp
+itami.hyogo.jp
+kakogawa.hyogo.jp
+kamigori.hyogo.jp
+kamikawa.hyogo.jp
+kasai.hyogo.jp
+kasuga.hyogo.jp
+kawanishi.hyogo.jp
+miki.hyogo.jp
+minamiawaji.hyogo.jp
+nishinomiya.hyogo.jp
+nishiwaki.hyogo.jp
+ono.hyogo.jp
+sanda.hyogo.jp
+sannan.hyogo.jp
+sasayama.hyogo.jp
+sayo.hyogo.jp
+shingu.hyogo.jp
+shinonsen.hyogo.jp
+shiso.hyogo.jp
+sumoto.hyogo.jp
+taishi.hyogo.jp
+taka.hyogo.jp
+takarazuka.hyogo.jp
+takasago.hyogo.jp
+takino.hyogo.jp
+tamba.hyogo.jp
+tatsuno.hyogo.jp
+toyooka.hyogo.jp
+yabu.hyogo.jp
+yashiro.hyogo.jp
+yoka.hyogo.jp
+yokawa.hyogo.jp
+ami.ibaraki.jp
+asahi.ibaraki.jp
+bando.ibaraki.jp
+chikusei.ibaraki.jp
+daigo.ibaraki.jp
+fujishiro.ibaraki.jp
+hitachi.ibaraki.jp
+hitachinaka.ibaraki.jp
+hitachiomiya.ibaraki.jp
+hitachiota.ibaraki.jp
+ibaraki.ibaraki.jp
+ina.ibaraki.jp
+inashiki.ibaraki.jp
+itako.ibaraki.jp
+iwama.ibaraki.jp
+joso.ibaraki.jp
+kamisu.ibaraki.jp
+kasama.ibaraki.jp
+kashima.ibaraki.jp
+kasumigaura.ibaraki.jp
+koga.ibaraki.jp
+miho.ibaraki.jp
+mito.ibaraki.jp
+moriya.ibaraki.jp
+naka.ibaraki.jp
+namegata.ibaraki.jp
+oarai.ibaraki.jp
+ogawa.ibaraki.jp
+omitama.ibaraki.jp
+ryugasaki.ibaraki.jp
+sakai.ibaraki.jp
+sakuragawa.ibaraki.jp
+shimodate.ibaraki.jp
+shimotsuma.ibaraki.jp
+shirosato.ibaraki.jp
+sowa.ibaraki.jp
+suifu.ibaraki.jp
+takahagi.ibaraki.jp
+tamatsukuri.ibaraki.jp
+tokai.ibaraki.jp
+tomobe.ibaraki.jp
+tone.ibaraki.jp
+toride.ibaraki.jp
+tsuchiura.ibaraki.jp
+tsukuba.ibaraki.jp
+uchihara.ibaraki.jp
+ushiku.ibaraki.jp
+yachiyo.ibaraki.jp
+yamagata.ibaraki.jp
+yawara.ibaraki.jp
+yuki.ibaraki.jp
+anamizu.ishikawa.jp
+hakui.ishikawa.jp
+hakusan.ishikawa.jp
+kaga.ishikawa.jp
+kahoku.ishikawa.jp
+kanazawa.ishikawa.jp
+kawakita.ishikawa.jp
+komatsu.ishikawa.jp
+nakanoto.ishikawa.jp
+nanao.ishikawa.jp
+nomi.ishikawa.jp
+nonoichi.ishikawa.jp
+noto.ishikawa.jp
+shika.ishikawa.jp
+suzu.ishikawa.jp
+tsubata.ishikawa.jp
+tsurugi.ishikawa.jp
+uchinada.ishikawa.jp
+wajima.ishikawa.jp
+fudai.iwate.jp
+fujisawa.iwate.jp
+hanamaki.iwate.jp
+hiraizumi.iwate.jp
+hirono.iwate.jp
+ichinohe.iwate.jp
+ichinoseki.iwate.jp
+iwaizumi.iwate.jp
+iwate.iwate.jp
+joboji.iwate.jp
+kamaishi.iwate.jp
+kanegasaki.iwate.jp
+karumai.iwate.jp
+kawai.iwate.jp
+kitakami.iwate.jp
+kuji.iwate.jp
+kunohe.iwate.jp
+kuzumaki.iwate.jp
+miyako.iwate.jp
+mizusawa.iwate.jp
+morioka.iwate.jp
+ninohe.iwate.jp
+noda.iwate.jp
+ofunato.iwate.jp
+oshu.iwate.jp
+otsuchi.iwate.jp
+rikuzentakata.iwate.jp
+shiwa.iwate.jp
+shizukuishi.iwate.jp
+sumita.iwate.jp
+tanohata.iwate.jp
+tono.iwate.jp
+yahaba.iwate.jp
+yamada.iwate.jp
+ayagawa.kagawa.jp
+higashikagawa.kagawa.jp
+kanonji.kagawa.jp
+kotohira.kagawa.jp
+manno.kagawa.jp
+marugame.kagawa.jp
+mitoyo.kagawa.jp
+naoshima.kagawa.jp
+sanuki.kagawa.jp
+tadotsu.kagawa.jp
+takamatsu.kagawa.jp
+tonosho.kagawa.jp
+uchinomi.kagawa.jp
+utazu.kagawa.jp
+zentsuji.kagawa.jp
+akune.kagoshima.jp
+amami.kagoshima.jp
+hioki.kagoshima.jp
+isa.kagoshima.jp
+isen.kagoshima.jp
+izumi.kagoshima.jp
+kagoshima.kagoshima.jp
+kanoya.kagoshima.jp
+kawanabe.kagoshima.jp
+kinko.kagoshima.jp
+kouyama.kagoshima.jp
+makurazaki.kagoshima.jp
+matsumoto.kagoshima.jp
+minamitane.kagoshima.jp
+nakatane.kagoshima.jp
+nishinoomote.kagoshima.jp
+satsumasendai.kagoshima.jp
+soo.kagoshima.jp
+tarumizu.kagoshima.jp
+yusui.kagoshima.jp
+aikawa.kanagawa.jp
+atsugi.kanagawa.jp
+ayase.kanagawa.jp
+chigasaki.kanagawa.jp
+ebina.kanagawa.jp
+fujisawa.kanagawa.jp
+hadano.kanagawa.jp
+hakone.kanagawa.jp
+hiratsuka.kanagawa.jp
+isehara.kanagawa.jp
+kaisei.kanagawa.jp
+kamakura.kanagawa.jp
+kiyokawa.kanagawa.jp
+matsuda.kanagawa.jp
+minamiashigara.kanagawa.jp
+miura.kanagawa.jp
+nakai.kanagawa.jp
+ninomiya.kanagawa.jp
+odawara.kanagawa.jp
+oi.kanagawa.jp
+oiso.kanagawa.jp
+sagamihara.kanagawa.jp
+samukawa.kanagawa.jp
+tsukui.kanagawa.jp
+yamakita.kanagawa.jp
+yamato.kanagawa.jp
+yokosuka.kanagawa.jp
+yugawara.kanagawa.jp
+zama.kanagawa.jp
+zushi.kanagawa.jp
+aki.kochi.jp
+geisei.kochi.jp
+hidaka.kochi.jp
+higashitsuno.kochi.jp
+ino.kochi.jp
+kagami.kochi.jp
+kami.kochi.jp
+kitagawa.kochi.jp
+kochi.kochi.jp
+mihara.kochi.jp
+motoyama.kochi.jp
+muroto.kochi.jp
+nahari.kochi.jp
+nakamura.kochi.jp
+nankoku.kochi.jp
+nishitosa.kochi.jp
+niyodogawa.kochi.jp
+ochi.kochi.jp
+okawa.kochi.jp
+otoyo.kochi.jp
+otsuki.kochi.jp
+sakawa.kochi.jp
+sukumo.kochi.jp
+susaki.kochi.jp
+tosa.kochi.jp
+tosashimizu.kochi.jp
+toyo.kochi.jp
+tsuno.kochi.jp
+umaji.kochi.jp
+yasuda.kochi.jp
+yusuhara.kochi.jp
+amakusa.kumamoto.jp
+arao.kumamoto.jp
+aso.kumamoto.jp
+choyo.kumamoto.jp
+gyokuto.kumamoto.jp
+hitoyoshi.kumamoto.jp
+kamiamakusa.kumamoto.jp
+kashima.kumamoto.jp
+kikuchi.kumamoto.jp
+kosa.kumamoto.jp
+kumamoto.kumamoto.jp
+mashiki.kumamoto.jp
+mifune.kumamoto.jp
+minamata.kumamoto.jp
+minamioguni.kumamoto.jp
+nagasu.kumamoto.jp
+nishihara.kumamoto.jp
+oguni.kumamoto.jp
+ozu.kumamoto.jp
+sumoto.kumamoto.jp
+takamori.kumamoto.jp
+uki.kumamoto.jp
+uto.kumamoto.jp
+yamaga.kumamoto.jp
+yamato.kumamoto.jp
+yatsushiro.kumamoto.jp
+ayabe.kyoto.jp
+fukuchiyama.kyoto.jp
+higashiyama.kyoto.jp
+ide.kyoto.jp
+ine.kyoto.jp
+joyo.kyoto.jp
+kameoka.kyoto.jp
+kamo.kyoto.jp
+kita.kyoto.jp
+kizu.kyoto.jp
+kumiyama.kyoto.jp
+kyotamba.kyoto.jp
+kyotanabe.kyoto.jp
+kyotango.kyoto.jp
+maizuru.kyoto.jp
+minami.kyoto.jp
+minamiyamashiro.kyoto.jp
+miyazu.kyoto.jp
+muko.kyoto.jp
+nagaokakyo.kyoto.jp
+nakagyo.kyoto.jp
+nantan.kyoto.jp
+oyamazaki.kyoto.jp
+sakyo.kyoto.jp
+seika.kyoto.jp
+tanabe.kyoto.jp
+uji.kyoto.jp
+ujitawara.kyoto.jp
+wazuka.kyoto.jp
+yamashina.kyoto.jp
+yawata.kyoto.jp
+asahi.mie.jp
+inabe.mie.jp
+ise.mie.jp
+kameyama.mie.jp
+kawagoe.mie.jp
+kiho.mie.jp
+kisosaki.mie.jp
+kiwa.mie.jp
+komono.mie.jp
+kumano.mie.jp
+kuwana.mie.jp
+matsusaka.mie.jp
+meiwa.mie.jp
+mihama.mie.jp
+minamiise.mie.jp
+misugi.mie.jp
+miyama.mie.jp
+nabari.mie.jp
+shima.mie.jp
+suzuka.mie.jp
+tado.mie.jp
+taiki.mie.jp
+taki.mie.jp
+tamaki.mie.jp
+toba.mie.jp
+tsu.mie.jp
+udono.mie.jp
+ureshino.mie.jp
+watarai.mie.jp
+yokkaichi.mie.jp
+furukawa.miyagi.jp
+higashimatsushima.miyagi.jp
+ishinomaki.miyagi.jp
+iwanuma.miyagi.jp
+kakuda.miyagi.jp
+kami.miyagi.jp
+kawasaki.miyagi.jp
+kesennuma.miyagi.jp
+marumori.miyagi.jp
+matsushima.miyagi.jp
+minamisanriku.miyagi.jp
+misato.miyagi.jp
+murata.miyagi.jp
+natori.miyagi.jp
+ogawara.miyagi.jp
+ohira.miyagi.jp
+onagawa.miyagi.jp
+osaki.miyagi.jp
+rifu.miyagi.jp
+semine.miyagi.jp
+shibata.miyagi.jp
+shichikashuku.miyagi.jp
+shikama.miyagi.jp
+shiogama.miyagi.jp
+shiroishi.miyagi.jp
+tagajo.miyagi.jp
+taiwa.miyagi.jp
+tome.miyagi.jp
+tomiya.miyagi.jp
+wakuya.miyagi.jp
+watari.miyagi.jp
+yamamoto.miyagi.jp
+zao.miyagi.jp
+aya.miyazaki.jp
+ebino.miyazaki.jp
+gokase.miyazaki.jp
+hyuga.miyazaki.jp
+kadogawa.miyazaki.jp
+kawaminami.miyazaki.jp
+kijo.miyazaki.jp
+kitagawa.miyazaki.jp
+kitakata.miyazaki.jp
+kitaura.miyazaki.jp
+kobayashi.miyazaki.jp
+kunitomi.miyazaki.jp
+kushima.miyazaki.jp
+mimata.miyazaki.jp
+miyakonojo.miyazaki.jp
+miyazaki.miyazaki.jp
+morotsuka.miyazaki.jp
+nichinan.miyazaki.jp
+nishimera.miyazaki.jp
+nobeoka.miyazaki.jp
+saito.miyazaki.jp
+shiiba.miyazaki.jp
+shintomi.miyazaki.jp
+takaharu.miyazaki.jp
+takanabe.miyazaki.jp
+takazaki.miyazaki.jp
+tsuno.miyazaki.jp
+achi.nagano.jp
+agematsu.nagano.jp
+anan.nagano.jp
+aoki.nagano.jp
+asahi.nagano.jp
+azumino.nagano.jp
+chikuhoku.nagano.jp
+chikuma.nagano.jp
+chino.nagano.jp
+fujimi.nagano.jp
+hakuba.nagano.jp
+hara.nagano.jp
+hiraya.nagano.jp
+iida.nagano.jp
+iijima.nagano.jp
+iiyama.nagano.jp
+iizuna.nagano.jp
+ikeda.nagano.jp
+ikusaka.nagano.jp
+ina.nagano.jp
+karuizawa.nagano.jp
+kawakami.nagano.jp
+kiso.nagano.jp
+kisofukushima.nagano.jp
+kitaaiki.nagano.jp
+komagane.nagano.jp
+komoro.nagano.jp
+matsukawa.nagano.jp
+matsumoto.nagano.jp
+miasa.nagano.jp
+minamiaiki.nagano.jp
+minamimaki.nagano.jp
+minamiminowa.nagano.jp
+minowa.nagano.jp
+miyada.nagano.jp
+miyota.nagano.jp
+mochizuki.nagano.jp
+nagano.nagano.jp
+nagawa.nagano.jp
+nagiso.nagano.jp
+nakagawa.nagano.jp
+nakano.nagano.jp
+nozawaonsen.nagano.jp
+obuse.nagano.jp
+ogawa.nagano.jp
+okaya.nagano.jp
+omachi.nagano.jp
+omi.nagano.jp
+ookuwa.nagano.jp
+ooshika.nagano.jp
+otaki.nagano.jp
+otari.nagano.jp
+sakae.nagano.jp
+sakaki.nagano.jp
+saku.nagano.jp
+sakuho.nagano.jp
+shimosuwa.nagano.jp
+shinanomachi.nagano.jp
+shiojiri.nagano.jp
+suwa.nagano.jp
+suzaka.nagano.jp
+takagi.nagano.jp
+takamori.nagano.jp
+takayama.nagano.jp
+tateshina.nagano.jp
+tatsuno.nagano.jp
+togakushi.nagano.jp
+togura.nagano.jp
+tomi.nagano.jp
+ueda.nagano.jp
+wada.nagano.jp
+yamagata.nagano.jp
+yamanouchi.nagano.jp
+yasaka.nagano.jp
+yasuoka.nagano.jp
+chijiwa.nagasaki.jp
+futsu.nagasaki.jp
+goto.nagasaki.jp
+hasami.nagasaki.jp
+hirado.nagasaki.jp
+iki.nagasaki.jp
+isahaya.nagasaki.jp
+kawatana.nagasaki.jp
+kuchinotsu.nagasaki.jp
+matsuura.nagasaki.jp
+nagasaki.nagasaki.jp
+obama.nagasaki.jp
+omura.nagasaki.jp
+oseto.nagasaki.jp
+saikai.nagasaki.jp
+sasebo.nagasaki.jp
+seihi.nagasaki.jp
+shimabara.nagasaki.jp
+shinkamigoto.nagasaki.jp
+togitsu.nagasaki.jp
+tsushima.nagasaki.jp
+unzen.nagasaki.jp
+ando.nara.jp
+gose.nara.jp
+heguri.nara.jp
+higashiyoshino.nara.jp
+ikaruga.nara.jp
+ikoma.nara.jp
+kamikitayama.nara.jp
+kanmaki.nara.jp
+kashiba.nara.jp
+kashihara.nara.jp
+katsuragi.nara.jp
+kawai.nara.jp
+kawakami.nara.jp
+kawanishi.nara.jp
+koryo.nara.jp
+kurotaki.nara.jp
+mitsue.nara.jp
+miyake.nara.jp
+nara.nara.jp
+nosegawa.nara.jp
+oji.nara.jp
+ouda.nara.jp
+oyodo.nara.jp
+sakurai.nara.jp
+sango.nara.jp
+shimoichi.nara.jp
+shimokitayama.nara.jp
+shinjo.nara.jp
+soni.nara.jp
+takatori.nara.jp
+tawaramoto.nara.jp
+tenkawa.nara.jp
+tenri.nara.jp
+uda.nara.jp
+yamatokoriyama.nara.jp
+yamatotakada.nara.jp
+yamazoe.nara.jp
+yoshino.nara.jp
+aga.niigata.jp
+agano.niigata.jp
+gosen.niigata.jp
+itoigawa.niigata.jp
+izumozaki.niigata.jp
+joetsu.niigata.jp
+kamo.niigata.jp
+kariwa.niigata.jp
+kashiwazaki.niigata.jp
+minamiuonuma.niigata.jp
+mitsuke.niigata.jp
+muika.niigata.jp
+murakami.niigata.jp
+myoko.niigata.jp
+nagaoka.niigata.jp
+niigata.niigata.jp
+ojiya.niigata.jp
+omi.niigata.jp
+sado.niigata.jp
+sanjo.niigata.jp
+seiro.niigata.jp
+seirou.niigata.jp
+sekikawa.niigata.jp
+shibata.niigata.jp
+tagami.niigata.jp
+tainai.niigata.jp
+tochio.niigata.jp
+tokamachi.niigata.jp
+tsubame.niigata.jp
+tsunan.niigata.jp
+uonuma.niigata.jp
+yahiko.niigata.jp
+yoita.niigata.jp
+yuzawa.niigata.jp
+beppu.oita.jp
+bungoono.oita.jp
+bungotakada.oita.jp
+hasama.oita.jp
+hiji.oita.jp
+himeshima.oita.jp
+hita.oita.jp
+kamitsue.oita.jp
+kokonoe.oita.jp
+kuju.oita.jp
+kunisaki.oita.jp
+kusu.oita.jp
+oita.oita.jp
+saiki.oita.jp
+taketa.oita.jp
+tsukumi.oita.jp
+usa.oita.jp
+usuki.oita.jp
+yufu.oita.jp
+akaiwa.okayama.jp
+asakuchi.okayama.jp
+bizen.okayama.jp
+hayashima.okayama.jp
+ibara.okayama.jp
+kagamino.okayama.jp
+kasaoka.okayama.jp
+kibichuo.okayama.jp
+kumenan.okayama.jp
+kurashiki.okayama.jp
+maniwa.okayama.jp
+misaki.okayama.jp
+nagi.okayama.jp
+niimi.okayama.jp
+nishiawakura.okayama.jp
+okayama.okayama.jp
+satosho.okayama.jp
+setouchi.okayama.jp
+shinjo.okayama.jp
+shoo.okayama.jp
+soja.okayama.jp
+takahashi.okayama.jp
+tamano.okayama.jp
+tsuyama.okayama.jp
+wake.okayama.jp
+yakage.okayama.jp
+aguni.okinawa.jp
+ginowan.okinawa.jp
+ginoza.okinawa.jp
+gushikami.okinawa.jp
+haebaru.okinawa.jp
+higashi.okinawa.jp
+hirara.okinawa.jp
+iheya.okinawa.jp
+ishigaki.okinawa.jp
+ishikawa.okinawa.jp
+itoman.okinawa.jp
+izena.okinawa.jp
+kadena.okinawa.jp
+kin.okinawa.jp
+kitadaito.okinawa.jp
+kitanakagusuku.okinawa.jp
+kumejima.okinawa.jp
+kunigami.okinawa.jp
+minamidaito.okinawa.jp
+motobu.okinawa.jp
+nago.okinawa.jp
+naha.okinawa.jp
+nakagusuku.okinawa.jp
+nakijin.okinawa.jp
+nanjo.okinawa.jp
+nishihara.okinawa.jp
+ogimi.okinawa.jp
+okinawa.okinawa.jp
+onna.okinawa.jp
+shimoji.okinawa.jp
+taketomi.okinawa.jp
+tarama.okinawa.jp
+tokashiki.okinawa.jp
+tomigusuku.okinawa.jp
+tonaki.okinawa.jp
+urasoe.okinawa.jp
+uruma.okinawa.jp
+yaese.okinawa.jp
+yomitan.okinawa.jp
+yonabaru.okinawa.jp
+yonaguni.okinawa.jp
+zamami.okinawa.jp
+abeno.osaka.jp
+chihayaakasaka.osaka.jp
+chuo.osaka.jp
+daito.osaka.jp
+fujiidera.osaka.jp
+habikino.osaka.jp
+hannan.osaka.jp
+higashiosaka.osaka.jp
+higashisumiyoshi.osaka.jp
+higashiyodogawa.osaka.jp
+hirakata.osaka.jp
+ibaraki.osaka.jp
+ikeda.osaka.jp
+izumi.osaka.jp
+izumiotsu.osaka.jp
+izumisano.osaka.jp
+kadoma.osaka.jp
+kaizuka.osaka.jp
+kanan.osaka.jp
+kashiwara.osaka.jp
+katano.osaka.jp
+kawachinagano.osaka.jp
+kishiwada.osaka.jp
+kita.osaka.jp
+kumatori.osaka.jp
+matsubara.osaka.jp
+minato.osaka.jp
+minoh.osaka.jp
+misaki.osaka.jp
+moriguchi.osaka.jp
+neyagawa.osaka.jp
+nishi.osaka.jp
+nose.osaka.jp
+osakasayama.osaka.jp
+sakai.osaka.jp
+sayama.osaka.jp
+sennan.osaka.jp
+settsu.osaka.jp
+shijonawate.osaka.jp
+shimamoto.osaka.jp
+suita.osaka.jp
+tadaoka.osaka.jp
+taishi.osaka.jp
+tajiri.osaka.jp
+takaishi.osaka.jp
+takatsuki.osaka.jp
+tondabayashi.osaka.jp
+toyonaka.osaka.jp
+toyono.osaka.jp
+yao.osaka.jp
+ariake.saga.jp
+arita.saga.jp
+fukudomi.saga.jp
+genkai.saga.jp
+hamatama.saga.jp
+hizen.saga.jp
+imari.saga.jp
+kamimine.saga.jp
+kanzaki.saga.jp
+karatsu.saga.jp
+kashima.saga.jp
+kitagata.saga.jp
+kitahata.saga.jp
+kiyama.saga.jp
+kouhoku.saga.jp
+kyuragi.saga.jp
+nishiarita.saga.jp
+ogi.saga.jp
+omachi.saga.jp
+ouchi.saga.jp
+saga.saga.jp
+shiroishi.saga.jp
+taku.saga.jp
+tara.saga.jp
+tosu.saga.jp
+yoshinogari.saga.jp
+arakawa.saitama.jp
+asaka.saitama.jp
+chichibu.saitama.jp
+fujimi.saitama.jp
+fujimino.saitama.jp
+fukaya.saitama.jp
+hanno.saitama.jp
+hanyu.saitama.jp
+hasuda.saitama.jp
+hatogaya.saitama.jp
+hatoyama.saitama.jp
+hidaka.saitama.jp
+higashichichibu.saitama.jp
+higashimatsuyama.saitama.jp
+honjo.saitama.jp
+ina.saitama.jp
+iruma.saitama.jp
+iwatsuki.saitama.jp
+kamiizumi.saitama.jp
+kamikawa.saitama.jp
+kamisato.saitama.jp
+kasukabe.saitama.jp
+kawagoe.saitama.jp
+kawaguchi.saitama.jp
+kawajima.saitama.jp
+kazo.saitama.jp
+kitamoto.saitama.jp
+koshigaya.saitama.jp
+kounosu.saitama.jp
+kuki.saitama.jp
+kumagaya.saitama.jp
+matsubushi.saitama.jp
+minano.saitama.jp
+misato.saitama.jp
+miyashiro.saitama.jp
+miyoshi.saitama.jp
+moroyama.saitama.jp
+nagatoro.saitama.jp
+namegawa.saitama.jp
+niiza.saitama.jp
+ogano.saitama.jp
+ogawa.saitama.jp
+ogose.saitama.jp
+okegawa.saitama.jp
+omiya.saitama.jp
+otaki.saitama.jp
+ranzan.saitama.jp
+ryokami.saitama.jp
+saitama.saitama.jp
+sakado.saitama.jp
+satte.saitama.jp
+sayama.saitama.jp
+shiki.saitama.jp
+shiraoka.saitama.jp
+soka.saitama.jp
+sugito.saitama.jp
+toda.saitama.jp
+tokigawa.saitama.jp
+tokorozawa.saitama.jp
+tsurugashima.saitama.jp
+urawa.saitama.jp
+warabi.saitama.jp
+yashio.saitama.jp
+yokoze.saitama.jp
+yono.saitama.jp
+yorii.saitama.jp
+yoshida.saitama.jp
+yoshikawa.saitama.jp
+yoshimi.saitama.jp
+aisho.shiga.jp
+gamo.shiga.jp
+higashiomi.shiga.jp
+hikone.shiga.jp
+koka.shiga.jp
+konan.shiga.jp
+kosei.shiga.jp
+koto.shiga.jp
+kusatsu.shiga.jp
+maibara.shiga.jp
+moriyama.shiga.jp
+nagahama.shiga.jp
+nishiazai.shiga.jp
+notogawa.shiga.jp
+omihachiman.shiga.jp
+otsu.shiga.jp
+ritto.shiga.jp
+ryuoh.shiga.jp
+takashima.shiga.jp
+takatsuki.shiga.jp
+torahime.shiga.jp
+toyosato.shiga.jp
+yasu.shiga.jp
+akagi.shimane.jp
+ama.shimane.jp
+gotsu.shimane.jp
+hamada.shimane.jp
+higashiizumo.shimane.jp
+hikawa.shimane.jp
+hikimi.shimane.jp
+izumo.shimane.jp
+kakinoki.shimane.jp
+masuda.shimane.jp
+matsue.shimane.jp
+misato.shimane.jp
+nishinoshima.shimane.jp
+ohda.shimane.jp
+okinoshima.shimane.jp
+okuizumo.shimane.jp
+shimane.shimane.jp
+tamayu.shimane.jp
+tsuwano.shimane.jp
+unnan.shimane.jp
+yakumo.shimane.jp
+yasugi.shimane.jp
+yatsuka.shimane.jp
+arai.shizuoka.jp
+atami.shizuoka.jp
+fuji.shizuoka.jp
+fujieda.shizuoka.jp
+fujikawa.shizuoka.jp
+fujinomiya.shizuoka.jp
+fukuroi.shizuoka.jp
+gotemba.shizuoka.jp
+haibara.shizuoka.jp
+hamamatsu.shizuoka.jp
+higashiizu.shizuoka.jp
+ito.shizuoka.jp
+iwata.shizuoka.jp
+izu.shizuoka.jp
+izunokuni.shizuoka.jp
+kakegawa.shizuoka.jp
+kannami.shizuoka.jp
+kawanehon.shizuoka.jp
+kawazu.shizuoka.jp
+kikugawa.shizuoka.jp
+kosai.shizuoka.jp
+makinohara.shizuoka.jp
+matsuzaki.shizuoka.jp
+minamiizu.shizuoka.jp
+mishima.shizuoka.jp
+morimachi.shizuoka.jp
+nishiizu.shizuoka.jp
+numazu.shizuoka.jp
+omaezaki.shizuoka.jp
+shimada.shizuoka.jp
+shimizu.shizuoka.jp
+shimoda.shizuoka.jp
+shizuoka.shizuoka.jp
+susono.shizuoka.jp
+yaizu.shizuoka.jp
+yoshida.shizuoka.jp
+ashikaga.tochigi.jp
+bato.tochigi.jp
+haga.tochigi.jp
+ichikai.tochigi.jp
+iwafune.tochigi.jp
+kaminokawa.tochigi.jp
+kanuma.tochigi.jp
+karasuyama.tochigi.jp
+kuroiso.tochigi.jp
+mashiko.tochigi.jp
+mibu.tochigi.jp
+moka.tochigi.jp
+motegi.tochigi.jp
+nasu.tochigi.jp
+nasushiobara.tochigi.jp
+nikko.tochigi.jp
+nishikata.tochigi.jp
+nogi.tochigi.jp
+ohira.tochigi.jp
+ohtawara.tochigi.jp
+oyama.tochigi.jp
+sakura.tochigi.jp
+sano.tochigi.jp
+shimotsuke.tochigi.jp
+shioya.tochigi.jp
+takanezawa.tochigi.jp
+tochigi.tochigi.jp
+tsuga.tochigi.jp
+ujiie.tochigi.jp
+utsunomiya.tochigi.jp
+yaita.tochigi.jp
+aizumi.tokushima.jp
+anan.tokushima.jp
+ichiba.tokushima.jp
+itano.tokushima.jp
+kainan.tokushima.jp
+komatsushima.tokushima.jp
+matsushige.tokushima.jp
+mima.tokushima.jp
+minami.tokushima.jp
+miyoshi.tokushima.jp
+mugi.tokushima.jp
+nakagawa.tokushima.jp
+naruto.tokushima.jp
+sanagochi.tokushima.jp
+shishikui.tokushima.jp
+tokushima.tokushima.jp
+wajiki.tokushima.jp
+adachi.tokyo.jp
+akiruno.tokyo.jp
+akishima.tokyo.jp
+aogashima.tokyo.jp
+arakawa.tokyo.jp
+bunkyo.tokyo.jp
+chiyoda.tokyo.jp
+chofu.tokyo.jp
+chuo.tokyo.jp
+edogawa.tokyo.jp
+fuchu.tokyo.jp
+fussa.tokyo.jp
+hachijo.tokyo.jp
+hachioji.tokyo.jp
+hamura.tokyo.jp
+higashikurume.tokyo.jp
+higashimurayama.tokyo.jp
+higashiyamato.tokyo.jp
+hino.tokyo.jp
+hinode.tokyo.jp
+hinohara.tokyo.jp
+inagi.tokyo.jp
+itabashi.tokyo.jp
+katsushika.tokyo.jp
+kita.tokyo.jp
+kiyose.tokyo.jp
+kodaira.tokyo.jp
+koganei.tokyo.jp
+kokubunji.tokyo.jp
+komae.tokyo.jp
+koto.tokyo.jp
+kouzushima.tokyo.jp
+kunitachi.tokyo.jp
+machida.tokyo.jp
+meguro.tokyo.jp
+minato.tokyo.jp
+mitaka.tokyo.jp
+mizuho.tokyo.jp
+musashimurayama.tokyo.jp
+musashino.tokyo.jp
+nakano.tokyo.jp
+nerima.tokyo.jp
+ogasawara.tokyo.jp
+okutama.tokyo.jp
+ome.tokyo.jp
+oshima.tokyo.jp
+ota.tokyo.jp
+setagaya.tokyo.jp
+shibuya.tokyo.jp
+shinagawa.tokyo.jp
+shinjuku.tokyo.jp
+suginami.tokyo.jp
+sumida.tokyo.jp
+tachikawa.tokyo.jp
+taito.tokyo.jp
+tama.tokyo.jp
+toshima.tokyo.jp
+chizu.tottori.jp
+hino.tottori.jp
+kawahara.tottori.jp
+koge.tottori.jp
+kotoura.tottori.jp
+misasa.tottori.jp
+nanbu.tottori.jp
+nichinan.tottori.jp
+sakaiminato.tottori.jp
+tottori.tottori.jp
+wakasa.tottori.jp
+yazu.tottori.jp
+yonago.tottori.jp
+asahi.toyama.jp
+fuchu.toyama.jp
+fukumitsu.toyama.jp
+funahashi.toyama.jp
+himi.toyama.jp
+imizu.toyama.jp
+inami.toyama.jp
+johana.toyama.jp
+kamiichi.toyama.jp
+kurobe.toyama.jp
+nakaniikawa.toyama.jp
+namerikawa.toyama.jp
+nanto.toyama.jp
+nyuzen.toyama.jp
+oyabe.toyama.jp
+taira.toyama.jp
+takaoka.toyama.jp
+tateyama.toyama.jp
+toga.toyama.jp
+tonami.toyama.jp
+toyama.toyama.jp
+unazuki.toyama.jp
+uozu.toyama.jp
+yamada.toyama.jp
+arida.wakayama.jp
+aridagawa.wakayama.jp
+gobo.wakayama.jp
+hashimoto.wakayama.jp
+hidaka.wakayama.jp
+hirogawa.wakayama.jp
+inami.wakayama.jp
+iwade.wakayama.jp
+kainan.wakayama.jp
+kamitonda.wakayama.jp
+katsuragi.wakayama.jp
+kimino.wakayama.jp
+kinokawa.wakayama.jp
+kitayama.wakayama.jp
+koya.wakayama.jp
+koza.wakayama.jp
+kozagawa.wakayama.jp
+kudoyama.wakayama.jp
+kushimoto.wakayama.jp
+mihama.wakayama.jp
+misato.wakayama.jp
+nachikatsuura.wakayama.jp
+shingu.wakayama.jp
+shirahama.wakayama.jp
+taiji.wakayama.jp
+tanabe.wakayama.jp
+wakayama.wakayama.jp
+yuasa.wakayama.jp
+yura.wakayama.jp
+asahi.yamagata.jp
+funagata.yamagata.jp
+higashine.yamagata.jp
+iide.yamagata.jp
+kahoku.yamagata.jp
+kaminoyama.yamagata.jp
+kaneyama.yamagata.jp
+kawanishi.yamagata.jp
+mamurogawa.yamagata.jp
+mikawa.yamagata.jp
+murayama.yamagata.jp
+nagai.yamagata.jp
+nakayama.yamagata.jp
+nanyo.yamagata.jp
+nishikawa.yamagata.jp
+obanazawa.yamagata.jp
+oe.yamagata.jp
+oguni.yamagata.jp
+ohkura.yamagata.jp
+oishida.yamagata.jp
+sagae.yamagata.jp
+sakata.yamagata.jp
+sakegawa.yamagata.jp
+shinjo.yamagata.jp
+shirataka.yamagata.jp
+shonai.yamagata.jp
+takahata.yamagata.jp
+tendo.yamagata.jp
+tozawa.yamagata.jp
+tsuruoka.yamagata.jp
+yamagata.yamagata.jp
+yamanobe.yamagata.jp
+yonezawa.yamagata.jp
+yuza.yamagata.jp
+abu.yamaguchi.jp
+hagi.yamaguchi.jp
+hikari.yamaguchi.jp
+hofu.yamaguchi.jp
+iwakuni.yamaguchi.jp
+kudamatsu.yamaguchi.jp
+mitou.yamaguchi.jp
+nagato.yamaguchi.jp
+oshima.yamaguchi.jp
+shimonoseki.yamaguchi.jp
+shunan.yamaguchi.jp
+tabuse.yamaguchi.jp
+tokuyama.yamaguchi.jp
+toyota.yamaguchi.jp
+ube.yamaguchi.jp
+yuu.yamaguchi.jp
+chuo.yamanashi.jp
+doshi.yamanashi.jp
+fuefuki.yamanashi.jp
+fujikawa.yamanashi.jp
+fujikawaguchiko.yamanashi.jp
+fujiyoshida.yamanashi.jp
+hayakawa.yamanashi.jp
+hokuto.yamanashi.jp
+ichikawamisato.yamanashi.jp
+kai.yamanashi.jp
+kofu.yamanashi.jp
+koshu.yamanashi.jp
+kosuge.yamanashi.jp
+minami-alps.yamanashi.jp
+minobu.yamanashi.jp
+nakamichi.yamanashi.jp
+nanbu.yamanashi.jp
+narusawa.yamanashi.jp
+nirasaki.yamanashi.jp
+nishikatsura.yamanashi.jp
+oshino.yamanashi.jp
+otsuki.yamanashi.jp
+showa.yamanashi.jp
+tabayama.yamanashi.jp
+tsuru.yamanashi.jp
+uenohara.yamanashi.jp
+yamanakako.yamanashi.jp
+yamanashi.yamanashi.jp
+
+// ke : http://www.kenic.or.ke/index.php?option=com_content&task=view&id=117&Itemid=145
+*.ke
+
+// kg : http://www.domain.kg/dmn_n.html
+kg
+org.kg
+net.kg
+com.kg
+edu.kg
+gov.kg
+mil.kg
+
+// kh : http://www.mptc.gov.kh/dns_registration.htm
+*.kh
+
+// ki : http://www.ki/dns/index.html
+ki
+edu.ki
+biz.ki
+net.ki
+org.ki
+gov.ki
+info.ki
+com.ki
+
+// km : http://en.wikipedia.org/wiki/.km
+// http://www.domaine.km/documents/charte.doc
+km
+org.km
+nom.km
+gov.km
+prd.km
+tm.km
+edu.km
+mil.km
+ass.km
+com.km
+// These are only mentioned as proposed suggestions at domaine.km, but
+// http://en.wikipedia.org/wiki/.km says they're available for registration:
+coop.km
+asso.km
+presse.km
+medecin.km
+notaires.km
+pharmaciens.km
+veterinaire.km
+gouv.km
+
+// kn : http://en.wikipedia.org/wiki/.kn
+// http://www.dot.kn/domainRules.html
+kn
+net.kn
+org.kn
+edu.kn
+gov.kn
+
+// kp : http://www.kcce.kp/en_index.php
+kp
+com.kp
+edu.kp
+gov.kp
+org.kp
+rep.kp
+tra.kp
+
+// kr : http://en.wikipedia.org/wiki/.kr
+// see also: http://domain.nida.or.kr/eng/registration.jsp
+kr
+ac.kr
+co.kr
+es.kr
+go.kr
+hs.kr
+kg.kr
+mil.kr
+ms.kr
+ne.kr
+or.kr
+pe.kr
+re.kr
+sc.kr
+// kr geographical names
+busan.kr
+chungbuk.kr
+chungnam.kr
+daegu.kr
+daejeon.kr
+gangwon.kr
+gwangju.kr
+gyeongbuk.kr
+gyeonggi.kr
+gyeongnam.kr
+incheon.kr
+jeju.kr
+jeonbuk.kr
+jeonnam.kr
+seoul.kr
+ulsan.kr
+
+// kw : http://en.wikipedia.org/wiki/.kw
+*.kw
+
+// ky : http://www.icta.ky/da_ky_reg_dom.php
+// Confirmed by registry <ky...@perimeterusa.com> 2008-06-17
+ky
+edu.ky
+gov.ky
+com.ky
+org.ky
+net.ky
+
+// kz : http://en.wikipedia.org/wiki/.kz
+// see also: http://www.nic.kz/rules/index.jsp
+kz
+org.kz
+edu.kz
+net.kz
+gov.kz
+mil.kz
+com.kz
+
+// la : http://en.wikipedia.org/wiki/.la
+// Submitted by registry <ga...@nic.la> 2008-06-10
+la
+int.la
+net.la
+info.la
+edu.la
+gov.la
+per.la
+com.la
+org.la
+
+// lb : http://en.wikipedia.org/wiki/.lb
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lb
+com.lb
+edu.lb
+gov.lb
+net.lb
+org.lb
+
+// lc : http://en.wikipedia.org/wiki/.lc
+// see also: http://www.nic.lc/rules.htm
+lc
+com.lc
+net.lc
+co.lc
+org.lc
+edu.lc
+gov.lc
+
+// li : http://en.wikipedia.org/wiki/.li
+li
+
+// lk : http://www.nic.lk/seclevpr.html
+lk
+gov.lk
+sch.lk
+net.lk
+int.lk
+com.lk
+org.lk
+edu.lk
+ngo.lk
+soc.lk
+web.lk
+ltd.lk
+assn.lk
+grp.lk
+hotel.lk
+
+// lr : http://psg.com/dns/lr/lr.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lr
+com.lr
+edu.lr
+gov.lr
+org.lr
+net.lr
+
+// ls : http://en.wikipedia.org/wiki/.ls
+ls
+co.ls
+org.ls
+
+// lt : http://en.wikipedia.org/wiki/.lt
+lt
+// gov.lt : http://www.gov.lt/index_en.php
+gov.lt
+
+// lu : http://www.dns.lu/en/
+lu
+
+// lv : http://www.nic.lv/DNS/En/generic.php
+lv
+com.lv
+edu.lv
+gov.lv
+org.lv
+mil.lv
+id.lv
+net.lv
+asn.lv
+conf.lv
+
+// ly : http://www.nic.ly/regulations.php
+ly
+com.ly
+net.ly
+gov.ly
+plc.ly
+edu.ly
+sch.ly
+med.ly
+org.ly
+id.ly
+
+// ma : http://en.wikipedia.org/wiki/.ma
+// http://www.anrt.ma/fr/admin/download/upload/file_fr782.pdf
+ma
+co.ma
+net.ma
+gov.ma
+org.ma
+ac.ma
+press.ma
+
+// mc : http://www.nic.mc/
+mc
+tm.mc
+asso.mc
+
+// md : http://en.wikipedia.org/wiki/.md
+md
+
+// me : http://en.wikipedia.org/wiki/.me
+me
+co.me
+net.me
+org.me
+edu.me
+ac.me
+gov.me
+its.me
+priv.me
+
+// mg : http://www.nic.mg/tarif.htm
+mg
+org.mg
+nom.mg
+gov.mg
+prd.mg
+tm.mg
+edu.mg
+mil.mg
+com.mg
+
+// mh : http://en.wikipedia.org/wiki/.mh
+mh
+
+// mil : http://en.wikipedia.org/wiki/.mil
+mil
+
+// mk : http://en.wikipedia.org/wiki/.mk
+// see also: http://dns.marnet.net.mk/postapka.php
+mk
+com.mk
+org.mk
+net.mk
+edu.mk
+gov.mk
+inf.mk
+name.mk
+
+// ml : http://www.gobin.info/domainname/ml-template.doc
+// see also: http://en.wikipedia.org/wiki/.ml
+ml
+com.ml
+edu.ml
+gouv.ml
+gov.ml
+net.ml
+org.ml
+presse.ml
+
+// mm : http://en.wikipedia.org/wiki/.mm
+*.mm
+
+// mn : http://en.wikipedia.org/wiki/.mn
+mn
+gov.mn
+edu.mn
+org.mn
+
+// mo : http://www.monic.net.mo/
+mo
+com.mo
+net.mo
+org.mo
+edu.mo
+gov.mo
+
+// mobi : http://en.wikipedia.org/wiki/.mobi
+mobi
+
+// mp : http://www.dot.mp/
+// Confirmed by registry <dc...@saipan.com> 2008-06-17
+mp
+
+// mq : http://en.wikipedia.org/wiki/.mq
+mq
+
+// mr : http://en.wikipedia.org/wiki/.mr
+mr
+gov.mr
+
+// ms : http://www.nic.ms/pdf/MS_Domain_Name_Rules.pdf
+ms
+com.ms
+edu.ms
+gov.ms
+net.ms
+org.ms
+
+// mt : https://www.nic.org.mt/go/policy
+// Submitted by registry <he...@nic.org.mt> 2013-11-19
+mt
+com.mt
+edu.mt
+net.mt
+org.mt
+
+// mu : http://en.wikipedia.org/wiki/.mu
+mu
+com.mu
+net.mu
+org.mu
+gov.mu
+ac.mu
+co.mu
+or.mu
+
+// museum : http://about.museum/naming/
+// http://index.museum/
+museum
+academy.museum
+agriculture.museum
+air.museum
+airguard.museum
+alabama.museum
+alaska.museum
+amber.museum
+ambulance.museum
+american.museum
+americana.museum
+americanantiques.museum
+americanart.museum
+amsterdam.museum
+and.museum
+annefrank.museum
+anthro.museum
+anthropology.museum
+antiques.museum
+aquarium.museum
+arboretum.museum
+archaeological.museum
+archaeology.museum
+architecture.museum
+art.museum
+artanddesign.museum
+artcenter.museum
+artdeco.museum
+arteducation.museum
+artgallery.museum
+arts.museum
+artsandcrafts.museum
+asmatart.museum
+assassination.museum
+assisi.museum
+association.museum
+astronomy.museum
+atlanta.museum
+austin.museum
+australia.museum
+automotive.museum
+aviation.museum
+axis.museum
+badajoz.museum
+baghdad.museum
+bahn.museum
+bale.museum
+baltimore.museum
+barcelona.museum
+baseball.museum
+basel.museum
+baths.museum
+bauern.museum
+beauxarts.museum
+beeldengeluid.museum
+bellevue.museum
+bergbau.museum
+berkeley.museum
+berlin.museum
+bern.museum
+bible.museum
+bilbao.museum
+bill.museum
+birdart.museum
+birthplace.museum
+bonn.museum
+boston.museum
+botanical.museum
+botanicalgarden.museum
+botanicgarden.museum
+botany.museum
+brandywinevalley.museum
+brasil.museum
+bristol.museum
+british.museum
+britishcolumbia.museum
+broadcast.museum
+brunel.museum
+brussel.museum
+brussels.museum
+bruxelles.museum
+building.museum
+burghof.museum
+bus.museum
+bushey.museum
+cadaques.museum
+california.museum
+cambridge.museum
+can.museum
+canada.museum
+capebreton.museum
+carrier.museum
+cartoonart.museum
+casadelamoneda.museum
+castle.museum
+castres.museum
+celtic.museum
+center.museum
+chattanooga.museum
+cheltenham.museum
+chesapeakebay.museum
+chicago.museum
+children.museum
+childrens.museum
+childrensgarden.museum
+chiropractic.museum
+chocolate.museum
+christiansburg.museum
+cincinnati.museum
+cinema.museum
+circus.museum
+civilisation.museum
+civilization.museum
+civilwar.museum
+clinton.museum
+clock.museum
+coal.museum
+coastaldefence.museum
+cody.museum
+coldwar.museum
+collection.museum
+colonialwilliamsburg.museum
+coloradoplateau.museum
+columbia.museum
+columbus.museum
+communication.museum
+communications.museum
+community.museum
+computer.museum
+computerhistory.museum
+comunicações.museum
+contemporary.museum
+contemporaryart.museum
+convent.museum
+copenhagen.museum
+corporation.museum
+correios-e-telecomunicações.museum
+corvette.museum
+costume.museum
+countryestate.museum
+county.museum
+crafts.museum
+cranbrook.museum
+creation.museum
+cultural.museum
+culturalcenter.museum
+culture.museum
+cyber.museum
+cymru.museum
+dali.museum
+dallas.museum
+database.museum
+ddr.museum
+decorativearts.museum
+delaware.museum
+delmenhorst.museum
+denmark.museum
+depot.museum
+design.museum
+detroit.museum
+dinosaur.museum
+discovery.museum
+dolls.museum
+donostia.museum
+durham.museum
+eastafrica.museum
+eastcoast.museum
+education.museum
+educational.museum
+egyptian.museum
+eisenbahn.museum
+elburg.museum
+elvendrell.museum
+embroidery.museum
+encyclopedic.museum
+england.museum
+entomology.museum
+environment.museum
+environmentalconservation.museum
+epilepsy.museum
+essex.museum
+estate.museum
+ethnology.museum
+exeter.museum
+exhibition.museum
+family.museum
+farm.museum
+farmequipment.museum
+farmers.museum
+farmstead.museum
+field.museum
+figueres.museum
+filatelia.museum
+film.museum
+fineart.museum
+finearts.museum
+finland.museum
+flanders.museum
+florida.museum
+force.museum
+fortmissoula.museum
+fortworth.museum
+foundation.museum
+francaise.museum
+frankfurt.museum
+franziskaner.museum
+freemasonry.museum
+freiburg.museum
+fribourg.museum
+frog.museum
+fundacio.museum
+furniture.museum
+gallery.museum
+garden.museum
+gateway.museum
+geelvinck.museum
+gemological.museum
+geology.museum
+georgia.museum
+giessen.museum
+glas.museum
+glass.museum
+gorge.museum
+grandrapids.museum
+graz.museum
+guernsey.museum
+halloffame.museum
+hamburg.museum
+handson.museum
+harvestcelebration.museum
+hawaii.museum
+health.museum
+heimatunduhren.museum
+hellas.museum
+helsinki.museum
+hembygdsforbund.museum
+heritage.museum
+histoire.museum
+historical.museum
+historicalsociety.museum
+historichouses.museum
+historisch.museum
+historisches.museum
+history.museum
+historyofscience.museum
+horology.museum
+house.museum
+humanities.museum
+illustration.museum
+imageandsound.museum
+indian.museum
+indiana.museum
+indianapolis.museum
+indianmarket.museum
+intelligence.museum
+interactive.museum
+iraq.museum
+iron.museum
+isleofman.museum
+jamison.museum
+jefferson.museum
+jerusalem.museum
+jewelry.museum
+jewish.museum
+jewishart.museum
+jfk.museum
+journalism.museum
+judaica.museum
+judygarland.museum
+juedisches.museum
+juif.museum
+karate.museum
+karikatur.museum
+kids.museum
+koebenhavn.museum
+koeln.museum
+kunst.museum
+kunstsammlung.museum
+kunstunddesign.museum
+labor.museum
+labour.museum
+lajolla.museum
+lancashire.museum
+landes.museum
+lans.museum
+läns.museum
+larsson.museum
+lewismiller.museum
+lincoln.museum
+linz.museum
+living.museum
+livinghistory.museum
+localhistory.museum
+london.museum
+losangeles.museum
+louvre.museum
+loyalist.museum
+lucerne.museum
+luxembourg.museum
+luzern.museum
+mad.museum
+madrid.museum
+mallorca.museum
+manchester.museum
+mansion.museum
+mansions.museum
+manx.museum
+marburg.museum
+maritime.museum
+maritimo.museum
+maryland.museum
+marylhurst.museum
+media.museum
+medical.museum
+medizinhistorisches.museum
+meeres.museum
+memorial.museum
+mesaverde.museum
+michigan.museum
+midatlantic.museum
+military.museum
+mill.museum
+miners.museum
+mining.museum
+minnesota.museum
+missile.museum
+missoula.museum
+modern.museum
+moma.museum
+money.museum
+monmouth.museum
+monticello.museum
+montreal.museum
+moscow.museum
+motorcycle.museum
+muenchen.museum
+muenster.museum
+mulhouse.museum
+muncie.museum
+museet.museum
+museumcenter.museum
+museumvereniging.museum
+music.museum
+national.museum
+nationalfirearms.museum
+nationalheritage.museum
+nativeamerican.museum
+naturalhistory.museum
+naturalhistorymuseum.museum
+naturalsciences.museum
+nature.museum
+naturhistorisches.museum
+natuurwetenschappen.museum
+naumburg.museum
+naval.museum
+nebraska.museum
+neues.museum
+newhampshire.museum
+newjersey.museum
+newmexico.museum
+newport.museum
+newspaper.museum
+newyork.museum
+niepce.museum
+norfolk.museum
+north.museum
+nrw.museum
+nuernberg.museum
+nuremberg.museum
+nyc.museum
+nyny.museum
+oceanographic.museum
+oceanographique.museum
+omaha.museum
+online.museum
+ontario.museum
+openair.museum
+oregon.museum
+oregontrail.museum
+otago.museum
+oxford.museum
+pacific.museum
+paderborn.museum
+palace.museum
+paleo.museum
+palmsprings.museum
+panama.museum
+paris.museum
+pasadena.museum
+pharmacy.museum
+philadelphia.museum
+philadelphiaarea.museum
+philately.museum
+phoenix.museum
+photography.museum
+pilots.museum
+pittsburgh.museum
+planetarium.museum
+plantation.museum
+plants.museum
+plaza.museum
+portal.museum
+portland.museum
+portlligat.museum
+posts-and-telecommunications.museum
+preservation.museum
+presidio.museum
+press.museum
+project.museum
+public.museum
+pubol.museum
+quebec.museum
+railroad.museum
+railway.museum
+research.museum
+resistance.museum
+riodejaneiro.museum
+rochester.museum
+rockart.museum
+roma.museum
+russia.museum
+saintlouis.museum
+salem.museum
+salvadordali.museum
+salzburg.museum
+sandiego.museum
+sanfrancisco.museum
+santabarbara.museum
+santacruz.museum
+santafe.museum
+saskatchewan.museum
+satx.museum
+savannahga.museum
+schlesisches.museum
+schoenbrunn.museum
+schokoladen.museum
+school.museum
+schweiz.museum
+science.museum
+scienceandhistory.museum
+scienceandindustry.museum
+sciencecenter.museum
+sciencecenters.museum
+science-fiction.museum
+sciencehistory.museum
+sciences.museum
+sciencesnaturelles.museum
+scotland.museum
+seaport.museum
+settlement.museum
+settlers.museum
+shell.museum
+sherbrooke.museum
+sibenik.museum
+silk.museum
+ski.museum
+skole.museum
+society.museum
+sologne.museum
+soundandvision.museum
+southcarolina.museum
+southwest.museum
+space.museum
+spy.museum
+square.museum
+stadt.museum
+stalbans.museum
+starnberg.museum
+state.museum
+stateofdelaware.museum
+station.museum
+steam.museum
+steiermark.museum
+stjohn.museum
+stockholm.museum
+stpetersburg.museum
+stuttgart.museum
+suisse.museum
+surgeonshall.museum
+surrey.museum
+svizzera.museum
+sweden.museum
+sydney.museum
+tank.museum
+tcm.museum
+technology.museum
+telekommunikation.museum
+television.museum
+texas.museum
+textile.museum
+theater.museum
+time.museum
+timekeeping.museum
+topology.museum
+torino.museum
+touch.museum
+town.museum
+transport.museum
+tree.museum
+trolley.museum
+trust.museum
+trustee.museum
+uhren.museum
+ulm.museum
+undersea.museum
+university.museum
+usa.museum
+usantiques.museum
+usarts.museum
+uscountryestate.museum
+usculture.museum
+usdecorativearts.museum
+usgarden.museum
+ushistory.museum
+ushuaia.museum
+uslivinghistory.museum
+utah.museum
+uvic.museum
+valley.museum
+vantaa.museum
+versailles.museum
+viking.museum
+village.museum
+virginia.museum
+virtual.museum
+virtuel.museum
+vlaanderen.museum
+volkenkunde.museum
+wales.museum
+wallonie.museum
+war.museum
+washingtondc.museum
+watchandclock.museum
+watch-and-clock.museum
+western.museum
+westfalen.museum
+whaling.museum
+wildlife.museum
+williamsburg.museum
+windmill.museum
+workshop.museum
+york.museum
+yorkshire.museum
+yosemite.museum
+youth.museum
+zoological.museum
+zoology.museum
+ירושלים.museum
+иком.museum
+
+// mv : http://en.wikipedia.org/wiki/.mv
+// "mv" included because, contra Wikipedia, google.mv exists.
+mv
+aero.mv
+biz.mv
+com.mv
+coop.mv
+edu.mv
+gov.mv
+info.mv
+int.mv
+mil.mv
+museum.mv
+name.mv
+net.mv
+org.mv
+pro.mv
+
+// mw : http://www.registrar.mw/
+mw
+ac.mw
+biz.mw
+co.mw
+com.mw
+coop.mw
+edu.mw
+gov.mw
+int.mw
+museum.mw
+net.mw
+org.mw
+
+// mx : http://www.nic.mx/
+// Submitted by registry <fa...@nic.mx> 2008-06-19
+mx
+com.mx
+org.mx
+gob.mx
+edu.mx
+net.mx
+
+// my : http://www.mynic.net.my/
+my
+com.my
+net.my
+org.my
+gov.my
+edu.my
+mil.my
+name.my
+
+// mz : http://www.gobin.info/domainname/mz-template.doc
+*.mz
+!teledata.mz
+
+// na : http://www.na-nic.com.na/
+// http://www.info.na/domain/
+na
+info.na
+pro.na
+name.na
+school.na
+or.na
+dr.na
+us.na
+mx.na
+ca.na
+in.na
+cc.na
+tv.na
+ws.na
+mobi.na
+co.na
+com.na
+org.na
+
+// name : has 2nd-level tlds, but there's no list of them
+name
+
+// nc : http://www.cctld.nc/
+nc
+asso.nc
+
+// ne : http://en.wikipedia.org/wiki/.ne
+ne
+
+// net : http://en.wikipedia.org/wiki/.net
+net
+
+// nf : http://en.wikipedia.org/wiki/.nf
+nf
+com.nf
+net.nf
+per.nf
+rec.nf
+web.nf
+arts.nf
+firm.nf
+info.nf
+other.nf
+store.nf
+
+// ng : http://psg.com/dns/ng/
+ng
+com.ng
+edu.ng
+name.ng
+net.ng
+org.ng
+sch.ng
+gov.ng
+mil.ng
+mobi.ng
+
+// ni : http://www.nic.ni/dominios.htm
+*.ni
+
+// nl : http://www.domain-registry.nl/ace.php/c,728,122,,,,Home.html
+// Confirmed by registry <An...@sidn.nl> (with technical
+// reservations) 2008-06-08
+nl
+
+// BV.nl will be a registry for dutch BV's (besloten vennootschap)
+bv.nl
+
+// no : http://www.norid.no/regelverk/index.en.html
+// The Norwegian registry has declined to notify us of updates. The web pages
+// referenced below are the official source of the data. There is also an
+// announce mailing list:
+// https://postlister.uninett.no/sympa/info/norid-diskusjon
+no
+// Norid generic domains : http://www.norid.no/regelverk/vedlegg-c.en.html
+fhs.no
+vgs.no
+fylkesbibl.no
+folkebibl.no
+museum.no
+idrett.no
+priv.no
+// Non-Norid generic domains : http://www.norid.no/regelverk/vedlegg-d.en.html
+mil.no
+stat.no
+dep.no
+kommune.no
+herad.no
+// no geographical names : http://www.norid.no/regelverk/vedlegg-b.en.html
+// counties
+aa.no
+ah.no
+bu.no
+fm.no
+hl.no
+hm.no
+jan-mayen.no
+mr.no
+nl.no
+nt.no
+of.no
+ol.no
+oslo.no
+rl.no
+sf.no
+st.no
+svalbard.no
+tm.no
+tr.no
+va.no
+vf.no
+// primary and lower secondary schools per county
+gs.aa.no
+gs.ah.no
+gs.bu.no
+gs.fm.no
+gs.hl.no
+gs.hm.no
+gs.jan-mayen.no
+gs.mr.no
+gs.nl.no
+gs.nt.no
+gs.of.no
+gs.ol.no
+gs.oslo.no
+gs.rl.no
+gs.sf.no
+gs.st.no
+gs.svalbard.no
+gs.tm.no
+gs.tr.no
+gs.va.no
+gs.vf.no
+// cities
+akrehamn.no
+åkrehamn.no
+algard.no
+ålgård.no
+arna.no
+brumunddal.no
+bryne.no
+bronnoysund.no
+brønnøysund.no
+drobak.no
+drøbak.no
+egersund.no
+fetsund.no
+floro.no
+florø.no
+fredrikstad.no
+hokksund.no
+honefoss.no
+hønefoss.no
+jessheim.no
+jorpeland.no
+jørpeland.no
+kirkenes.no
+kopervik.no
+krokstadelva.no
+langevag.no
+langevåg.no
+leirvik.no
+mjondalen.no
+mjøndalen.no
+mo-i-rana.no
+mosjoen.no
+mosjøen.no
+nesoddtangen.no
+orkanger.no
+osoyro.no
+osøyro.no
+raholt.no
+råholt.no
+sandnessjoen.no
+sandnessjøen.no
+skedsmokorset.no
+slattum.no
+spjelkavik.no
+stathelle.no
+stavern.no
+stjordalshalsen.no
+stjørdalshalsen.no
+tananger.no
+tranby.no
+vossevangen.no
+// communities
+afjord.no
+åfjord.no
+agdenes.no
+al.no
+ål.no
+alesund.no
+ålesund.no
+alstahaug.no
+alta.no
+áltá.no
+alaheadju.no
+álaheadju.no
+alvdal.no
+amli.no
+åmli.no
+amot.no
+åmot.no
+andebu.no
+andoy.no
+andøy.no
+andasuolo.no
+ardal.no
+årdal.no
+aremark.no
+arendal.no
+ås.no
+aseral.no
+åseral.no
+asker.no
+askim.no
+askvoll.no
+askoy.no
+askøy.no
+asnes.no
+åsnes.no
+audnedaln.no
+aukra.no
+aure.no
+aurland.no
+aurskog-holand.no
+aurskog-høland.no
+austevoll.no
+austrheim.no
+averoy.no
+averøy.no
+balestrand.no
+ballangen.no
+balat.no
+bálát.no
+balsfjord.no
+bahccavuotna.no
+báhccavuotna.no
+bamble.no
+bardu.no
+beardu.no
+beiarn.no
+bajddar.no
+bájddar.no
+baidar.no
+báidár.no
+berg.no
+bergen.no
+berlevag.no
+berlevåg.no
+bearalvahki.no
+bearalváhki.no
+bindal.no
+birkenes.no
+bjarkoy.no
+bjarkøy.no
+bjerkreim.no
+bjugn.no
+bodo.no
+bodø.no
+badaddja.no
+bådåddjå.no
+budejju.no
+bokn.no
+bremanger.no
+bronnoy.no
+brønnøy.no
+bygland.no
+bykle.no
+barum.no
+bærum.no
+bo.telemark.no
+bø.telemark.no
+bo.nordland.no
+bø.nordland.no
+bievat.no
+bievát.no
+bomlo.no
+bømlo.no
+batsfjord.no
+båtsfjord.no
+bahcavuotna.no
+báhcavuotna.no
+dovre.no
+drammen.no
+drangedal.no
+dyroy.no
+dyrøy.no
+donna.no
+dønna.no
+eid.no
+eidfjord.no
+eidsberg.no
+eidskog.no
+eidsvoll.no
+eigersund.no
+elverum.no
+enebakk.no
+engerdal.no
+etne.no
+etnedal.no
+evenes.no
+evenassi.no
+evenášši.no
+evje-og-hornnes.no
+farsund.no
+fauske.no
+fuossko.no
+fuoisku.no
+fedje.no
+fet.no
+finnoy.no
+finnøy.no
+fitjar.no
+fjaler.no
+fjell.no
+flakstad.no
+flatanger.no
+flekkefjord.no
+flesberg.no
+flora.no
+fla.no
+flå.no
+folldal.no
+forsand.no
+fosnes.no
+frei.no
+frogn.no
+froland.no
+frosta.no
+frana.no
+fræna.no
+froya.no
+frøya.no
+fusa.no
+fyresdal.no
+forde.no
+førde.no
+gamvik.no
+gangaviika.no
+gáŋgaviika.no
+gaular.no
+gausdal.no
+gildeskal.no
+gildeskål.no
+giske.no
+gjemnes.no
+gjerdrum.no
+gjerstad.no
+gjesdal.no
+gjovik.no
+gjøvik.no
+gloppen.no
+gol.no
+gran.no
+grane.no
+granvin.no
+gratangen.no
+grimstad.no
+grong.no
+kraanghke.no
+kråanghke.no
+grue.no
+gulen.no
+hadsel.no
+halden.no
+halsa.no
+hamar.no
+hamaroy.no
+habmer.no
+hábmer.no
+hapmir.no
+hápmir.no
+hammerfest.no
+hammarfeasta.no
+hámmárfeasta.no
+haram.no
+hareid.no
+harstad.no
+hasvik.no
+aknoluokta.no
+ákŋoluokta.no
+hattfjelldal.no
+aarborte.no
+haugesund.no
+hemne.no
+hemnes.no
+hemsedal.no
+heroy.more-og-romsdal.no
+herøy.møre-og-romsdal.no
+heroy.nordland.no
+herøy.nordland.no
+hitra.no
+hjartdal.no
+hjelmeland.no
+hobol.no
+hobøl.no
+hof.no
+hol.no
+hole.no
+holmestrand.no
+holtalen.no
+holtålen.no
+hornindal.no
+horten.no
+hurdal.no
+hurum.no
+hvaler.no
+hyllestad.no
+hagebostad.no
+hægebostad.no
+hoyanger.no
+høyanger.no
+hoylandet.no
+høylandet.no
+ha.no
+hå.no
+ibestad.no
+inderoy.no
+inderøy.no
+iveland.no
+jevnaker.no
+jondal.no
+jolster.no
+jølster.no
+karasjok.no
+karasjohka.no
+kárášjohka.no
+karlsoy.no
+galsa.no
+gálsá.no
+karmoy.no
+karmøy.no
+kautokeino.no
+guovdageaidnu.no
+klepp.no
+klabu.no
+klæbu.no
+kongsberg.no
+kongsvinger.no
+kragero.no
+kragerø.no
+kristiansand.no
+kristiansund.no
+krodsherad.no
+krødsherad.no
+kvalsund.no
+rahkkeravju.no
+ráhkkerávju.no
+kvam.no
+kvinesdal.no
+kvinnherad.no
+kviteseid.no
+kvitsoy.no
+kvitsøy.no
+kvafjord.no
+kvæfjord.no
+giehtavuoatna.no
+kvanangen.no
+kvænangen.no
+navuotna.no
+návuotna.no
+kafjord.no
+kåfjord.no
+gaivuotna.no
+gáivuotna.no
+larvik.no
+lavangen.no
+lavagis.no
+loabat.no
+loabát.no
+lebesby.no
+davvesiida.no
+leikanger.no
+leirfjord.no
+leka.no
+leksvik.no
+lenvik.no
+leangaviika.no
+leaŋgaviika.no
+lesja.no
+levanger.no
+lier.no
+lierne.no
+lillehammer.no
+lillesand.no
+lindesnes.no
+lindas.no
+lindås.no
+lom.no
+loppa.no
+lahppi.no
+láhppi.no
+lund.no
+lunner.no
+luroy.no
+lurøy.no
+luster.no
+lyngdal.no
+lyngen.no
+ivgu.no
+lardal.no
+lerdal.no
+lærdal.no
+lodingen.no
+lødingen.no
+lorenskog.no
+lørenskog.no
+loten.no
+løten.no
+malvik.no
+masoy.no
+måsøy.no
+muosat.no
+muosát.no
+mandal.no
+marker.no
+marnardal.no
+masfjorden.no
+meland.no
+meldal.no
+melhus.no
+meloy.no
+meløy.no
+meraker.no
+meråker.no
+moareke.no
+moåreke.no
+midsund.no
+midtre-gauldal.no
+modalen.no
+modum.no
+molde.no
+moskenes.no
+moss.no
+mosvik.no
+malselv.no
+målselv.no
+malatvuopmi.no
+málatvuopmi.no
+namdalseid.no
+aejrie.no
+namsos.no
+namsskogan.no
+naamesjevuemie.no
+nååmesjevuemie.no
+laakesvuemie.no
+nannestad.no
+narvik.no
+narviika.no
+naustdal.no
+nedre-eiker.no
+nes.akershus.no
+nes.buskerud.no
+nesna.no
+nesodden.no
+nesseby.no
+unjarga.no
+unjárga.no
+nesset.no
+nissedal.no
+nittedal.no
+nord-aurdal.no
+nord-fron.no
+nord-odal.no
+norddal.no
+nordkapp.no
+davvenjarga.no
+davvenjárga.no
+nordre-land.no
+nordreisa.no
+raisa.no
+ráisa.no
+nore-og-uvdal.no
+notodden.no
+naroy.no
+nærøy.no
+notteroy.no
+nøtterøy.no
+odda.no
+oksnes.no
+øksnes.no
+oppdal.no
+oppegard.no
+oppegård.no
+orkdal.no
+orland.no
+ørland.no
+orskog.no
+ørskog.no
+orsta.no
+ørsta.no
+os.hedmark.no
+os.hordaland.no
+osen.no
+osteroy.no
+osterøy.no
+ostre-toten.no
+østre-toten.no
+overhalla.no
+ovre-eiker.no
+øvre-eiker.no
+oyer.no
+øyer.no
+oygarden.no
+øygarden.no
+oystre-slidre.no
+øystre-slidre.no
+porsanger.no
+porsangu.no
+porsáŋgu.no
+porsgrunn.no
+radoy.no
+radøy.no
+rakkestad.no
+rana.no
+ruovat.no
+randaberg.no
+rauma.no
+rendalen.no
+rennebu.no
+rennesoy.no
+rennesøy.no
+rindal.no
+ringebu.no
+ringerike.no
+ringsaker.no
+rissa.no
+risor.no
+risør.no
+roan.no
+rollag.no
+rygge.no
+ralingen.no
+rælingen.no
+rodoy.no
+rødøy.no
+romskog.no
+rømskog.no
+roros.no
+røros.no
+rost.no
+røst.no
+royken.no
+røyken.no
+royrvik.no
+røyrvik.no
+rade.no
+råde.no
+salangen.no
+siellak.no
+saltdal.no
+salat.no
+sálát.no
+sálat.no
+samnanger.no
+sande.more-og-romsdal.no
+sande.møre-og-romsdal.no
+sande.vestfold.no
+sandefjord.no
+sandnes.no
+sandoy.no
+sandøy.no
+sarpsborg.no
+sauda.no
+sauherad.no
+sel.no
+selbu.no
+selje.no
+seljord.no
+sigdal.no
+siljan.no
+sirdal.no
+skaun.no
+skedsmo.no
+ski.no
+skien.no
+skiptvet.no
+skjervoy.no
+skjervøy.no
+skierva.no
+skiervá.no
+skjak.no
+skjåk.no
+skodje.no
+skanland.no
+skånland.no
+skanit.no
+skánit.no
+smola.no
+smøla.no
+snillfjord.no
+snasa.no
+snåsa.no
+snoasa.no
+snaase.no
+snåase.no
+sogndal.no
+sokndal.no
+sola.no
+solund.no
+songdalen.no
+sortland.no
+spydeberg.no
+stange.no
+stavanger.no
+steigen.no
+steinkjer.no
+stjordal.no
+stjørdal.no
+stokke.no
+stor-elvdal.no
+stord.no
+stordal.no
+storfjord.no
+omasvuotna.no
+strand.no
+stranda.no
+stryn.no
+sula.no
+suldal.no
+sund.no
+sunndal.no
+surnadal.no
+sveio.no
+svelvik.no
+sykkylven.no
+sogne.no
+søgne.no
+somna.no
+sømna.no
+sondre-land.no
+søndre-land.no
+sor-aurdal.no
+sør-aurdal.no
+sor-fron.no
+sør-fron.no
+sor-odal.no
+sør-odal.no
+sor-varanger.no
+sør-varanger.no
+matta-varjjat.no
+mátta-várjjat.no
+sorfold.no
+sørfold.no
+sorreisa.no
+sørreisa.no
+sorum.no
+sørum.no
+tana.no
+deatnu.no
+time.no
+tingvoll.no
+tinn.no
+tjeldsund.no
+dielddanuorri.no
+tjome.no
+tjøme.no
+tokke.no
+tolga.no
+torsken.no
+tranoy.no
+tranøy.no
+tromso.no
+tromsø.no
+tromsa.no
+romsa.no
+trondheim.no
+troandin.no
+trysil.no
+trana.no
+træna.no
+trogstad.no
+trøgstad.no
+tvedestrand.no
+tydal.no
+tynset.no
+tysfjord.no
+divtasvuodna.no
+divttasvuotna.no
+tysnes.no
+tysvar.no
+tysvær.no
+tonsberg.no
+tønsberg.no
+ullensaker.no
+ullensvang.no
+ulvik.no
+utsira.no
+vadso.no
+vadsø.no
+cahcesuolo.no
+čáhcesuolo.no
+vaksdal.no
+valle.no
+vang.no
+vanylven.no
+vardo.no
+vardø.no
+varggat.no
+várggát.no
+vefsn.no
+vaapste.no
+vega.no
+vegarshei.no
+vegårshei.no
+vennesla.no
+verdal.no
+verran.no
+vestby.no
+vestnes.no
+vestre-slidre.no
+vestre-toten.no
+vestvagoy.no
+vestvågøy.no
+vevelstad.no
+vik.no
+vikna.no
+vindafjord.no
+volda.no
+voss.no
+varoy.no
+værøy.no
+vagan.no
+vågan.no
+voagat.no
+vagsoy.no
+vågsøy.no
+vaga.no
+vågå.no
+valer.ostfold.no
+våler.østfold.no
+valer.hedmark.no
+våler.hedmark.no
+
+// np : http://www.mos.com.np/register.html
+*.np
+
+// nr : http://cenpac.net.nr/dns/index.html
+// Confirmed by registry <te...@cenpac.net.nr> 2008-06-17
+nr
+biz.nr
+info.nr
+gov.nr
+edu.nr
+org.nr
+net.nr
+com.nr
+
+// nu : http://en.wikipedia.org/wiki/.nu
+nu
+
+// nz : http://en.wikipedia.org/wiki/.nz
+// Confirmed by registry <ja...@nzrs.net.nz> 2014-05-19
+nz
+ac.nz
+co.nz
+cri.nz
+geek.nz
+gen.nz
+govt.nz
+health.nz
+iwi.nz
+kiwi.nz
+maori.nz
+mil.nz
+māori.nz
+net.nz
+org.nz
+parliament.nz
+school.nz
+
+// om : http://en.wikipedia.org/wiki/.om
+om
+co.om
+com.om
+edu.om
+gov.om
+med.om
+museum.om
+net.om
+org.om
+pro.om
+
+// org : http://en.wikipedia.org/wiki/.org
+org
+
+// pa : http://www.nic.pa/
+// Some additional second level "domains" resolve directly as hostnames, such as
+// pannet.pa, so we add a rule for "pa".
+pa
+ac.pa
+gob.pa
+com.pa
+org.pa
+sld.pa
+edu.pa
+net.pa
+ing.pa
+abo.pa
+med.pa
+nom.pa
+
+// pe : https://www.nic.pe/InformeFinalComision.pdf
+pe
+edu.pe
+gob.pe
+nom.pe
+mil.pe
+org.pe
+com.pe
+net.pe
+
+// pf : http://www.gobin.info/domainname/formulaire-pf.pdf
+pf
+com.pf
+org.pf
+edu.pf
+
+// pg : http://en.wikipedia.org/wiki/.pg
+*.pg
+
+// ph : http://www.domains.ph/FAQ2.asp
+// Submitted by registry <je...@email.com.ph> 2008-06-13
+ph
+com.ph
+net.ph
+org.ph
+gov.ph
+edu.ph
+ngo.ph
+mil.ph
+i.ph
+
+// pk : http://pk5.pknic.net.pk/pk5/msgNamepk.PK
+pk
+com.pk
+net.pk
+edu.pk
+org.pk
+fam.pk
+biz.pk
+web.pk
+gov.pk
+gob.pk
+gok.pk
+gon.pk
+gop.pk
+gos.pk
+info.pk
+
+// pl http://www.dns.pl/english/index.html
+// confirmed on 26.09.2014 from Bogna Tchórzewska <pa...@dns.pl>
+pl
+com.pl
+net.pl
+org.pl
+info.pl
+waw.pl
+gov.pl
+// pl functional domains (http://www.dns.pl/english/index.html)
+aid.pl
+agro.pl
+atm.pl
+auto.pl
+biz.pl
+edu.pl
+gmina.pl
+gsm.pl
+mail.pl
+miasta.pl
+media.pl
+mil.pl
+nieruchomosci.pl
+nom.pl
+pc.pl
+powiat.pl
+priv.pl
+realestate.pl
+rel.pl
+sex.pl
+shop.pl
+sklep.pl
+sos.pl
+szkola.pl
+targi.pl
+tm.pl
+tourism.pl
+travel.pl
+turystyka.pl
+// Government domains (administred by ippt.gov.pl)
+uw.gov.pl
+um.gov.pl
+ug.gov.pl
+upow.gov.pl
+starostwo.gov.pl
+so.gov.pl
+sr.gov.pl
+po.gov.pl
+pa.gov.pl
+// pl regional domains (http://www.dns.pl/english/index.html)
+augustow.pl
+babia-gora.pl
+bedzin.pl
+beskidy.pl
+bialowieza.pl
+bialystok.pl
+bielawa.pl
+bieszczady.pl
+boleslawiec.pl
+bydgoszcz.pl
+bytom.pl
+cieszyn.pl
+czeladz.pl
+czest.pl
+dlugoleka.pl
+elblag.pl
+elk.pl
+glogow.pl
+gniezno.pl
+gorlice.pl
+grajewo.pl
+ilawa.pl
+jaworzno.pl
+jelenia-gora.pl
+jgora.pl
+kalisz.pl
+kazimierz-dolny.pl
+karpacz.pl
+kartuzy.pl
+kaszuby.pl
+katowice.pl
+kepno.pl
+ketrzyn.pl
+klodzko.pl
+kobierzyce.pl
+kolobrzeg.pl
+konin.pl
+konskowola.pl
+kutno.pl
+lapy.pl
+lebork.pl
+legnica.pl
+lezajsk.pl
+limanowa.pl
+lomza.pl
+lowicz.pl
+lubin.pl
+lukow.pl
+malbork.pl
+malopolska.pl
+mazowsze.pl
+mazury.pl
+mielec.pl
+mielno.pl
+mragowo.pl
+naklo.pl
+nowaruda.pl
+nysa.pl
+olawa.pl
+olecko.pl
+olkusz.pl
+olsztyn.pl
+opoczno.pl
+opole.pl
+ostroda.pl
+ostroleka.pl
+ostrowiec.pl
+ostrowwlkp.pl
+pila.pl
+pisz.pl
+podhale.pl
+podlasie.pl
+polkowice.pl
+pomorze.pl
+pomorskie.pl
+prochowice.pl
+pruszkow.pl
+przeworsk.pl
+pulawy.pl
+radom.pl
+rawa-maz.pl
+rybnik.pl
+rzeszow.pl
+sanok.pl
+sejny.pl
+slask.pl
+slupsk.pl
+sosnowiec.pl
+stalowa-wola.pl
+skoczow.pl
+starachowice.pl
+stargard.pl
+suwalki.pl
+swidnica.pl
+swiebodzin.pl
+swinoujscie.pl
+szczecin.pl
+szczytno.pl
+tarnobrzeg.pl
+tgory.pl
+turek.pl
+tychy.pl
+ustka.pl
+walbrzych.pl
+warmia.pl
+warszawa.pl
+wegrow.pl
+wielun.pl
+wlocl.pl
+wloclawek.pl
+wodzislaw.pl
+wolomin.pl
+wroclaw.pl
+zachpomor.pl
+zagan.pl
+zarow.pl
+zgora.pl
+zgorzelec.pl
+
+// pm : http://www.afnic.fr/medias/documents/AFNIC-naming-policy2012.pdf
+pm
+
+// pn : http://www.government.pn/PnRegistry/policies.htm
+pn
+gov.pn
+co.pn
+org.pn
+edu.pn
+net.pn
+
+// post : http://en.wikipedia.org/wiki/.post
+post
+
+// pr : http://www.nic.pr/index.asp?f=1
+pr
+com.pr
+net.pr
+org.pr
+gov.pr
+edu.pr
+isla.pr
+pro.pr
+biz.pr
+info.pr
+name.pr
+// these aren't mentioned on nic.pr, but on http://en.wikipedia.org/wiki/.pr
+est.pr
+prof.pr
+ac.pr
+
+// pro : http://www.nic.pro/support_faq.htm
+pro
+aca.pro
+bar.pro
+cpa.pro
+jur.pro
+law.pro
+med.pro
+eng.pro
+
+// ps : http://en.wikipedia.org/wiki/.ps
+// http://www.nic.ps/registration/policy.html#reg
+ps
+edu.ps
+gov.ps
+sec.ps
+plo.ps
+com.ps
+org.ps
+net.ps
+
+// pt : http://online.dns.pt/dns/start_dns
+pt
+net.pt
+gov.pt
+org.pt
+edu.pt
+int.pt
+publ.pt
+com.pt
+nome.pt
+
+// pw : http://en.wikipedia.org/wiki/.pw
+pw
+co.pw
+ne.pw
+or.pw
+ed.pw
+go.pw
+belau.pw
+
+// py : http://www.nic.py/pautas.html#seccion_9
+// Confirmed by registry 2012-10-03
+py
+com.py
+coop.py
+edu.py
+gov.py
+mil.py
+net.py
+org.py
+
+// qa : http://domains.qa/en/
+qa
+com.qa
+edu.qa
+gov.qa
+mil.qa
+name.qa
+net.qa
+org.qa
+sch.qa
+
+// re : http://www.afnic.re/obtenir/chartes/nommage-re/annexe-descriptifs
+re
+com.re
+asso.re
+nom.re
+
+// ro : http://www.rotld.ro/
+ro
+com.ro
+org.ro
+tm.ro
+nt.ro
+nom.ro
+info.ro
+rec.ro
+arts.ro
+firm.ro
+store.ro
+www.ro
+
+// rs : http://en.wikipedia.org/wiki/.rs
+rs
+co.rs
+org.rs
+edu.rs
+ac.rs
+gov.rs
+in.rs
+
+// ru : http://www.cctld.ru/ru/docs/aktiv_8.php
+// Industry domains
+ru
+ac.ru
+com.ru
+edu.ru
+int.ru
+net.ru
+org.ru
+pp.ru
+// Geographical domains
+adygeya.ru
+altai.ru
+amur.ru
+arkhangelsk.ru
+astrakhan.ru
+bashkiria.ru
+belgorod.ru
+bir.ru
+bryansk.ru
+buryatia.ru
+cbg.ru
+chel.ru
+chelyabinsk.ru
+chita.ru
+chukotka.ru
+chuvashia.ru
+dagestan.ru
+dudinka.ru
+e-burg.ru
+grozny.ru
+irkutsk.ru
+ivanovo.ru
+izhevsk.ru
+jar.ru
+joshkar-ola.ru
+kalmykia.ru
+kaluga.ru
+kamchatka.ru
+karelia.ru
+kazan.ru
+kchr.ru
+kemerovo.ru
+khabarovsk.ru
+khakassia.ru
+khv.ru
+kirov.ru
+koenig.ru
+komi.ru
+kostroma.ru
+krasnoyarsk.ru
+kuban.ru
+kurgan.ru
+kursk.ru
+lipetsk.ru
+magadan.ru
+mari.ru
+mari-el.ru
+marine.ru
+mordovia.ru
+// mosreg.ru  Bug 1090800 - removed at request of Aleksey Konstantinov <ko...@mosreg.ru>
+msk.ru
+murmansk.ru
+nalchik.ru
+nnov.ru
+nov.ru
+novosibirsk.ru
+nsk.ru
+omsk.ru
+orenburg.ru
+oryol.ru
+palana.ru
+penza.ru
+perm.ru
+ptz.ru
+rnd.ru
+ryazan.ru
+sakhalin.ru
+samara.ru
+saratov.ru
+simbirsk.ru
+smolensk.ru
+spb.ru
+stavropol.ru
+stv.ru
+surgut.ru
+tambov.ru
+tatarstan.ru
+tom.ru
+tomsk.ru
+tsaritsyn.ru
+tsk.ru
+tula.ru
+tuva.ru
+tver.ru
+tyumen.ru
+udm.ru
+udmurtia.ru
+ulan-ude.ru
+vladikavkaz.ru
+vladimir.ru
+vladivostok.ru
+volgograd.ru
+vologda.ru
+voronezh.ru
+vrn.ru
+vyatka.ru
+yakutia.ru
+yamal.ru
+yaroslavl.ru
+yekaterinburg.ru
+yuzhno-sakhalinsk.ru
+// More geographical domains
+amursk.ru
+baikal.ru
+cmw.ru
+fareast.ru
+jamal.ru
+kms.ru
+k-uralsk.ru
+kustanai.ru
+kuzbass.ru
+magnitka.ru
+mytis.ru
+nakhodka.ru
+nkz.ru
+norilsk.ru
+oskol.ru
+pyatigorsk.ru
+rubtsovsk.ru
+snz.ru
+syzran.ru
+vdonsk.ru
+zgrad.ru
+// State domains
+gov.ru
+mil.ru
+// Technical domains
+test.ru
+
+// rw : http://www.nic.rw/cgi-bin/policy.pl
+rw
+gov.rw
+net.rw
+edu.rw
+ac.rw
+com.rw
+co.rw
+int.rw
+mil.rw
+gouv.rw
+
+// sa : http://www.nic.net.sa/
+sa
+com.sa
+net.sa
+org.sa
+gov.sa
+med.sa
+pub.sa
+edu.sa
+sch.sa
+
+// sb : http://www.sbnic.net.sb/
+// Submitted by registry <le...@telekom.com.sb> 2008-06-08
+sb
+com.sb
+edu.sb
+gov.sb
+net.sb
+org.sb
+
+// sc : http://www.nic.sc/
+sc
+com.sc
+gov.sc
+net.sc
+org.sc
+edu.sc
+
+// sd : http://www.isoc.sd/sudanic.isoc.sd/billing_pricing.htm
+// Submitted by registry <ad...@isoc.sd> 2008-06-17
+sd
+com.sd
+net.sd
+org.sd
+edu.sd
+med.sd
+t

<TRUNCATED>


[33/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
new file mode 100644
index 0000000..1cc591b
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
@@ -0,0 +1,259 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.bulk;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.io.Files;
+import org.apache.commons.cli.*;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.hbase.mr.BulkLoadMapper;
+import org.apache.metron.common.configuration.EnrichmentConfig;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.common.utils.JSONUtils;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.text.*;
+import java.util.Date;
+
+public class ThreatIntelBulkLoader  {
+  private static abstract class OptionHandler implements Function<String, Option> {}
+  private enum BulkLoadOptions {
+    HELP("h", new OptionHandler() {
+
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        return new Option(s, "help", false, "Generate Help screen");
+      }
+    })
+    ,TABLE("t", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "table", true, "HBase table to import data into");
+        o.setRequired(true);
+        o.setArgName("HBASE_TABLE");
+        return o;
+      }
+    })
+    ,COLUMN_FAMILY("f", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "column_family", true, "Column family of the HBase table to import into");
+        o.setRequired(true);
+        o.setArgName("CF_NAME");
+        return o;
+      }
+    })
+    ,EXTRACTOR_CONFIG("e", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
+        o.setArgName("JSON_FILE");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,INPUT_DATA("i", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "input", true, "Input directory in HDFS for the data to import into HBase");
+        o.setArgName("DIR");
+        o.setRequired(true);
+        return o;
+      }
+    })
+    ,AS_OF_TIME("a", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "as_of", true, "The last read timestamp to mark the records with (omit for time of execution)");
+        o.setArgName("datetime");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,AS_OF_TIME_FORMAT("z", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "as_of_format", true, "The format of the as_of time (only used in conjunction with the as_of option)");
+        o.setArgName("format");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,CONVERTER("c", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "converter", true, "The HBase converter class to use (Default is threat intel)");
+        o.setArgName("class");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,ENRICHMENT_CONFIG("n", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "enrichment_config", true
+                , "JSON Document describing the enrichment configuration details." +
+                "  This is used to associate an enrichment type with a field type in zookeeper."
+        );
+        o.setArgName("JSON_FILE");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ;
+    Option option;
+    String shortCode;
+    BulkLoadOptions(String shortCode, OptionHandler optionHandler) {
+      this.shortCode = shortCode;
+      this.option = optionHandler.apply(shortCode);
+    }
+
+    public boolean has(CommandLine cli) {
+      return cli.hasOption(shortCode);
+    }
+
+    public String get(CommandLine cli) {
+      return cli.getOptionValue(shortCode);
+    }
+
+    public static CommandLine parse(CommandLineParser parser, String[] args) {
+      try {
+        CommandLine cli = parser.parse(getOptions(), args);
+        if(ThreatIntelBulkLoader.BulkLoadOptions.HELP.has(cli)) {
+          printHelp();
+          System.exit(0);
+        }
+        return cli;
+      } catch (ParseException e) {
+        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+        e.printStackTrace(System.err);
+        printHelp();
+        System.exit(-1);
+        return null;
+      }
+    }
+
+    public static void printHelp() {
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp( "ThreatIntelBulkLoader", getOptions());
+    }
+
+    public static Options getOptions() {
+      Options ret = new Options();
+      for(BulkLoadOptions o : BulkLoadOptions.values()) {
+        ret.addOption(o.option);
+      }
+      return ret;
+    }
+  }
+  private static long getTimestamp(CommandLine cli) throws java.text.ParseException {
+    if(BulkLoadOptions.AS_OF_TIME.has(cli)) {
+      if(!BulkLoadOptions.AS_OF_TIME_FORMAT.has(cli)) {
+        throw new IllegalStateException("Unable to proceed: Specified as_of_time without an associated format.");
+      }
+      else {
+        DateFormat format = new SimpleDateFormat(BulkLoadOptions.AS_OF_TIME_FORMAT.get(cli));
+        Date d = format.parse(BulkLoadOptions.AS_OF_TIME.get(cli));
+        return d.getTime();
+      }
+    }
+    else {
+      return System.currentTimeMillis();
+    }
+  }
+  private static String readExtractorConfig(File configFile) throws IOException {
+    return Joiner.on("\n").join(Files.readLines(configFile, Charset.defaultCharset()));
+  }
+
+  public static Job createJob(Configuration conf, String input, String table, String cf, String extractorConfigContents, long ts, HbaseConverter converter) throws IOException {
+    Job job = new Job(conf);
+    job.setJobName("ThreatIntelBulkLoader: " + input + " => " +  table + ":" + cf);
+    System.out.println("Configuring " + job.getJobName());
+    job.setJarByClass(ThreatIntelBulkLoader.class);
+    job.setMapperClass(org.apache.metron.dataloads.hbase.mr.BulkLoadMapper.class);
+    job.setOutputFormatClass(TableOutputFormat.class);
+    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
+    job.getConfiguration().set(BulkLoadMapper.COLUMN_FAMILY_KEY, cf);
+    job.getConfiguration().set(BulkLoadMapper.CONFIG_KEY, extractorConfigContents);
+    job.getConfiguration().set(BulkLoadMapper.LAST_SEEN_KEY, "" + ts);
+    job.getConfiguration().set(BulkLoadMapper.CONVERTER_KEY, converter.getClass().getName());
+    job.setOutputKeyClass(ImmutableBytesWritable.class);
+    job.setOutputValueClass(Put.class);
+    job.setNumReduceTasks(0);
+    ExtractorHandler handler = ExtractorHandler.load(extractorConfigContents);
+    handler.getInputFormatHandler().set(job, new Path(input), handler.getConfig());
+    return job;
+  }
+
+  public static void main(String... argv) throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+
+    CommandLine cli = BulkLoadOptions.parse(new PosixParser(), otherArgs);
+    Long ts = getTimestamp(cli);
+    String input = BulkLoadOptions.INPUT_DATA.get(cli);
+    String table = BulkLoadOptions.TABLE.get(cli);
+    String cf = BulkLoadOptions.COLUMN_FAMILY.get(cli);
+    String extractorConfigContents = readExtractorConfig(new File(BulkLoadOptions.EXTRACTOR_CONFIG.get(cli)));
+    String converterClass = EnrichmentConverter.class.getName();
+    if(BulkLoadOptions.CONVERTER.has(cli)) {
+      converterClass = BulkLoadOptions.CONVERTER.get(cli);
+    }
+    EnrichmentConfig enrichmentConfig = null;
+    if(BulkLoadOptions.ENRICHMENT_CONFIG.has(cli)) {
+      enrichmentConfig = JSONUtils.INSTANCE.load( new File(BulkLoadOptions.ENRICHMENT_CONFIG.get(cli))
+              , EnrichmentConfig.class
+      );
+    }
+
+    HbaseConverter converter = (HbaseConverter) Class.forName(converterClass).newInstance();
+    Job job = createJob(conf, input, table, cf, extractorConfigContents, ts, converter);
+    System.out.println(conf);
+    boolean jobRet = job.waitForCompletion(true);
+    if(!jobRet) {
+      System.exit(1);
+    }
+    if(enrichmentConfig != null) {
+        enrichmentConfig.updateSensorConfigs();
+    }
+    System.exit(0);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/cif/HBaseTableLoad.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/cif/HBaseTableLoad.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/cif/HBaseTableLoad.java
new file mode 100644
index 0000000..0cff227
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/cif/HBaseTableLoad.java
@@ -0,0 +1,255 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.cif;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipInputStream;
+
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.apache.commons.cli.BasicParser;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.Options;
+
+import java.io.BufferedInputStream;
+
+public class HBaseTableLoad {
+
+	private static final Logger LOG = Logger.getLogger(HBaseTableLoad.class);
+	private static Configuration conf = null;
+	private String hbaseTable = "cif_table";
+	private String dirName = "./";
+	private boolean usefileList = false;
+	private Set<String> files;
+
+	/**
+	 * Initialization
+	 */
+	static {
+		conf = HBaseConfiguration.create();
+	}
+
+	public static void main(String[] args) {
+
+		HBaseTableLoad ht = new HBaseTableLoad();
+
+		ht.parse(args);
+		//ht.LoadDirHBase();
+
+	}
+
+	private void LoadDirHBase() {
+		LOG.info("Working on:" + dirName);
+		File folder = new File(dirName);
+		File[] listOfFiles = folder.listFiles();
+		InputStream input;
+
+		for (int i = 0; i < listOfFiles.length; i++) {
+			File file = listOfFiles[i];
+
+			if (file.isFile()) {
+
+				// Check if filename is present in FileList
+				if (usefileList)
+					if (!files.contains(file.getAbsolutePath()))
+						continue;
+
+				// e.g. folder name is infrastructure_botnet. Col Qualifier is
+				// botnet and col_family is infrastructure
+
+				String col_family = folder.getName().split("_")[0];
+				String col_qualifier = folder.getName().split("_")[1];
+
+				// Open file
+				try {
+					if (file.getName().endsWith(".gz"))
+						input = new BufferedInputStream(new GZIPInputStream(
+								new FileInputStream(file)));
+					else if (file.getName().endsWith(".zip"))
+						input = new BufferedInputStream(new ZipInputStream(
+								new FileInputStream(file)));
+					else if (file.getName().endsWith(".json"))
+						input = new BufferedInputStream((new FileInputStream(
+								file)));
+					else
+						continue;
+
+					LOG.info("Begin Loading File:" + file.getAbsolutePath());
+
+					HBaseBulkPut(input, col_family, col_qualifier);
+					LOG.info("Completed Loading File:" + file.getAbsolutePath());
+
+				} catch (IOException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				} catch (ParseException e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+				}
+			} else if (file.isDirectory()) // if sub-directory then call the
+											// function recursively
+				this.LoadDirHBase(file.getAbsolutePath());
+		}
+	}
+
+	private void LoadDirHBase(String dirname) {
+
+		this.dirName = dirname;
+		this.LoadDirHBase();
+
+	}
+
+	/**
+	 * @param input
+	 * @param hbaseTable
+	 * @param col_family
+	 * @throws IOException
+	 * @throws ParseException
+	 * 
+	 * 
+	 *     Inserts all json records picked up from the inputStream
+	 */
+	private void HBaseBulkPut(InputStream input, String col_family,
+			String col_qualifier) throws IOException, ParseException {
+
+		HTable table = new HTable(conf, hbaseTable);
+		JSONParser parser = new JSONParser();
+
+		BufferedReader br = new BufferedReader(new InputStreamReader(input));
+		String jsonString;
+		List<Put> allputs = new ArrayList<Put>();
+		Map json;
+
+		while ((jsonString = br.readLine()) != null) {
+
+			try {
+
+				json = (Map) parser.parse(jsonString);
+			} catch (ParseException e) {
+				// System.out.println("Unable to Parse: " +jsonString);
+				continue;
+			}
+			// Iterator iter = json.entrySet().iterator();
+
+			// Get Address - either IP/domain or email and make that the Key
+			Put put = new Put(Bytes.toBytes((String) json.get("address")));
+
+			// We are just adding a "Y" flag to mark this address
+			put.add(Bytes.toBytes(col_family), Bytes.toBytes(col_qualifier),
+					Bytes.toBytes("Y"));
+
+			allputs.add(put);
+		}
+		table.put(allputs);
+		table.close();
+	}
+
+	private void printUsage() {
+		System.out
+				.println("Usage: java -cp JarFile org.apache.metron.dataloads.cif.HBaseTableLoad -d <directory> -t <tablename> -f <optional file-list>");
+	}
+
+	private void parse(String[] args) {
+		CommandLineParser parser = new BasicParser();
+		Options options = new Options();
+
+		options.addOption("d", true, "description");
+		options.addOption("t", true, "description");
+		options.addOption("f", false, "description");
+
+		CommandLine cmd = null;
+		try {
+			cmd = parser.parse(options, args);
+
+			if (cmd.hasOption("d"))
+			{
+				this.dirName = cmd.getOptionValue("d");
+				LOG.info("Directory Name:" + cmd.getOptionValue("d"));
+			}
+			else {
+				LOG.info("Missing Directory Name");
+				printUsage();
+				System.exit(-1);
+			}
+
+			if (cmd.hasOption("t"))
+			{
+				this.hbaseTable = cmd.getOptionValue("t");
+				LOG.info("HBase Table Name:" + cmd.getOptionValue("t"));
+			}
+			else {
+				LOG.info("Missing Table Name");
+				printUsage();
+				System.exit(-1);
+			}
+
+			if (cmd.hasOption("f")) {
+				this.usefileList = true;
+				files = LoadFileList(cmd.getOptionValue("f"));
+				LOG.info("FileList:" + cmd.getOptionValue("f"));
+			}
+
+		} catch (org.apache.commons.cli.ParseException e) {
+			LOG.error("Failed to parse comand line properties", e);
+			e.printStackTrace();
+			System.exit(-1);
+		}
+	}
+
+	private Set<String> LoadFileList(String filename) {
+
+		Set<String> output = null;
+		BufferedReader reader;
+
+		try {
+			reader = new BufferedReader(new InputStreamReader(
+					new FileInputStream(filename)));
+			output = new HashSet<String>();
+			String in = "";
+
+			while ((in = reader.readLine()) != null)
+				output.add(in);
+
+			reader.close();
+
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+
+		return output;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractor.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractor.java
new file mode 100644
index 0000000..bd490c8
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractor.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor;
+
+import org.apache.metron.enrichment.lookup.LookupKV;
+
+import java.io.IOException;
+import java.util.Map;
+
/**
 * Strategy for turning one unit of raw input (a line, or a whole file when
 * paired with a whole-file input format) into zero or more {@link LookupKV}
 * key/value pairs for loading into a lookup store.
 */
public interface Extractor {
    /**
     * Extracts lookup key/value pairs from a single input unit.
     *
     * @param line the raw input to parse
     * @return the extracted key/value pairs; may be empty
     * @throws IOException if the input cannot be parsed
     */
    Iterable<LookupKV> extract(String line) throws IOException;

    /**
     * Supplies the extractor-specific configuration; expected to be called
     * once before any call to {@link #extract(String)}.
     *
     * @param config extractor-specific configuration map
     */
    void initialize(Map<String, Object> config);
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorCreator.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorCreator.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorCreator.java
new file mode 100644
index 0000000..6e081aa
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorCreator.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor;
+
+import java.util.Map;
+
/**
 * Factory for {@link Extractor} instances; implemented by the
 * {@link Extractors} enum constants so each built-in extractor type can
 * hand out a fresh instance on demand.
 */
public interface ExtractorCreator {
    /**
     * @return a new {@link Extractor} instance
     */
    Extractor create();
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
new file mode 100644
index 0000000..5d17cbe
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor;
+
+import org.apache.metron.dataloads.extractor.inputformat.Formats;
+import org.apache.metron.dataloads.extractor.inputformat.InputFormatHandler;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+
/**
 * Configuration-driven pairing of an {@link Extractor} with the
 * {@link InputFormatHandler} that feeds it.  Instances are deserialized by
 * Jackson from a JSON document (see {@link #load(InputStream)}), so the
 * getter/setter names below define the accepted JSON property names
 * ("config", "extractor", "inputFormatHandler") -- renaming them changes
 * the wire format.
 */
public class ExtractorHandler {
    // Shared mapper, reused by every load() call.
    final static ObjectMapper _mapper = new ObjectMapper();
    // Extractor-specific settings; handed to Extractor.initialize() in load().
    private Map<String, Object> config;
    private Extractor extractor;
    // Line-at-a-time input unless the JSON specifies otherwise.
    private InputFormatHandler inputFormatHandler = Formats.BY_LINE;

    public Map<String, Object> getConfig() {
        return config;
    }

    public void setConfig(Map<String, Object> config) {
        this.config = config;
    }

    public InputFormatHandler getInputFormatHandler() {
        return inputFormatHandler;
    }

    /**
     * Resolves the named input format via {@link Formats#create(String)}
     * (enum constant name or fully-qualified class name).
     *
     * @throws IllegalStateException if the handler cannot be created
     */
    public void setInputFormatHandler(String handler) {
        try {
            this.inputFormatHandler= Formats.create(handler);
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new IllegalStateException("Unable to create an inputformathandler", e);
        }
    }

    public Extractor getExtractor() {
        return extractor;
    }
    /**
     * Resolves the named extractor via {@link Extractors#create(String)}
     * (enum constant name or fully-qualified class name).
     *
     * @throws IllegalStateException if the extractor cannot be created
     */
    public void setExtractor(String extractor) {
        try {
            this.extractor = Extractors.create(extractor);
        } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
            throw new IllegalStateException("Unable to create an extractor", e);
        }
    }

    /**
     * Deserializes an ExtractorHandler from JSON and initializes its
     * extractor with the parsed config.
     *
     * @param is stream containing the JSON configuration
     * @return the fully initialized handler
     * @throws IOException if the JSON cannot be read or bound
     */
    public static synchronized ExtractorHandler load(InputStream is) throws IOException {
        ExtractorHandler ret = _mapper.readValue(is, ExtractorHandler.class);
        ret.getExtractor().initialize(ret.getConfig());
        return ret;
    }
    /** Loads from a String using the supplied charset. */
    public static synchronized ExtractorHandler load(String s, Charset c) throws IOException {
        return load( new ByteArrayInputStream(s.getBytes(c)));
    }
    /** Loads from a String using the platform default charset. */
    public static synchronized ExtractorHandler load(String s) throws IOException {
        return load( s, Charset.defaultCharset());
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractors.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractors.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractors.java
new file mode 100644
index 0000000..bbd4c22
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/Extractors.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor;
+
+import org.apache.metron.dataloads.extractor.csv.CSVExtractor;
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+
+import java.util.Map;
+
+public enum Extractors implements ExtractorCreator {
+    CSV(new ExtractorCreator() {
+
+        @Override
+        public Extractor create() {
+            return new CSVExtractor();
+        }
+    })
+    ,STIX(new ExtractorCreator() {
+        @Override
+        public Extractor create() {
+            return new StixExtractor();
+        }
+    })
+    ;
+    ExtractorCreator _creator;
+    Extractors(ExtractorCreator creator) {
+        this._creator = creator;
+    }
+    @Override
+    public Extractor create() {
+        return _creator.create();
+    }
+    public static Extractor create(String extractorName) throws ClassNotFoundException, IllegalAccessException, InstantiationException {
+        try {
+            ExtractorCreator ec = Extractors.valueOf(extractorName);
+            return ec.create();
+        }
+        catch(IllegalArgumentException iae) {
+            Extractor ex = (Extractor) Class.forName(extractorName).newInstance();
+            return ex;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/CSVExtractor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/CSVExtractor.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/CSVExtractor.java
new file mode 100644
index 0000000..1fce0fc
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/CSVExtractor.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.csv;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import com.opencsv.CSVParser;
+import com.opencsv.CSVParserBuilder;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.LookupKey;
+
+import java.io.IOException;
+import java.util.*;
+
+public class CSVExtractor implements Extractor {
+  public static final String COLUMNS_KEY="columns";
+  public static final String INDICATOR_COLUMN_KEY="indicator_column";
+  public static final String TYPE_COLUMN_KEY="type_column";
+  public static final String TYPE_KEY="type";
+  public static final String SEPARATOR_KEY="separator";
+  public static final String LOOKUP_CONVERTER = "lookup_converter";
+
+  private int typeColumn;
+  private String type;
+  private int indicatorColumn;
+  private Map<String, Integer> columnMap = new HashMap<>();
+  private CSVParser parser;
+  private LookupConverter converter = LookupConverters.ENRICHMENT.getConverter();
+
+  @Override
+  public Iterable<LookupKV> extract(String line) throws IOException {
+    if(line.trim().startsWith("#")) {
+      //comment
+      return Collections.emptyList();
+    }
+    String[] tokens = parser.parseLine(line);
+
+    LookupKey key = converter.toKey(getType(tokens), tokens[indicatorColumn]);
+    Map<String, String> values = new HashMap<>();
+    for(Map.Entry<String, Integer> kv : columnMap.entrySet()) {
+      values.put(kv.getKey(), tokens[kv.getValue()]);
+    }
+    return Arrays.asList(new LookupKV(key, converter.toValue(values)));
+  }
+
+  private String getType(String[] tokens) {
+    if(type == null) {
+      return tokens[typeColumn];
+    }
+    else {
+      return type;
+    }
+  }
+
+  private static Map.Entry<String, Integer> getColumnMapEntry(String column, int i) {
+    if(column.contains(":")) {
+      Iterable<String> tokens = Splitter.on(':').split(column);
+      String col = Iterables.getFirst(tokens, null);
+      Integer pos = Integer.parseInt(Iterables.getLast(tokens));
+      return new AbstractMap.SimpleEntry<>(col, pos);
+    }
+    else {
+      return new AbstractMap.SimpleEntry<>(column, i);
+    }
+
+  }
+  private static Map<String, Integer> getColumnMap(Map<String, Object> config) {
+    Map<String, Integer> columnMap = new HashMap<>();
+    if(config.containsKey(COLUMNS_KEY)) {
+      Object columnsObj = config.get(COLUMNS_KEY);
+      if(columnsObj instanceof String) {
+        String columns = (String)columnsObj;
+        int i = 0;
+        for (String column : Splitter.on(',').split(columns)) {
+          Map.Entry<String, Integer> e = getColumnMapEntry(column, i++);
+          columnMap.put(e.getKey(), e.getValue());
+        }
+      }
+      else if(columnsObj instanceof List) {
+        List columns = (List)columnsObj;
+        int i = 0;
+        for(Object column : columns) {
+          Map.Entry<String, Integer> e = getColumnMapEntry(column.toString(), i++);
+          columnMap.put(e.getKey(), e.getValue());
+        }
+      }
+      else if(columnsObj instanceof Map) {
+        Map<Object, Object> map = (Map<Object, Object>)columnsObj;
+        for(Map.Entry<Object, Object> e : map.entrySet()) {
+          columnMap.put(e.getKey().toString(), Integer.parseInt(e.getValue().toString()));
+        }
+      }
+    }
+    return columnMap;
+  }
+
+  @Override
+  public void initialize(Map<String, Object> config) {
+    if(config.containsKey(COLUMNS_KEY)) {
+      columnMap = getColumnMap(config);
+    }
+    else {
+      throw new IllegalStateException("CSVExtractor requires " + COLUMNS_KEY + " configuration");
+    }
+    if(config.containsKey(INDICATOR_COLUMN_KEY)) {
+      indicatorColumn = columnMap.get(config.get(INDICATOR_COLUMN_KEY).toString());
+    }
+    if(config.containsKey(TYPE_KEY)) {
+      type = config.get(TYPE_KEY).toString();
+    }
+    else if(config.containsKey(TYPE_COLUMN_KEY)) {
+      typeColumn = columnMap.get(config.get(TYPE_COLUMN_KEY).toString());
+    }
+    if(config.containsKey(SEPARATOR_KEY)) {
+      char separator = config.get(SEPARATOR_KEY).toString().charAt(0);
+      parser = new CSVParserBuilder().withSeparator(separator)
+              .build();
+    }
+    if(config.containsKey(LOOKUP_CONVERTER)) {
+      converter = LookupConverters.getConverter((String) config.get(LOOKUP_CONVERTER));
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverter.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverter.java
new file mode 100644
index 0000000..e0ca4ee
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverter.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.extractor.csv;
+
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.LookupValue;
+
+import java.util.Map;
+
/**
 * Converts a (type, indicator) pair and its metadata into the
 * {@link LookupKey}/{@link LookupValue} representation used by the
 * enrichment lookup store.
 */
public interface LookupConverter {
    /**
     * Builds the lookup key for an indicator of the given type.
     *
     * @param type      the indicator type
     * @param indicator the indicator value itself
     * @return the key under which the entry is stored
     */
    LookupKey toKey(String type, String indicator);

    /**
     * Wraps the extracted field-name to field-value metadata as the stored
     * lookup value.
     *
     * @param metadata field name to field value map
     * @return the value stored alongside the key
     */
    LookupValue toValue(Map<String, String> metadata);
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverters.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverters.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverters.java
new file mode 100644
index 0000000..bd58ba7
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/csv/LookupConverters.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.extractor.csv;
+
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.LookupValue;
+
+import java.util.Map;
+
+public enum LookupConverters {
+
+    ENRICHMENT(new LookupConverter() {
+        @Override
+        public LookupKey toKey(String type, String indicator) {
+            return new EnrichmentKey(type, indicator);
+
+        }
+
+        @Override
+        public LookupValue toValue(Map<String, String> metadata) {
+            return new EnrichmentValue(metadata);
+        }
+    })
+    ;
+    LookupConverter converter;
+    LookupConverters(LookupConverter converter) {
+        this.converter = converter;
+    }
+    public LookupConverter getConverter() {
+        return converter;
+    }
+
+    public static LookupConverter getConverter(String name) {
+        try {
+            return LookupConverters.valueOf(name).getConverter();
+        }
+        catch(Throwable t) {
+            try {
+                return (LookupConverter) Class.forName(name).newInstance();
+            } catch (InstantiationException e) {
+                throw new IllegalStateException("Unable to parse " + name, e);
+            } catch (IllegalAccessException e) {
+                throw new IllegalStateException("Unable to parse " + name, e);
+            } catch (ClassNotFoundException e) {
+                throw new IllegalStateException("Unable to parse " + name, e);
+            }
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
new file mode 100644
index 0000000..7e58455
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.inputformat;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+
+import java.io.IOException;
+import java.util.Map;
+
+public enum Formats implements InputFormatHandler{
+    BY_LINE(new InputFormatHandler() {
+        @Override
+        public void set(Job job, Path input, Map<String, Object> config) throws IOException {
+
+            FileInputFormat.addInputPath(job, input);
+        }
+    })
+    ;
+    InputFormatHandler _handler = null;
+    Formats(InputFormatHandler handler) {
+        this._handler = handler;
+    }
+    @Override
+    public void set(Job job, Path path, Map<String, Object> config) throws IOException {
+        _handler.set(job, path, config);
+    }
+
+    public static InputFormatHandler create(String handlerName) throws ClassNotFoundException, IllegalAccessException, InstantiationException {
+        try {
+            InputFormatHandler ec = Formats.valueOf(handlerName);
+            return ec;
+        }
+        catch(IllegalArgumentException iae) {
+            InputFormatHandler ex = (InputFormatHandler) Class.forName(handlerName).newInstance();
+            return ex;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
new file mode 100644
index 0000000..2287969
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.inputformat;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Job;
+
+import java.io.IOException;
+import java.util.Map;
+
/**
 * Hook for configuring how a Hadoop MapReduce job reads its input (input
 * paths and, optionally, the InputFormat class).  See {@link Formats} for
 * the built-in implementations.
 */
public interface InputFormatHandler {
    /**
     * Applies this input format to the given job.
     *
     * @param job    the job being configured
     * @param input  the input path to read
     * @param config extractor configuration (ignored by the built-in handlers)
     * @throws IOException if the job input cannot be configured
     */
    void set(Job job, Path input, Map<String, Object> config) throws IOException;
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
new file mode 100644
index 0000000..e0a58ef
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.inputformat;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.*;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.FileSplit;
+
+import java.io.IOException;
+import java.util.Map;
+
/**
 * {@link InputFormatHandler} that presents each input file as a single
 * record: key = {@link NullWritable}, value = the file's entire contents
 * as {@link Text}.  Files are marked non-splitable so one mapper sees one
 * whole file.
 */
public class WholeFileFormat implements InputFormatHandler {

    /** Emits exactly one record per split: the whole file as one Text value. */
    public static class WholeFileRecordReader extends RecordReader<NullWritable, Text> {
        private FileSplit fileSplit;
        private Configuration conf;
        private Text value = new Text();
        // Flips to true once the single record has been produced.
        private boolean processed = false;

        @Override
        public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
            this.fileSplit = (FileSplit) split;
            this.conf = context.getConfiguration();
        }

        /**
         * On the first call, reads the entire file into {@link #value} and
         * returns true; every later call returns false.
         * NOTE(review): the cast of getLength() to int limits input files to
         * Integer.MAX_VALUE bytes -- confirm inputs stay under 2GB.
         */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            if (!processed) {
                byte[] contents = new byte[(int) fileSplit.getLength()];
                Path file = fileSplit.getPath();
                FileSystem fs = file.getFileSystem(conf);
                FSDataInputStream in = null;
                try {
                    in = fs.open(file);
                    IOUtils.readFully(in, contents, 0, contents.length);
                    value.set(contents, 0, contents.length);
                } finally {
                    // Stream is closed even when the read throws.
                    IOUtils.closeStream(in);
                }
                processed = true;
                return true;
            }
            return false;
        }

        @Override
        public NullWritable getCurrentKey() throws IOException, InterruptedException {
            return NullWritable.get();
        }
        @Override
        public Text getCurrentValue() throws IOException, InterruptedException{
            return value;
        }

        /** Progress is all-or-nothing since there is only one record. */
        @Override
        public float getProgress() throws IOException {
            return processed ? 1.0f : 0.0f;
        }

        @Override
        public void close() throws IOException{
            //do nothing :)
        }
    }

    /** InputFormat that never splits files and uses WholeFileRecordReader. */
    public static class WholeFileInputFormat extends FileInputFormat<NullWritable, Text> {

        // One file = one record, so splitting would corrupt the value.
        @Override
        protected boolean isSplitable(JobContext context, Path file) {
            return false;
        }

        // NOTE(review): the MR framework typically calls initialize() on the
        // returned reader itself; the eager call here looks benign since
        // initialize() only stores references -- confirm against the Hadoop
        // RecordReader lifecycle.
        @Override
        public RecordReader<NullWritable, Text> createRecordReader(
                InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
            WholeFileRecordReader reader = new WholeFileRecordReader();
            reader.initialize(split, context);
            return reader;
        }
    }
    /**
     * Wires this whole-file format into the job: adds the input path and
     * sets {@link WholeFileInputFormat} as the job's input format class.
     */
    @Override
    public void set(Job job, Path input, Map<String, Object> config) throws IOException {
        WholeFileInputFormat.setInputPaths(job, input);
        job.setInputFormatClass(WholeFileInputFormat.class);
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/StixExtractor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/StixExtractor.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/StixExtractor.java
new file mode 100644
index 0000000..4696639
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/StixExtractor.java
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix;
+
+import com.google.common.base.Splitter;
+import org.apache.commons.io.FileUtils;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.stix.types.ObjectTypeHandler;
+import org.apache.metron.dataloads.extractor.stix.types.ObjectTypeHandlers;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.cybox.common_2.*;
+import org.mitre.cybox.cybox_2.ObjectType;
+import org.mitre.cybox.cybox_2.Observable;
+import org.mitre.cybox.cybox_2.Observables;
+import org.mitre.stix.common_1.IndicatorBaseType;
+import org.mitre.stix.indicator_2.Indicator;
+import org.mitre.stix.stix_1.STIXPackage;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class StixExtractor implements Extractor {
+    /** Extractor configuration captured in initialize() and passed to the type handlers. */
+    Map<String, Object> config;
+
+    /**
+     * Parses a STIX XML document and extracts one LookupKV per supported observable.
+     *
+     * @param line the full STIX package as an XML string
+     * @return the extracted key/value pairs; empty when nothing is supported
+     * @throws IOException if a type handler fails during extraction
+     */
+    @Override
+    public Iterable<LookupKV> extract(String line) throws IOException {
+        // Workaround: some feeds emit the invalid condition "Equal" where the schema expects "Equals".
+        STIXPackage stixPackage = STIXPackage.fromXMLString(line.replaceAll("\"Equal\"", "\"Equals\""));
+        List<LookupKV> ret = new ArrayList<>();
+        for(Observable o : getObservables(stixPackage)) {
+            ObjectType obj = o.getObject();
+            if(obj != null) {
+                ObjectPropertiesType props = obj.getProperties();
+                if(props != null) {
+                    // Unsupported property types have no registered handler and are silently skipped.
+                    ObjectTypeHandler handler = ObjectTypeHandlers.getHandlerByInstance(props);
+                    if (handler != null) {
+                        for(LookupKV extraction : handler.extract(props, config)) {
+                            ret.add(extraction);
+                        }
+                    }
+                }
+            }
+        }
+        return ret;
+    }
+
+    /**
+     * Collects every observable in the package: top-level observables plus those
+     * attached to indicators.
+     */
+    public List<Observable> getObservables(STIXPackage stixPackage) {
+        List<Observable> ret = new ArrayList<>();
+        Observables observables = stixPackage.getObservables();
+        if(observables != null) {
+            for (Observable o : observables.getObservables()) {
+                ret.add(o);
+            }
+        }
+        if (stixPackage.getIndicators() != null && stixPackage.getIndicators().getIndicators() != null) {
+            // NOTE(review): assumes every IndicatorBaseType is an Indicator, as the original
+            // index-based loop did -- a non-Indicator subtype would throw ClassCastException.
+            for (IndicatorBaseType indicatorBase : stixPackage.getIndicators().getIndicators()) {
+                Indicator indicator = (Indicator) indicatorBase;
+                if (indicator.getObservable() != null) {
+                    ret.add(indicator.getObservable());
+                }
+            }
+        }
+        return ret;
+    }
+
+    @Override
+    public void initialize(Map<String, Object> config) {
+        this.config = config;
+    }
+
+    /**
+     * Tokenizes a STIX string property. Only EQUALS/ANY conditions are supported;
+     * anything else yields no tokens. When a delimiter is declared the value is
+     * split on it, otherwise the whole value is a single token.
+     */
+    public static Iterable<String> split(StringObjectPropertyType value) {
+        final ConditionTypeEnum condition = value.getCondition();
+        final ConditionApplicationEnum applyCondition = value.getApplyCondition();
+        List<String> tokens = new ArrayList<>();
+        if(condition == ConditionTypeEnum.EQUALS && applyCondition == ConditionApplicationEnum.ANY) {
+            String delim = value.getDelimiter();
+            String line = value.getValue().toString();
+            if (delim != null) {
+                for (String token : Splitter.on(delim).split(line)) {
+                    tokens.add(token);
+                }
+            } else {
+                tokens.add(line);
+            }
+        }
+        return tokens;
+    }
+
+    /**
+     * Ad-hoc driver: extracts from the file named by the first argument,
+     * defaulting to /tmp/sample.xml when no argument is given (preserves the
+     * original behavior; replaces the previous hardcoded path and dead code).
+     */
+    public static void main(String[] args) throws IOException {
+        File file = new File(args.length > 0 ? args[0] : "/tmp/sample.xml");
+        String line = FileUtils.readFileToString(file);
+        StixExtractor extractor = new StixExtractor();
+        for(LookupKV results : extractor.extract(line)) {
+            System.out.println(results);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AbstractObjectTypeHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AbstractObjectTypeHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AbstractObjectTypeHandler.java
new file mode 100644
index 0000000..b637c6e
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AbstractObjectTypeHandler.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import org.mitre.cybox.common_2.ObjectPropertiesType;
+import org.mitre.cybox.common_2.StringObjectPropertyType;
+
+/**
+ * Base class for STIX object-type handlers: records the concrete
+ * ObjectPropertiesType subclass a handler supports and derives a default
+ * indicator type name from it.
+ *
+ * @param <T> the STIX object-properties type this handler extracts from
+ */
+public abstract class AbstractObjectTypeHandler<T extends ObjectPropertiesType> implements ObjectTypeHandler<T> {
+    // The concrete properties class handled; used for handler dispatch by instance type.
+    protected Class<T> objectPropertiesType;
+    public AbstractObjectTypeHandler(Class<T> clazz) {
+        objectPropertiesType = clazz;
+    }
+    @Override
+    public Class<T> getTypeClass() {
+        return objectPropertiesType;
+    }
+    /**
+     * Default type name: the simple class name lower-cased (e.g. "address").
+     * NOTE(review): toLowerCase() uses the default locale -- confirm this never
+     * runs under a locale (e.g. Turkish) where case mapping differs.
+     */
+    public String getType() {
+        return getTypeClass().getSimpleName().toLowerCase();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AddressHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AddressHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AddressHandler.java
new file mode 100644
index 0000000..ffcff43
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/AddressHandler.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import com.google.common.base.Splitter;
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.cybox.common_2.StringObjectPropertyType;
+import org.mitre.cybox.objects.Address;
+import org.mitre.cybox.objects.CategoryTypeEnum;
+
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Extracts address indicators (email, IPv4, IPv6, MAC) from STIX Address objects.
+ */
+public class AddressHandler extends AbstractObjectTypeHandler<Address> {
+  /** Config key: comma-separated CategoryTypeEnum names restricting extraction. */
+  public static final String SPECIFIC_CATEGORY_CONFIG = "stix_address_categories";
+  /** Config key: overrides the indicator type prefix (defaults to "address"). */
+  public static final String TYPE_CONFIG = "stix_address_type";
+  public static final EnumSet<CategoryTypeEnum> SUPPORTED_CATEGORIES = EnumSet.of(CategoryTypeEnum.E_MAIL
+          ,CategoryTypeEnum.IPV_4_ADDR
+          ,CategoryTypeEnum.IPV_6_ADDR
+          ,CategoryTypeEnum.MAC
+  ) ;
+  public AddressHandler() {
+    super(Address.class);
+  }
+
+  /**
+   * Extracts one LookupKV per address token, keyed by "&lt;type&gt;:&lt;category&gt;".
+   * Unsupported categories -- and categories excluded by config -- produce nothing.
+   *
+   * @param type   the STIX Address object
+   * @param config optional extractor configuration (may be null)
+   */
+  @Override
+  public Iterable<LookupKV> extract(final Address type, Map<String, Object> config) throws IOException {
+    List<LookupKV> ret = new ArrayList<>();
+    final CategoryTypeEnum category= type.getCategory();
+    if(!SUPPORTED_CATEGORIES.contains(category)) {
+      return ret;
+    }
+    String typeStr = getType();
+    if(config != null) {
+      if(config.containsKey(SPECIFIC_CATEGORY_CONFIG)) {
+        List<CategoryTypeEnum> categories = new ArrayList<>();
+        // Trim tokens and drop empties so "IPV_4_ADDR, MAC" or a trailing comma
+        // does not blow up CategoryTypeEnum.valueOf().
+        for (String c : Splitter.on(",").trimResults().omitEmptyStrings()
+                                .split(config.get(SPECIFIC_CATEGORY_CONFIG).toString())) {
+          categories.add(CategoryTypeEnum.valueOf(c));
+        }
+        // EnumSet.copyOf rejects empty collections; treat an empty config as "no restriction".
+        if (!categories.isEmpty() && !EnumSet.copyOf(categories).contains(category)) {
+          return ret;
+        }
+      }
+      if(config.containsKey(TYPE_CONFIG)) {
+        typeStr = config.get(TYPE_CONFIG).toString();
+      }
+    }
+    StringObjectPropertyType value = type.getAddressValue();
+    for(String token : StixExtractor.split(value)) {
+      final String indicatorType = typeStr + ":" + category;
+      LookupKV results = new LookupKV(new EnrichmentKey(indicatorType, token)
+              , new EnrichmentValue(
+              new HashMap<String, String>() {{
+                put("source-type", "STIX");
+                put("indicator-type", indicatorType);
+                put("source", type.toXMLString());
+              }}
+      )
+      );
+      ret.add(results);
+    }
+    return ret;
+  }
+
+  /** All indicator types this handler can produce, one per supported category. */
+  @Override
+  public List<String> getPossibleTypes() {
+    String typeStr = getType();
+    List<String> ret = new ArrayList<>();
+    for(CategoryTypeEnum e : SUPPORTED_CATEGORIES)
+    {
+       ret.add(typeStr + ":" + e);
+    }
+    return ret;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/DomainHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/DomainHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/DomainHandler.java
new file mode 100644
index 0000000..755cddd
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/DomainHandler.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.cybox.common_2.StringObjectPropertyType;
+import org.mitre.cybox.objects.DomainName;
+import org.mitre.cybox.objects.DomainNameTypeEnum;
+
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * Extracts domain-name indicators from STIX DomainName objects.
+ */
+public class DomainHandler extends AbstractObjectTypeHandler<DomainName> {
+  /** Config key: overrides the indicator type prefix (defaults to "domainname"). */
+  public static final String TYPE_CONFIG = "stix_domain_type";
+  // static final for consistency with AddressHandler.SUPPORTED_CATEGORIES: this set
+  // is immutable per-class state, not per-instance state.
+  public static final EnumSet<DomainNameTypeEnum> SUPPORTED_TYPES = EnumSet.of(DomainNameTypeEnum.FQDN);
+  public DomainHandler() {
+    super(DomainName.class);
+  }
+
+  /**
+   * Extracts one LookupKV per domain token. Domains with no declared type are
+   * accepted and treated as FQDNs (the only supported type).
+   *
+   * @param type   the STIX DomainName object
+   * @param config optional extractor configuration (may be null)
+   */
+  @Override
+  public Iterable<LookupKV> extract(final DomainName type, Map<String, Object> config) throws IOException {
+    List<LookupKV> ret = new ArrayList<>();
+    String typeStr = getType();
+    if(config != null) {
+      Object o = config.get(TYPE_CONFIG);
+      if(o != null) {
+        typeStr = o.toString();
+      }
+    }
+    final DomainNameTypeEnum domainType = type.getType();
+    if(domainType == null || SUPPORTED_TYPES.contains(domainType)) {
+      StringObjectPropertyType value = type.getValue();
+      for (String token : StixExtractor.split(value)) {
+        // NOTE(review): the indicator is always labeled FQDN, even when the source
+        // omitted the type -- confirm untyped domains really are FQDNs upstream.
+        final String indicatorType = typeStr + ":" + DomainNameTypeEnum.FQDN;
+        LookupKV results = new LookupKV(new EnrichmentKey(indicatorType, token)
+                , new EnrichmentValue(
+                new HashMap<String, String>() {{
+                  put("source-type", "STIX");
+                  put("indicator-type", indicatorType);
+                  put("source", type.toXMLString());
+                }}
+        )
+        );
+        ret.add(results);
+      }
+    }
+    return ret;
+  }
+
+  /** All indicator types this handler can produce, one per supported domain type. */
+  @Override
+  public List<String> getPossibleTypes() {
+    String typeStr = getType();
+    List<String> ret = new ArrayList<>();
+    for(DomainNameTypeEnum e : SUPPORTED_TYPES)
+    {
+       ret.add(typeStr + ":" + e);
+    }
+    return ret;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/HostnameHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/HostnameHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/HostnameHandler.java
new file mode 100644
index 0000000..c7b05eb
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/HostnameHandler.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.cybox.common_2.StringObjectPropertyType;
+import org.mitre.cybox.objects.Hostname;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Extracts hostname indicators from STIX Hostname objects.
+ */
+public class HostnameHandler  extends AbstractObjectTypeHandler<Hostname>{
+  /** Config key: overrides the indicator type (defaults to "hostname"). */
+  public static final String TYPE_CONFIG = "stix_hostname_type";
+  public HostnameHandler() {
+    super(Hostname.class);
+  }
+
+  /**
+   * Extracts one LookupKV per hostname token found in the object's value.
+   *
+   * @param type   the STIX Hostname object
+   * @param config optional extractor configuration (may be null)
+   */
+  @Override
+  public Iterable<LookupKV> extract(final Hostname type, Map<String, Object> config) throws IOException {
+    // Determine the indicator type first: the default unless overridden by config.
+    String typeStr = getType();
+    if(config != null) {
+      Object configuredType = config.get(TYPE_CONFIG);
+      if(configuredType != null) {
+        typeStr = configuredType.toString();
+      }
+    }
+    StringObjectPropertyType value = type.getHostnameValue();
+    List<LookupKV> ret = new ArrayList<>();
+    for(String token : StixExtractor.split(value)) {
+      final String indicatorType = typeStr;
+      LookupKV results = new LookupKV(new EnrichmentKey(indicatorType, token)
+              , new EnrichmentValue(new HashMap<String, String>() {{
+        put("source-type", "STIX");
+        put("indicator-type", indicatorType);
+        put("source", type.toXMLString());
+      }}
+      )
+      );
+      ret.add(results);
+    }
+    return ret;
+  }
+
+  /** Hostname handlers produce exactly one indicator type. */
+  @Override
+  public List<String> getPossibleTypes() {
+    return ImmutableList.of(getType());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandler.java
new file mode 100644
index 0000000..c7692be
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandler.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.mitre.cybox.common_2.ObjectPropertiesType;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Strategy for extracting lookup key/values from one STIX object-properties type.
+ *
+ * @param <T> the concrete ObjectPropertiesType subclass this handler understands
+ */
+public interface ObjectTypeHandler<T extends ObjectPropertiesType> {
+  /** Extracts zero or more LookupKVs from {@code type}, honoring the optional config. */
+  Iterable<LookupKV> extract(T type, Map<String, Object> config) throws IOException;
+  /** The properties class this handler supports; used for instance-based dispatch. */
+  Class<T> getTypeClass();
+  /** Every indicator-type string this handler may emit. */
+  List<String> getPossibleTypes();
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandlers.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandlers.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandlers.java
new file mode 100644
index 0000000..06d8cd8
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/stix/types/ObjectTypeHandlers.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix.types;
+
+import org.mitre.cybox.common_2.ObjectPropertiesType;
+
+public enum ObjectTypeHandlers {
+      ADDRESS(new AddressHandler())
+    ,HOSTNAME(new HostnameHandler())
+    ,DOMAINNAME(new DomainHandler())
+    ,;
+   ObjectTypeHandler _handler;
+   ObjectTypeHandlers(ObjectTypeHandler handler) {
+      _handler = handler;
+   }
+   ObjectTypeHandler getHandler() {
+      return _handler;
+   }
+   public static ObjectTypeHandler getHandlerByInstance(ObjectPropertiesType inst) {
+      for(ObjectTypeHandlers h : values()) {
+         if(inst.getClass().equals(h.getHandler().getTypeClass())) {
+            return h.getHandler();
+         }
+      }
+      return null;
+   }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapper.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapper.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapper.java
new file mode 100644
index 0000000..558ac16
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapper.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.lookup.LookupKV;
+
+import java.io.IOException;
+
+/**
+ * MapReduce mapper that runs an Extractor over each input line and emits
+ * HBase Puts for bulk-loading enrichment data.
+ */
+public class BulkLoadMapper extends Mapper<Object, Text, ImmutableBytesWritable, Put>
+{
+    /** Job-conf key holding the serialized extractor configuration. */
+    public static final String CONFIG_KEY="bl_extractor_config";
+    /** Job-conf key naming the HBase column family to write into. */
+    public static final String COLUMN_FAMILY_KEY = "bl_column_family";
+    public static final String LAST_SEEN_KEY = "bl_last_seen";
+    /** Job-conf key naming the HbaseConverter implementation class. */
+    public static final String CONVERTER_KEY = "bl_converter";
+    Extractor extractor = null;
+    String columnFamily = null;
+    HbaseConverter converter;
+
+    @Override
+    public void setup(Context context) throws IOException,
+            InterruptedException {
+        initialize(context.getConfiguration());
+    }
+
+    /** Extracts LookupKVs from the line and writes one Put per non-null result. */
+    @Override
+    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
+        for(LookupKV results : extractor.extract(value.toString())) {
+            if (results != null) {
+                Put put = converter.toPut(columnFamily, results.getKey(), results.getValue());
+                write(new ImmutableBytesWritable(results.getKey().toBytes()), put, context);
+            }
+        }
+    }
+
+    /** Builds the extractor and converter from the job configuration. */
+    protected void initialize(Configuration configuration) throws IOException{
+        String configStr = configuration.get(CONFIG_KEY);
+        extractor = ExtractorHandler.load(configStr).getExtractor();
+        columnFamily = configuration.get(COLUMN_FAMILY_KEY);
+        try {
+            converter = (HbaseConverter) Class.forName(configuration.get(CONVERTER_KEY)).newInstance();
+        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
+            // Multi-catch: all three reflection failures are equally fatal misconfigurations.
+            throw new IllegalStateException("Unable to create converter object: " + configuration.get(CONVERTER_KEY), e);
+        }
+    }
+
+    // Seam for tests: subclasses can capture output instead of writing to the context.
+    protected void write(ImmutableBytesWritable key, Put value, Context context) throws IOException, InterruptedException {
+        context.write(key, value);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/PrunerMapper.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/PrunerMapper.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/PrunerMapper.java
new file mode 100644
index 0000000..d0f1e46
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/hbase/mr/PrunerMapper.java
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import org.apache.hadoop.hbase.client.*;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableMapper;
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.accesstracker.AccessTracker;
+import org.apache.metron.enrichment.lookup.accesstracker.AccessTrackerUtil;
+
+import java.io.IOException;
+
+/**
+ * TableMapper that emits a Delete for every row the AccessTracker has not
+ * seen, pruning stale enrichment entries.
+ */
+public class PrunerMapper extends TableMapper<ImmutableBytesWritable, Delete> {
+    public static final String ACCESS_TRACKER_TABLE_CONF = "access_tracker_table";
+    public static final String ACCESS_TRACKER_CF_CONF = "access_tracker_cf";
+    public static final String TIMESTAMP_CONF = "access_tracker_timestamp";
+    public static final String ACCESS_TRACKER_NAME_CONF = "access_tracker_name";
+    AccessTracker tracker;
+
+    /** Loads the access tracker named in the job configuration. */
+    @Override
+    public void setup(Context context) throws IOException
+    {
+        String atTable = context.getConfiguration().get(ACCESS_TRACKER_TABLE_CONF);
+        String atCF = context.getConfiguration().get(ACCESS_TRACKER_CF_CONF);
+        String atName = context.getConfiguration().get(ACCESS_TRACKER_NAME_CONF);
+        // Validate before opening the table so misconfiguration fails fast.
+        long timestamp = context.getConfiguration().getLong(TIMESTAMP_CONF, -1);
+        if(timestamp < 0) {
+            throw new IllegalStateException("Must specify a timestamp that is positive.");
+        }
+        // Close the HTable once loading completes -- the original leaked it.
+        // NOTE(review): assumes loadAll fully materializes the trackers before
+        // returning; confirm it does not read from the table lazily.
+        HTable table = new HTable(context.getConfiguration(), atTable);
+        try {
+            tracker = AccessTrackerUtil.INSTANCE.loadAll(AccessTrackerUtil.INSTANCE.loadAll(table, atCF, atName, timestamp));
+        } catch (Throwable e) {
+            throw new IllegalStateException("Unable to load the accesstrackers from the directory", e);
+        } finally {
+            table.close();
+        }
+    }
+
+    /** Emits a Delete for any row key the tracker has not seen. */
+    @Override
+    public void map(ImmutableBytesWritable key, Result value, Context context) throws IOException, InterruptedException {
+        if(tracker == null || key == null) {
+            throw new RuntimeException("Tracker = " + tracker + " key = " + key);
+        }
+        if(!tracker.hasSeen(toLookupKey(key.get()))) {
+            Delete d = new Delete(key.get());
+            context.write(key, d);
+        }
+    }
+
+    /** Wraps raw row-key bytes in a read-only LookupKey view. */
+    protected LookupKey toLookupKey(final byte[] bytes) {
+        return new LookupKey() {
+            @Override
+            public byte[] toBytes() {
+                return bytes;
+            }
+
+            @Override
+            public void fromBytes(byte[] in) {
+                // Intentionally a no-op: this adapter only serializes.
+            }
+        };
+    }
+
+}


[43/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_slave/files/hostname.sh
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_slave/files/hostname.sh b/metron-deployment/roles/ambari_slave/files/hostname.sh
new file mode 100644
index 0000000..cc8c1cd
--- /dev/null
+++ b/metron-deployment/roles/ambari_slave/files/hostname.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+echo {{ inventory_hostname }}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_slave/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_slave/tasks/main.yml b/metron-deployment/roles/ambari_slave/tasks/main.yml
new file mode 100644
index 0000000..fc068da
--- /dev/null
+++ b/metron-deployment/roles/ambari_slave/tasks/main.yml
@@ -0,0 +1,51 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# tasks file for ambari_slave
+- name: Install ambari-agent
+  yum:
+    name: ambari-agent
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Create ambari-agent hostname script
+  template:
+    src: "../roles/ambari_slave/files/hostname.sh"
+    dest: "/var/lib/ambari-agent/hostname.sh"
+    mode: 0744
+    owner: "{{ ambari_user }}"
+    group: "{{ ambari_user }}"
+
+- name: Configure ambari-server hostname in ambari-agent configuration
+  lineinfile:
+    dest: /etc/ambari-agent/conf/ambari-agent.ini
+    regexp: "{{ item.regexp }}"
+    line: "{{ item.line }}"
+    insertafter: "{{ item.insertafter }}"
+    backup: yes
+  with_items:
+    - { regexp: "^.*hostname=.*$", line: "hostname={{ groups.ambari_master[0] }}", insertafter: '\[server\]' }
+    - { regexp: "^hostname_script=.*$", line: "hostname_script=/var/lib/ambari-agent/hostname.sh", insertafter: '\[agent\]'}
+
+- name: Ensure ambari-agent is running
+  service: name=ambari-agent state=restarted
+
+- name : Wait for agent to register
+  command : sleep 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/ambari_slave/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/ambari_slave/vars/main.yml b/metron-deployment/roles/ambari_slave/vars/main.yml
new file mode 100644
index 0000000..600b3fc
--- /dev/null
+++ b/metron-deployment/roles/ambari_slave/vars/main.yml
@@ -0,0 +1,24 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+#
+# TODO: duplicates from ambari-common.  need all of these moved to group_vars
+#
+rhel_ambari_install_url: "http://public-repo-1.hortonworks.com/ambari/centos6/2.x/updates/2.1.2.1/ambari.repo"
+ambari_user: "root"
+local_tmp_keygen_file: "/tmp/id_rsa.tmp"
+dest_tmp_keygen_file: "/tmp/id_rsa.tmp"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/meta/main.yml b/metron-deployment/roles/bro/meta/main.yml
new file mode 100644
index 0000000..9c9286f
--- /dev/null
+++ b/metron-deployment/roles/bro/meta/main.yml
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - libselinux-python
+  - ambari_gather_facts
+  - build-tools
+  - kafka-client
+  - librdkafka

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/bro-plugin-kafka.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/bro-plugin-kafka.yml b/metron-deployment/roles/bro/tasks/bro-plugin-kafka.yml
new file mode 100644
index 0000000..d8e887d
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/bro-plugin-kafka.yml
@@ -0,0 +1,41 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Distribute bro-kafka plugin
+  copy: src=../../../metron-sensors/bro-plugin-kafka dest=/tmp/ mode=0755
+
+- name: Compile and install the plugin
+  shell: "{{ item }}"
+  args:
+    chdir: "/tmp/bro-plugin-kafka"
+    creates: "{{ bro_home }}/lib/bro/plugins/BRO_KAFKA"
+  with_items:
+    - rm -rf build/
+    - "./configure --bro-dist=/tmp/bro-{{ bro_version }} --install-root={{ bro_home }}/lib/bro/plugins/ --with-librdkafka={{ librdkafka_home }}"
+    - make
+    - make install
+
+- name: Configure bro-kafka plugin
+  lineinfile:
+    dest: "{{ bro_home }}/share/bro/site/local.bro"
+    line: "{{ item }}"
+  with_items:
+    - "@load Bro/Kafka/logs-to-kafka.bro"
+    - "redef Kafka::logs_to_send = set(HTTP::LOG, DNS::LOG);"
+    - "redef Kafka::topic_name = \"{{ bro_topic }}\";"
+    - "redef Kafka::tag_json = T;"
+    - "redef Kafka::kafka_conf = table([\"metadata.broker.list\"] = \"{{ kafka_broker_url }}\");"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/bro.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/bro.yml b/metron-deployment/roles/bro/tasks/bro.yml
new file mode 100644
index 0000000..fb27ef9
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/bro.yml
@@ -0,0 +1,44 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download bro
+  get_url:
+    url: "https://www.bro.org/downloads/release/bro-{{ bro_version }}.tar.gz"
+    dest: "/tmp/bro-{{ bro_version }}.tar.gz"
+
+- name: Extract bro tarball
+  unarchive:
+    src: "/tmp/bro-{{ bro_version }}.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: "/tmp/bro-{{ bro_version }}"
+
+- name: Compile and Install bro
+  shell: "{{ item }}"
+  args:
+    chdir: "/tmp/bro-{{ bro_version }}"
+    creates: "{{ bro_home }}/bin/bro"
+  with_items:
+    - "./configure --prefix={{ bro_home }}"
+    - make
+    - make install
+
+- name: Configure bro
+  lineinfile:
+    dest: "{{ bro_home }}/etc/node.cfg"
+    regexp: '^interface=.*$'
+    line: 'interface={{ sniff_interface }}'

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/dependencies.yml b/metron-deployment/roles/bro/tasks/dependencies.yml
new file mode 100644
index 0000000..431e861
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/dependencies.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install prerequisites
+  yum: name={{ item }}
+  with_items:
+    - cmake
+    - make
+    - gcc
+    - gcc-c++
+    - flex
+    - bison
+    - libpcap
+    - libpcap-devel
+    - openssl-devel
+    - python-devel
+    - swig
+    - zlib-devel
+    - perl
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/librdkafka.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/librdkafka.yml b/metron-deployment/roles/bro/tasks/librdkafka.yml
new file mode 100644
index 0000000..652d319
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/librdkafka.yml
@@ -0,0 +1,39 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download librdkafka
+  get_url:
+    url: "{{ librdkafka_url }}"
+    dest: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
+
+- name: Extract librdkafka tarball
+  unarchive:
+    src: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: "/tmp/librdkafka-{{ librdkafka_version }}"
+
+- name: Compile and install librdkafka
+  shell: "{{ item }}"
+  args:
+    chdir: "/tmp/librdkafka-{{ librdkafka_version }}"
+    creates: "{{ librdkafka_home }}/lib/librdkafka.so"
+  with_items:
+    - rm -rf build/
+    - "./configure --prefix={{ librdkafka_home }}"
+    - make
+    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/main.yml b/metron-deployment/roles/bro/tasks/main.yml
new file mode 100644
index 0000000..14426d6
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: dependencies.yml
+- include: librdkafka.yml
+- include: bro.yml
+- include: bro-plugin-kafka.yml
+- include: start-bro.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/tasks/start-bro.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/tasks/start-bro.yml b/metron-deployment/roles/bro/tasks/start-bro.yml
new file mode 100644
index 0000000..1a0b938
--- /dev/null
+++ b/metron-deployment/roles/bro/tasks/start-bro.yml
@@ -0,0 +1,31 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Turn on promiscuous mode for {{ sniff_interface }}
+  shell: "ip link set {{ sniff_interface }} promisc on"
+
+- name: Start bro
+  shell: "{{ bro_home }}/bin/broctl deploy"
+
+- name: Bro Cronjob
+  cron:
+    name: Bro Cron
+    minute: "{{ bro_crontab_minutes }}"
+    job: "{{ item }}"
+  with_items:
+    - "{{ bro_crontab_job }}"
+    - "{{ bro_clean_job }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/bro/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/bro/vars/main.yml b/metron-deployment/roles/bro/vars/main.yml
new file mode 100644
index 0000000..2ff5177
--- /dev/null
+++ b/metron-deployment/roles/bro/vars/main.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+bro_home: /usr/local/bro
+bro_version: 2.4.1
+bro_daemon_log: /var/log/bro.log
+bro_topic: bro
+
+# bro cronjob
+bro_crontab_minutes: 0-59/5
+bro_crontab_job: "{{ bro_home }}/bin/broctl cron"
+bro_clean_job: "rm -rf {{ bro_home }}/spool/tmp/*"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/build-tools/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/build-tools/meta/main.yml b/metron-deployment/roles/build-tools/meta/main.yml
new file mode 100644
index 0000000..ddf6aa9
--- /dev/null
+++ b/metron-deployment/roles/build-tools/meta/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/build-tools/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/build-tools/tasks/main.yml b/metron-deployment/roles/build-tools/tasks/main.yml
new file mode 100644
index 0000000..c47ef43
--- /dev/null
+++ b/metron-deployment/roles/build-tools/tasks/main.yml
@@ -0,0 +1,34 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install Build Tools
+  yum: name={{ item }}
+  with_items:
+    - "@Development tools"
+    - libdnet-devel
+    - rpm-build
+    - libpcap
+    - libpcap-devel
+    - pcre
+    - pcre-devel
+    - zlib
+    - zlib-devel
+    - glib2-devel
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/defaults/main.yml b/metron-deployment/roles/elasticsearch/defaults/main.yml
new file mode 100644
index 0000000..0026717
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/defaults/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+elasticsearch_data_dir: /var/lib/elasticsearch
+elasticsearch_network_interface: eth0
+elasticsearch_logrotate_frequency: daily
+elasticsearch_logrotate_retention: 30
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/files/elasticsearch.repo
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/files/elasticsearch.repo b/metron-deployment/roles/elasticsearch/files/elasticsearch.repo
new file mode 100644
index 0000000..f033ced
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/files/elasticsearch.repo
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+[elasticsearch-1.7]
+name=Elasticsearch repository for 1.7.x packages
+baseurl=http://packages.elastic.co/elasticsearch/1.7/centos
+gpgcheck=1
+gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch
+enabled=1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/files/yaf_index.template
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/files/yaf_index.template b/metron-deployment/roles/elasticsearch/files/yaf_index.template
new file mode 100644
index 0000000..15ff6f8
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/files/yaf_index.template
@@ -0,0 +1,82 @@
+{
+   "template": "yaf_index*",
+   "mappings": {
+      "yaf_doc": {
+         "properties": {
+            "timestamp": {
+               "type": "date",
+               "format": "dateOptionalTime"
+            },
+            "location_point": {
+               "type": "geo_point"
+            },
+            "end-time": {
+               "type": "string"
+            },
+            "duration": {
+               "type": "string"
+            },
+            "rtt": {
+               "type": "string"
+            },
+            "proto": {
+               "type": "string"
+            },
+            "sip": {
+               "type": "string"
+            },
+            "sp": {
+               "type": "string"
+            },
+            "dip": {
+               "type": "string"
+            },
+            "dp": {
+               "type": "string"
+            },
+            "iflags": {
+               "type": "string"
+            },
+            "uflags": {
+               "type": "string"
+            },
+            "riflags": {
+               "type": "string"
+            },
+            "ruflags": {
+               "type": "string"
+            },
+            "isn": {
+               "type": "string"
+            },
+            "risn": {
+               "type": "string"
+            },
+            "tag": {
+               "type": "string"
+            },
+            "rtag": {
+               "type": "string"
+            },
+            "pkt": {
+               "type": "string"
+            },
+            "oct": {
+               "type": "string"
+            },
+            "rpkt": {
+               "type": "string"
+            },
+            "roct": {
+               "type": "string"
+            },
+            "app": {
+               "type": "string"
+            },
+            "end-reason": {
+               "type": "string"
+            }
+         }
+      }
+   }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/meta/main.yml b/metron-deployment/roles/elasticsearch/meta/main.yml
new file mode 100644
index 0000000..f5f059a
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/meta/main.yml
@@ -0,0 +1,24 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - java_jdk
+  - epel
+  - python-pip
+  - httplib2
+  - libselinux-python
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/tasks/configure_index.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/tasks/configure_index.yml b/metron-deployment/roles/elasticsearch/tasks/configure_index.yml
new file mode 100644
index 0000000..09739be
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/tasks/configure_index.yml
@@ -0,0 +1,44 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name : Wait for Elasticsearch Host to Start
+  wait_for:
+    host: "{{ groups.search[0] }}"
+    port: "{{ elasticsearch_web_port }}"
+    delay: 10
+    timeout: 300
+
+- name: Wait for Green Index Status
+  uri:
+    url: "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}/_cat/health"
+    method: GET
+    status_code: 200
+    return_content: yes
+  register: result
+  until: result.content.find("green") != -1
+  retries: 10
+  delay: 60
+  run_once: yes
+
+- name: Add Elasticsearch templates for topologies
+  uri:
+    url: "http://{{ groups.search[0] }}:{{ elasticsearch_web_port }}/_template/template_yaf"
+    method: POST
+    body: "{{ lookup('file','yaf_index.template') }}"
+    status_code: 200
+    body_format: json
+  run_once: yes

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/tasks/main.yml b/metron-deployment/roles/elasticsearch/tasks/main.yml
new file mode 100644
index 0000000..26554aa
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/tasks/main.yml
@@ -0,0 +1,73 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Add Elasticsearch GPG key.
+  rpm_key:
+    key: https://packages.elastic.co/GPG-KEY-elasticsearch
+    state: present
+
+- name: Add Elasticsearch repository.
+  copy:
+    src: elasticsearch.repo
+    dest: /etc/yum.repos.d/elasticsearch.repo
+    mode: 0644
+
+- name: Install Elasticsearch.
+  yum:
+    name: elasticsearch
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Create Data Directories
+  file:
+    path: "{{ item }}"
+    state: directory
+    mode: 0755
+    owner: elasticsearch
+    group: elasticsearch
+  when: elasticsearch_data_dir is defined
+  with_items:
+     - '{{ elasticsearch_data_dir.split(",") }}'
+
+- name: Configure Elasticsearch.
+  lineinfile: >
+    dest=/etc/elasticsearch/elasticsearch.yml
+    regexp="{{ item.regexp }}"
+    line="{{ item.line }}"
+    state=present
+  with_items:
+    - { regexp: '#cluster\.name', line: 'cluster.name: metron' }
+    - { regexp: '#network\.host:', line: 'network.host: _{{
+    elasticsearch_network_interface  }}:ipv4_' }
+    - { regexp: '#discovery\.zen\.ping\.unicast\.hosts',
+    line: 'discovery.zen.ping.unicast.hosts: [ {{ es_hosts }} ]'}
+    - { regexp: '#path\.data', line: 'path.data: {{     elasticsearch_data_dir }}' }
+
+- name: Start Elasticsearch.
+  service: name=elasticsearch state=started enabled=yes
+
+- include: configure_index.yml
+
+- name: Create Logrotate Script for Elasticsearch
+  template:
+    src: "metron-elasticsearch-logrotate.yml"
+    dest: "/etc/logrotate.d/metron-elasticsearch"
+    mode: 0644
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml b/metron-deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
new file mode 100644
index 0000000..5504ce1
--- /dev/null
+++ b/metron-deployment/roles/elasticsearch/templates/metron-elasticsearch-logrotate.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+# Elasticsearch
+/var/log/elasticsearch/*.log {
+  {{ elasticsearch_logrotate_frequency }}
+  rotate {{ elasticsearch_logrotate_retention }}
+  missingok
+  notifempty
+  copytruncate
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/epel/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/epel/tasks/main.yml b/metron-deployment/roles/epel/tasks/main.yml
new file mode 100644
index 0000000..db4e70b
--- /dev/null
+++ b/metron-deployment/roles/epel/tasks/main.yml
@@ -0,0 +1,30 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Get epel-repo rpm
+  get_url:
+    dest: /tmp/epel-release.rpm
+    url: http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+
+- name: Install epel-repo rpm
+  yum:
+    pkg: /tmp/epel-release.rpm
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/flume/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/flume/meta/main.yml b/metron-deployment/roles/flume/meta/main.yml
new file mode 100644
index 0000000..ff35a5a
--- /dev/null
+++ b/metron-deployment/roles/flume/meta/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - java_jdk
+  - libselinux-python

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/flume/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/flume/tasks/main.yml b/metron-deployment/roles/flume/tasks/main.yml
new file mode 100644
index 0000000..8576c3c
--- /dev/null
+++ b/metron-deployment/roles/flume/tasks/main.yml
@@ -0,0 +1,52 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Retrieve HDP repository definition
+  get_url:
+    url: "{{ hdp_repo_def }}"
+    dest: /etc/yum.repos.d/hdp.repo
+    mode: 0644
+
+- name: Install flume
+  yum: name={{item}}
+  with_items:
+    - flume
+    - flume-agent
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Create flume-env.sh
+  shell: cp /etc/flume/conf/flume-env.sh.template /etc/flume/conf/flume-env.sh
+
+- name: Configure flume-env.sh
+  lineinfile: >
+    dest=/etc/flume/conf/flume-env.sh
+    regexp="{{ item.regexp }}"
+    line="{{ item.line }}"
+    state=present
+  with_items:
+    - { regexp: '^.*export JAVA_HOME=.*$', line: 'export JAVA_HOME={{ java_home }}' }
+
+- name: Create flume service
+  shell: "{{item}}"
+  with_items:
+    - cp /usr/hdp/current/flume-server/etc/rc.d/init.d/flume-agent /etc/init.d/
+
+- name: Remove default flume configuration
+  file: path=/etc/flume/conf/flume.conf state=absent

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/flume/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/flume/vars/main.yml b/metron-deployment/roles/flume/vars/main.yml
new file mode 100644
index 0000000..351d125
--- /dev/null
+++ b/metron-deployment/roles/flume/vars/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/hadoop_setup/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/hadoop_setup/defaults/main.yml b/metron-deployment/roles/hadoop_setup/defaults/main.yml
new file mode 100644
index 0000000..99a55f6
--- /dev/null
+++ b/metron-deployment/roles/hadoop_setup/defaults/main.yml
@@ -0,0 +1,25 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+num_partitions: 1
+retention_in_gb: 10
+pycapa_topic: pcap
+bro_topic: bro
+yaf_topic: yaf
+snort_topic: snort
+enrichments_topic: enrichments
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/hadoop_setup/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/hadoop_setup/meta/main.yml b/metron-deployment/roles/hadoop_setup/meta/main.yml
new file mode 100644
index 0000000..8f0bf2b
--- /dev/null
+++ b/metron-deployment/roles/hadoop_setup/meta/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - ambari_gather_facts
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/hadoop_setup/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/hadoop_setup/tasks/main.yml b/metron-deployment/roles/hadoop_setup/tasks/main.yml
new file mode 100644
index 0000000..de01abf
--- /dev/null
+++ b/metron-deployment/roles/hadoop_setup/tasks/main.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+#must run on hadoop host
+- name: Create HBase tables
+  shell: echo "create '{{ item }}','t'" | hbase shell -n
+  ignore_errors: yes
+  with_items:
+    - "{{ pcap_hbase_table }}"
+    - "{{ tracker_hbase_table }}"
+    - "{{ threatintel_hbase_table }}"
+    - "{{ enrichment_hbase_table }}"
+
+#if kafka topic
+- name: Create Kafka topics
+  shell: "{{ kafka_home }}/bin/kafka-topics.sh --zookeeper {{ zookeeper_url }} --create --topic {{ item }} --partitions {{ num_partitions }} --replication-factor 1 --config retention.bytes={{ retention_in_gb * 1024 * 1024 * 1024}}"
+  ignore_errors: yes
+  with_items:
+    - "{{ pycapa_topic }}"
+    - "{{ bro_topic }}"
+    - "{{ yaf_topic }}"
+    - "{{ snort_topic }}"
+    - "{{ enrichments_topic }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/hadoop_setup/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/hadoop_setup/vars/main.yml b/metron-deployment/roles/hadoop_setup/vars/main.yml
new file mode 100644
index 0000000..9747044
--- /dev/null
+++ b/metron-deployment/roles/hadoop_setup/vars/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+kafka_home: /usr/hdp/current/kafka-broker/

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/httplib2/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/httplib2/tasks/main.yml b/metron-deployment/roles/httplib2/tasks/main.yml
new file mode 100644
index 0000000..5502cf4
--- /dev/null
+++ b/metron-deployment/roles/httplib2/tasks/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install python httplib2 dependency
+  pip:
+    name: httplib2
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/java_jdk/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/java_jdk/defaults/main.yml b/metron-deployment/roles/java_jdk/defaults/main.yml
new file mode 100644
index 0000000..28f6c71
--- /dev/null
+++ b/metron-deployment/roles/java_jdk/defaults/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+java_home: /usr/jdk64/jdk1.8.0_40

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/java_jdk/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/java_jdk/tasks/main.yml b/metron-deployment/roles/java_jdk/tasks/main.yml
new file mode 100644
index 0000000..999b9c1
--- /dev/null
+++ b/metron-deployment/roles/java_jdk/tasks/main.yml
@@ -0,0 +1,34 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Check for java at "{{ java_home }}"
+  stat: path="{{ java_home }}"
+  register: jdk_dir
+
+- name: Alternatives link for java
+  alternatives: name={{ item.name }} link={{ item.link }}  path={{ item.path }}
+  with_items:
+    - { name: java, link: /usr/bin/java, path: "{{ java_home }}/bin/java" }
+    - { name: jar, link: /usr/bin/jar, path: "{{ java_home }}/bin/jar" }
+  when: jdk_dir.stat.exists
+
+- name: Install openjdk
+  yum: name={{item}}
+  with_items:
+    - java-1.8.0-openjdk
+    - java-1.8.0-openjdk-devel
+  when: not jdk_dir.stat.exists

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/kafka-broker/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/kafka-broker/defaults/main.yml b/metron-deployment/roles/kafka-broker/defaults/main.yml
new file mode 100644
index 0000000..351d125
--- /dev/null
+++ b/metron-deployment/roles/kafka-broker/defaults/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/kafka-broker/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/kafka-broker/meta/main.yml b/metron-deployment/roles/kafka-broker/meta/main.yml
new file mode 100644
index 0000000..9587e79
--- /dev/null
+++ b/metron-deployment/roles/kafka-broker/meta/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+dependencies:
+  - libselinux-python

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/kafka-broker/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/kafka-broker/tasks/main.yml b/metron-deployment/roles/kafka-broker/tasks/main.yml
new file mode 100644
index 0000000..db05cb0
--- /dev/null
+++ b/metron-deployment/roles/kafka-broker/tasks/main.yml
@@ -0,0 +1,41 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Retrieve HDP repository definition
+  get_url:
+    url: "{{ hdp_repo_def }}"
+    dest: /etc/yum.repos.d/hdp.repo
+    mode: 0644
+
+- name: Install kafka
+  yum: name={{item}}
+  with_items:
+    - java-1.8.0-openjdk
+    - kafka
+    - zookeeper-server
+
+- name: Create pid directories
+  file: path={{ item }} state=directory mode=0755
+  with_items:
+    - /var/run/zookeeper
+    - /var/run/kafka
+
+- name: Start zookeeper
+  shell: /usr/hdp/current/zookeeper-server/bin/zookeeper-server start
+
+- name: Start kafka
+  shell: /usr/hdp/current/kafka-broker/bin/kafka start

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/kafka-broker/vars/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/kafka-broker/vars/main.yml b/metron-deployment/roles/kafka-broker/vars/main.yml
new file mode 100644
index 0000000..351d125
--- /dev/null
+++ b/metron-deployment/roles/kafka-broker/vars/main.yml
@@ -0,0 +1,18 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/kafka-client/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/kafka-client/tasks/main.yml b/metron-deployment/roles/kafka-client/tasks/main.yml
new file mode 100644
index 0000000..1674225
--- /dev/null
+++ b/metron-deployment/roles/kafka-client/tasks/main.yml
@@ -0,0 +1,30 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - name: Retrieve HDP repository definition
+    get_url:
+      url: "{{ hdp_repo_def }}"
+      dest: /etc/yum.repos.d/hdp.repo
+      mode: 0644
+
+  - name: Install kafka
+    yum:
+      name: kafka
+    register: result
+    until: result.rc == 0
+    retries: 5
+    delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/librdkafka/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/librdkafka/defaults/main.yml b/metron-deployment/roles/librdkafka/defaults/main.yml
new file mode 100644
index 0000000..d920883
--- /dev/null
+++ b/metron-deployment/roles/librdkafka/defaults/main.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+librdkafka_version: 0.8.6
+librdkafka_url: https://github.com/edenhill/librdkafka/archive/0.8.6.tar.gz
+librdkafka_home: /usr/local

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/librdkafka/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/librdkafka/tasks/dependencies.yml b/metron-deployment/roles/librdkafka/tasks/dependencies.yml
new file mode 100644
index 0000000..431e861
--- /dev/null
+++ b/metron-deployment/roles/librdkafka/tasks/dependencies.yml
@@ -0,0 +1,37 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install prerequisites
+  yum: name={{ item }}
+  with_items:
+    - cmake
+    - make
+    - gcc
+    - gcc-c++
+    - flex
+    - bison
+    - libpcap
+    - libpcap-devel
+    - openssl-devel
+    - python-devel
+    - swig
+    - zlib-devel
+    - perl
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/librdkafka/tasks/librdkafka.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/librdkafka/tasks/librdkafka.yml b/metron-deployment/roles/librdkafka/tasks/librdkafka.yml
new file mode 100644
index 0000000..652d319
--- /dev/null
+++ b/metron-deployment/roles/librdkafka/tasks/librdkafka.yml
@@ -0,0 +1,39 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Download librdkafka
+  get_url:
+    url: "{{ librdkafka_url }}"
+    dest: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
+
+- name: Extract librdkafka tarball
+  unarchive:
+    src: "/tmp/librdkafka-{{ librdkafka_version }}.tar.gz"
+    dest: /tmp
+    copy: no
+    creates: "/tmp/librdkafka-{{ librdkafka_version }}"
+
+- name: Compile and install librdkafka
+  shell: "{{ item }}"
+  args:
+    chdir: "/tmp/librdkafka-{{ librdkafka_version }}"
+    creates: "{{ librdkafka_home }}/lib/librdkafka.so"
+  with_items:
+    - rm -rf build/
+    - "./configure --prefix={{ librdkafka_home }}"
+    - make
+    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/librdkafka/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/librdkafka/tasks/main.yml b/metron-deployment/roles/librdkafka/tasks/main.yml
new file mode 100644
index 0000000..2144d7f
--- /dev/null
+++ b/metron-deployment/roles/librdkafka/tasks/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- include: dependencies.yml
+- include: librdkafka.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/libselinux-python/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/libselinux-python/tasks/main.yml b/metron-deployment/roles/libselinux-python/tasks/main.yml
new file mode 100644
index 0000000..78f5a27
--- /dev/null
+++ b/metron-deployment/roles/libselinux-python/tasks/main.yml
@@ -0,0 +1,25 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install libselinux-python
+  yum:
+    name: libselinux-python
+    state: installed
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_common/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_common/defaults/main.yml b/metron-deployment/roles/metron_common/defaults/main.yml
new file mode 100644
index 0000000..e4a7735
--- /dev/null
+++ b/metron-deployment/roles/metron_common/defaults/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+metron_jar_name: metron-elasticsearch-{{ metron_version }}.jar
+metron_jar_path: "{{ playbook_dir }}/../../metron-platform/metron-elasticsearch/target/{{ metron_jar_name }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_common/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_common/meta/main.yml b/metron-deployment/roles/metron_common/meta/main.yml
new file mode 100644
index 0000000..4db50aa
--- /dev/null
+++ b/metron-deployment/roles/metron_common/meta/main.yml
@@ -0,0 +1,22 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - yum-update
+  - epel
+  - ntp
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_common/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_common/tasks/main.yml b/metron-deployment/roles/metron_common/tasks/main.yml
new file mode 100644
index 0000000..64e6ab9
--- /dev/null
+++ b/metron-deployment/roles/metron_common/tasks/main.yml
@@ -0,0 +1,35 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Check OS Version
+  fail: msg="Metron deployment supports CentOS 6 only."
+  when: (ansible_distribution != "CentOS" or ansible_distribution_major_version != "6")
+
+- name: Check for Metron jar path
+  stat: path={{ metron_jar_path }}
+  register: metron_jars
+
+- name: Verify Metron jars exist
+  fail: msg="Unable to locate staged Metron jars at {{ metron_jar_path }}.  Did you run 'mvn package'?"
+  when: metron_jars.stat.exists == False
+
+- name: Ensure iptables is stopped and is not running at boot time.
+  ignore_errors: yes
+  service:
+    name: iptables
+    state: stopped
+    enabled: no

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/defaults/main.yml b/metron-deployment/roles/metron_pcapservice/defaults/main.yml
new file mode 100644
index 0000000..2ff6c6b
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/defaults/main.yml
@@ -0,0 +1,24 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+metron_version: 0.1BETA
+metron_directory: /usr/metron/{{ metron_version }}
+pcapservice_jar_name: metron-api-{{ metron_version }}-jar-with-dependencies.jar
+pcapservice_jar_src: "{{ playbook_dir }}/../../metron-platform/metron-api/target/{{ pcapservice_jar_name }}"
+pcapservice_jar_dst: "{{ metron_directory }}/lib/{{ pcapservice_jar_name }}"
+pcapservice_port: 8081
+hbase_config_path: "/etc/hbase/conf"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/meta/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/meta/main.yml b/metron-deployment/roles/metron_pcapservice/meta/main.yml
new file mode 100644
index 0000000..ddf6aa9
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/meta/main.yml
@@ -0,0 +1,19 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+dependencies:
+  - java_jdk

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/tasks/config-hbase.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/tasks/config-hbase.yml b/metron-deployment/roles/metron_pcapservice/tasks/config-hbase.yml
new file mode 100644
index 0000000..b77c1ec
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/tasks/config-hbase.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+ - name: Fetch hbase-site.xml
+   fetch: src=/etc/hbase/conf/hbase-site.xml dest=/tmp/hbase/conf/hbase-site.xml flat=yes
+   delegate_to: "{{ groups.ambari_slave[0] }}"
+
+ - name: Create hbase conf directory
+   file: path=/etc/hbase/conf state=directory mode=0755
+
+ - name: Copy hbase-site.xml
+   copy: src=/tmp/hbase/conf/hbase-site.xml dest=/etc/hbase/conf/hbase-site.xml mode=0644

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/tasks/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/tasks/main.yml b/metron-deployment/roles/metron_pcapservice/tasks/main.yml
new file mode 100644
index 0000000..ee9cac2
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/tasks/main.yml
@@ -0,0 +1,25 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Check for hbase-site
+  stat: path=/etc/hbase/conf/hbase-site.xml
+  register: hbase_site
+
+- include: config-hbase.yml
+  when: not hbase_site.stat.exists
+
+- include: pcapservice.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/tasks/pcapservice.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/tasks/pcapservice.yml b/metron-deployment/roles/metron_pcapservice/tasks/pcapservice.yml
new file mode 100644
index 0000000..651f7fb
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/tasks/pcapservice.yml
@@ -0,0 +1,38 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Create Metron streaming directories
+  file: path={{ metron_directory }}/{{ item.name }}  state=directory mode=0755
+  with_items:
+      - { name: 'lib'}
+      - { name: 'config'}
+
+- name: Copy Metron pcapservice jar
+  copy:
+    src: "{{ pcapservice_jar_src }}"
+    dest: "{{ pcapservice_jar_dst }}"
+
+- name: Add hbase-site.xml to pcapservice jar
+  shell: "jar -uf {{ pcapservice_jar_dst }} hbase-site.xml"
+  args:
+    chdir: "{{ hbase_config_path }}"
+
+- name: Install service script
+  template: src=pcapservice dest=/etc/init.d/pcapservice mode=0755
+
+- name: Start pcapservice
+  service: name=pcapservice state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_pcapservice/templates/pcapservice
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_pcapservice/templates/pcapservice b/metron-deployment/roles/metron_pcapservice/templates/pcapservice
new file mode 100644
index 0000000..a3ad92b
--- /dev/null
+++ b/metron-deployment/roles/metron_pcapservice/templates/pcapservice
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# metron pcap service
+# chkconfig: 345 20 80
+# description: Metron PCAP Service Daemon
+# processname: pcapservice
+#
+NAME=pcapservice
+DESC="Metron pcap service"
+PIDFILE=/var/run/$NAME.pid
+SCRIPTNAME=/etc/init.d/$NAME
+LOGFILE="/var/log/metron_pcapservice.log"
+EXTRA_ARGS="${@:2}"
+DAEMON_PATH="/"
+DAEMON="/usr/bin/java"
+DAEMONOPTS="-cp {{ pcapservice_jar_dst }} org.apache.metron.pcapservice.rest.PcapService -port {{ pcapservice_port }}"
+
+case "$1" in
+  start)
+    printf "%-50s" "Starting $NAME..."
+
+    # kick-off the daemon
+    cd $DAEMON_PATH
+    PID=`$DAEMON $DAEMONOPTS >> $LOGFILE 2>&1 & echo $!`
+    if [ -z $PID ]; then
+        printf "%s\n" "Fail"
+    else
+        echo $PID > $PIDFILE
+        printf "%s\n" "Ok"
+    fi
+  ;;
+
+  status)
+    printf "%-50s" "Checking $NAME..."
+    if [ -f $PIDFILE ]; then
+      PID=`cat $PIDFILE`
+      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
+        printf "%s\n" "Process dead but pidfile exists"
+      else
+        echo "Running"
+      fi
+    else
+      printf "%s\n" "Service not running"
+    fi
+  ;;
+
+  stop)
+    printf "%-50s" "Stopping $NAME"
+    PID=`cat $PIDFILE`
+    cd $DAEMON_PATH
+    if [ -f $PIDFILE ]; then
+        kill -HUP $PID
+        printf "%s\n" "Ok"
+        rm -f $PIDFILE
+    else
+        printf "%s\n" "pidfile not found"
+    fi
+  ;;
+
+  restart)
+    $0 stop
+    $0 start
+  ;;
+
+  *)
+    echo "Usage: $0 {status|start|stop|restart}"
+    exit 1
+esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/defaults/main.yml b/metron-deployment/roles/metron_streaming/defaults/main.yml
new file mode 100644
index 0000000..d799b33
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/defaults/main.yml
@@ -0,0 +1,81 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+metron_directory: /usr/metron/{{ metron_version }}
+metron_solr_jar_name: metron-solr-{{ metron_version }}.jar
+metron_elasticsearch_jar_name: metron-elasticsearch-{{ metron_version }}.jar
+metron_parsers_jar_name: metron-parsers-{{ metron_version }}.jar
+
+metron_common_bundle_name: metron-common-{{ metron_version }}-archive.tar.gz
+metron_data_management_bundle_name: metron-data-management-{{ metron_version }}-archive.tar.gz
+metron_enrichment_bundle_name: metron-enrichment-{{ metron_version }}-archive.tar.gz
+metron_solr_bundle_name: metron-solr-{{ metron_version }}-archive.tar.gz
+metron_elasticsearch_bundle_name: metron-elasticsearch-{{ metron_version }}-archive.tar.gz
+metron_parsers_bundle_name: metron-parsers-{{ metron_version }}-archive.tar.gz
+metron_common_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-common/target/{{ metron_common_bundle_name }}"
+metron_data_management_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-data-management/target/{{ metron_data_management_bundle_name }}"
+metron_enrichment_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-enrichment/target/{{ metron_enrichment_bundle_name }}"
+metron_solr_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-solr/target/{{ metron_solr_bundle_name }}"
+metron_elasticsearch_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-elasticsearch/target/{{ metron_elasticsearch_bundle_name }}"
+metron_parsers_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-parsers/target/{{ metron_parsers_bundle_name }}"
+
+
+config_path: "{{ metron_directory }}/config"
+zookeeper_config_path: "{{ config_path }}/zookeeper"
+zookeeper_global_config_path: "{{ zookeeper_config_path }}/global.json"
+metron_solr_properties_config_path: "{{ metron_directory }}/config/solr.properties"
+metron_elasticsearch_properties_config_path: "{{ metron_directory }}/config/elasticsearch.properties"
+metron_parsers_properties_config_path: "{{ metron_directory }}/config/parsers.properties"
+hbase_config_path: "/etc/hbase/conf"
+hdfs_config_path: "/etc/hadoop/conf"
+
+threat_intel_bulk_load: True
+threat_intel_bin: "{{ metron_directory }}/bin/threatintel_bulk_load.sh"
+threat_intel_work_dir: /tmp/ti_bulk
+threat_intel_csv_filename: "threat_ip.csv"
+threat_intel_csv_filepath: "{{ threat_intel_csv_filename }}"
+
+pycapa_topic: pcap
+bro_topic: bro
+yaf_topic: yaf
+snort_topic: snort
+enrichments_topic: enrichments
+storm_enrichment_topology:
+    - "{{ metron_directory }}/flux/enrichment/remote.yaml"
+storm_parser_topologies:
+    - "{{ metron_directory }}/flux/bro/remote.yaml"
+    - "{{ metron_directory }}/flux/snort/remote.yaml"
+    - "{{ metron_directory }}/flux/yaf/remote.yaml"
+
+hdfs_retention_days: 30
+hdfs_bro_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/bro_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/bro-purge/cron-hdfs-bro-purge.log 2>&1"
+hdfs_yaf_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/yaf_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/yaf-purge/cron-hdfs-yaf-purge.log 2>&1"
+hdfs_snort_purge_cronjob: "{{ metron_directory }}/bin/prune_hdfs_files.sh -f {{ hdfs_url }} -g '/apps/metron/enrichment/indexed/snort_doc/*enrichment-*' -s $(date -d '{{ hdfs_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/snort-purge/cron-hdfs-snort-purge.log 2>&1"
+
+elasticsearch_config_path: /etc/elasticsearch
+elasticsearch_cluster_name: metron
+elasticsearch_transport_port: 9300
+
+es_retention_days: 30
+es_bro_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p bro_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/bro-purge/cron-es-bro-purge.log 2>&1"
+es_yaf_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p yaf_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/yaf-purge/cron-es-yaf-purge.log 2>&1"
+es_snort_purge_cronjob: "{{ metron_directory }}/bin/prune_elasticsearch_indices.sh -z {{ zookeeper_url }} -p snort_index_ -s $(date -d '{{ es_retention_days }} days ago' +%m/%d/%Y) -n 1 >> /var/log/snort-purge/cron-es-snort-purge.log 2>&1"
+
+metron_hdfs_output_dir: "/apps/metron"
+metron_hdfs_rotation_policy: org.apache.storm.hdfs.bolt.rotation.TimedRotationPolicy
+metron_hdfs_rotation_policy_count: 1
+metron_hdfs_rotation_policy_units: DAYS

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/roles/metron_streaming/files/config/sensors/bro.json
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/files/config/sensors/bro.json b/metron-deployment/roles/metron_streaming/files/config/sensors/bro.json
new file mode 100644
index 0000000..2b534b4
--- /dev/null
+++ b/metron-deployment/roles/metron_streaming/files/config/sensors/bro.json
@@ -0,0 +1,19 @@
+{
+  "index": "bro",
+  "batchSize": 5,
+  "enrichmentFieldMap":
+  {
+    "geo": ["ip_dst_addr", "ip_src_addr"],
+    "host": ["host"]
+  },
+  "threatIntelFieldMap":
+  {
+    "hbaseThreatIntel": ["ip_src_addr", "ip_dst_addr"]
+  },
+  "fieldToThreatIntelTypeMap":
+  {
+    "ip_src_addr" : ["malicious_ip"],
+    "ip_dst_addr" : ["malicious_ip"]
+  }
+}
+



[45/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/meta/main.yml b/deployment/roles/yaf/meta/main.yml
deleted file mode 100644
index ff366b8..0000000
--- a/deployment/roles/yaf/meta/main.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-  - build-tools
-  - java_jdk
-  - libselinux-python
-  - kafka-client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/tasks/fixbuf.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/tasks/fixbuf.yml b/deployment/roles/yaf/tasks/fixbuf.yml
deleted file mode 100644
index 9cd9244..0000000
--- a/deployment/roles/yaf/tasks/fixbuf.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download fixbuf
-  get_url:
-    url: "http://tools.netsa.cert.org/releases/libfixbuf-{{fixbuf_version}}.tar.gz"
-    dest: "/tmp/libfixbuf-{{fixbuf_version}}.tar.gz"
-
-- name: Extract fixbuf tarball
-  unarchive:
-    src: "/tmp/libfixbuf-{{fixbuf_version}}.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: "/tmp/libfixbuf-{{fixbuf_version}}"
-
-- name: Compile and Install fixbuf
-  shell: "{{item}}"
-  args:
-    chdir: "/tmp/libfixbuf-{{fixbuf_version}}"
-  with_items:
-    - ./configure
-    - make
-    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/tasks/main.yml b/deployment/roles/yaf/tasks/main.yml
deleted file mode 100644
index 15f67f6..0000000
--- a/deployment/roles/yaf/tasks/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: fixbuf.yml
-- include: yaf.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/tasks/yaf.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/tasks/yaf.yml b/deployment/roles/yaf/tasks/yaf.yml
deleted file mode 100644
index 10d3205..0000000
--- a/deployment/roles/yaf/tasks/yaf.yml
+++ /dev/null
@@ -1,60 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download yaf
-  get_url:
-    url: "http://tools.netsa.cert.org/releases/yaf-{{yaf_version}}.tar.gz"
-    dest: "/tmp/yaf-{{yaf_version}}.tar.gz"
-
-- name: Extract yaf tarball
-  unarchive:
-    src: "/tmp/yaf-{{yaf_version}}.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: /usr/local/bin/yaf
-
-- name: Compile and install yaf
-  shell: "{{item}}"
-  args:
-    chdir: "/tmp/yaf-{{yaf_version}}"
-    creates: /usr/local/bin/yaf
-  with_items:
-    - ./configure --enable-applabel --enable-plugins
-    - make
-    - make install
-
-- name: Create yaf home directory
-  file:
-    path: "{{ yaf_home }}"
-    state: directory
-    mode: 0755
-
-- name: Install yaf start script
-  template: src=start-yaf.sh dest={{ yaf_home }}/start-yaf.sh mode=0755
-
-- name: Install init.d service script
-  template: src=yaf dest=/etc/init.d/yaf mode=0755
-
-- name: Register the service with systemd
-  shell: systemctl enable pcap-replay
-  when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "7"
-
-- name: Turn on promiscuous mode for {{ sniff_interface }}
-  shell: "ip link set {{ sniff_interface }} promisc on"
-
-- name: Start yaf
-  service: name=yaf state=restarted args="{{ yaf_args }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/templates/start-yaf.sh
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/templates/start-yaf.sh b/deployment/roles/yaf/templates/start-yaf.sh
deleted file mode 100644
index 9660e72..0000000
--- a/deployment/roles/yaf/templates/start-yaf.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# a very simply metron probe that captures the output of yaf - yet another
-# flowmeter - and sends the output to kafka so that it can be consumed
-# by metron
-#
-{{ yaf_bin }} --in {{ sniff_interface }} --live pcap "${@:1}" | {{ yafscii_bin }} --tabular | {{ kafka_prod }} --broker-list {{ kafka_broker_url }} --topic {{ yaf_topic }}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/templates/yaf
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/templates/yaf b/deployment/roles/yaf/templates/yaf
deleted file mode 100644
index 18bc4ac..0000000
--- a/deployment/roles/yaf/templates/yaf
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# yaf daemon
-# chkconfig: 345 20 80
-# description: Runs yaf - yet another flowmeter
-# processname: yaf
-#
-NAME=yaf
-DESC="Executes yaf - yet another flowmeter"
-PIDFILE=/var/run/$NAME.pid
-SCRIPTNAME=/etc/init.d/$NAME
-DAEMON_PATH="{{ yaf_home }}"
-DAEMON="{{ yaf_start }}"
-DAEMONOPTS="${@:2}"
-
-case "$1" in
-  start)
-    printf "%-50s" "Starting $NAME..."
-
-    # kick-off the daemon
-    cd $DAEMON_PATH
-    PID=`$DAEMON $DAEMONOPTS > /dev/null 2>&1 & echo $!`
-    if [ -z $PID ]; then
-        printf "%s\n" "Fail"
-    else
-        echo $PID > $PIDFILE
-        printf "%s\n" "Ok"
-    fi
-  ;;
-
-  status)
-    printf "%-50s" "Checking $NAME..."
-    if [ -f $PIDFILE ]; then
-      PID=`cat $PIDFILE`
-      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-        printf "%s\n" "Process dead but pidfile exists"
-      else
-        echo "Running"
-      fi
-    else
-      printf "%s\n" "Service not running"
-    fi
-  ;;
-
-  stop)
-    printf "%-50s" "Stopping $NAME"
-    PID=`cat $PIDFILE`
-    cd $DAEMON_PATH
-    if [ -f $PIDFILE ]; then
-        kill -HUP $PID
-        killall $NAME
-        printf "%s\n" "Ok"
-        rm -f $PIDFILE
-    else
-        printf "%s\n" "pidfile not found"
-    fi
-  ;;
-
-  restart)
-    $0 stop
-    $0 start
-  ;;
-
-  *)
-    echo "Usage: $0 {status|start|stop|restart}"
-    exit 1
-esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yum-update/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yum-update/tasks/main.yml b/deployment/roles/yum-update/tasks/main.yml
deleted file mode 100644
index 4db6297..0000000
--- a/deployment/roles/yum-update/tasks/main.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Yum Update Packages
-  yum:
-    name: "*"
-    state: latest
-    update_cache: yes
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/multinode-vagrant/.gitignore
----------------------------------------------------------------------
diff --git a/deployment/vagrant/multinode-vagrant/.gitignore b/deployment/vagrant/multinode-vagrant/.gitignore
deleted file mode 100644
index 8000dd9..0000000
--- a/deployment/vagrant/multinode-vagrant/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.vagrant

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/multinode-vagrant/Vagrantfile
----------------------------------------------------------------------
diff --git a/deployment/vagrant/multinode-vagrant/Vagrantfile b/deployment/vagrant/multinode-vagrant/Vagrantfile
deleted file mode 100644
index 61d656f..0000000
--- a/deployment/vagrant/multinode-vagrant/Vagrantfile
+++ /dev/null
@@ -1,65 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-hosts = [
-  { hostname: "node1", ip: "192.168.66.101", memory: "2048", cpus: 2 },
-  { hostname: "node2", ip: "192.168.66.102", memory: "2048", cpus: 2 },
-  { hostname: "node3", ip: "192.168.66.103", memory: "2048", cpus: 2 },
-  { hostname: "node4", ip: "192.168.66.104", memory: "2048", cpus: 2 }
-]
-
-Vagrant.configure(2) do |config|
-
-  # all hosts built on centos 6
-  config.vm.box = "bento/centos-6.7"
-  config.ssh.insert_key = false
-
-  # enable the hostmanager plugin
-  config.hostmanager.enabled = true
-  config.hostmanager.manage_host = true
-
-  # define each host
-  hosts.each_with_index do |host, index|
-    config.vm.define host[:hostname] do |node|
-
-      # host settings
-      node.vm.hostname = host[:hostname]
-      node.vm.network "private_network", ip: host[:ip]
-
-      # vm settings
-      node.vm.provider "virtualbox" do |vb|
-        vb.memory = host[:memory]
-        vb.cpus = host[:cpus]
-      end
-
-      # enable promisc mode on the network interface
-      if host.has_key?(:promisc)
-        vb.customize ["modifyvm", :id, "--nicpromisc#{host[:promisc]}", "allow-all"]
-      end
-
-      # provisioning; only after all hosts created
-      if index == hosts.size - 1
-        node.vm.provision :ansible do |ansible|
-          ansible.playbook = "../../playbooks/metron_full_install.yml"
-          ansible.sudo = true
-          ansible.inventory_path = "../../inventory/multinode-vagrant"
-          ansible.limit = "all"
-        end
-      end
-    end
-  end
-end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/multinode-vagrant/ansible.cfg
----------------------------------------------------------------------
diff --git a/deployment/vagrant/multinode-vagrant/ansible.cfg b/deployment/vagrant/multinode-vagrant/ansible.cfg
deleted file mode 100644
index 7a41ec8..0000000
--- a/deployment/vagrant/multinode-vagrant/ansible.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-[defaults]
-host_key_checking = false
-library = ../../extra_modules
-roles_path = ../../roles
-pipelining = True
-log_path = ./ansible.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/packet-capture/Vagrantfile
----------------------------------------------------------------------
diff --git a/deployment/vagrant/packet-capture/Vagrantfile b/deployment/vagrant/packet-capture/Vagrantfile
deleted file mode 100644
index 1303712..0000000
--- a/deployment/vagrant/packet-capture/Vagrantfile
+++ /dev/null
@@ -1,69 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-Vagrant.configure("2") do |config|
-
-  # enable hostmanager
-  config.hostmanager.enabled = true
-  config.hostmanager.manage_host = true
-
-  #
-  # source
-  #
-  config.vm.define "source" do |node|
-
-    # host settings
-    node.vm.hostname = "source"
-    node.vm.box = "bento/centos-7.1"
-    node.ssh.insert_key = "true"
-    node.vm.network :private_network, ip: "192.168.33.10", netmask: "255.255.255.0"
-
-    # provider
-    node.vm.provider "virtualbox" do |vb|
-      vb.memory = 1024
-      vb.cpus = 1
-    end
-  end
-
-  #
-  # sink
-  #
-  config.vm.define "sink" do |node|
-
-    # host settings
-    node.vm.hostname = "sink"
-    node.vm.box = "bento/centos-7.1"
-    node.ssh.insert_key = "true"
-    node.vm.network "public_network"
-    node.vm.network :private_network, ip: "192.168.33.11", netmask: "255.255.255.0"
-
-    # provider
-    node.vm.provider "virtualbox" do |vb|
-      vb.memory = 4096
-      vb.cpus = 3
-
-      # network adapter settings; [Am79C970A|Am79C973|82540EM|82543GC|82545EM|virtio]
-      vb.customize ["modifyvm", :id, "--nicpromisc2", "allow-all"]
-      vb.customize ["modifyvm", :id, "--nictype2","82545EM"]
-    end
-  end
-
-  # provision hosts
-  config.vm.provision :ansible do |ansible|
-    ansible.playbook = "playbook.yml"
-  end
-end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/packet-capture/ansible.cfg
----------------------------------------------------------------------
diff --git a/deployment/vagrant/packet-capture/ansible.cfg b/deployment/vagrant/packet-capture/ansible.cfg
deleted file mode 100644
index 9c650c2..0000000
--- a/deployment/vagrant/packet-capture/ansible.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-[defaults]
-host_key_checking = false
-library = ../../extra_modules
-roles_path = ../../roles
-pipelining = True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/packet-capture/playbook.yml
----------------------------------------------------------------------
diff --git a/deployment/vagrant/packet-capture/playbook.yml b/deployment/vagrant/packet-capture/playbook.yml
deleted file mode 100644
index 7a5128c..0000000
--- a/deployment/vagrant/packet-capture/playbook.yml
+++ /dev/null
@@ -1,43 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-#
-# produces network traffic
-#
-- hosts: source
-  become: yes
-  vars:
-    pcap_replay_interface: "enp0s8"
-  roles:
-    - role: pcap_replay
-
-#
-# consumes network traffic
-#
-- hosts: sink
-  become: yes
-  vars:
-      dpdk_device: ["00:08.0"]
-      dpdk_target: "x86_64-native-linuxapp-gcc"
-      num_huge_pages: 512
-      pcapture_portmask: 0xf
-      pcapture_topic: pcap
-      kafka_broker_url: localhost:9092
-  roles:
-    - role: librdkafka
-    - role: kafka-broker
-    - role: packet-capture

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/singlenode-vagrant/.gitignore
----------------------------------------------------------------------
diff --git a/deployment/vagrant/singlenode-vagrant/.gitignore b/deployment/vagrant/singlenode-vagrant/.gitignore
deleted file mode 100644
index 8000dd9..0000000
--- a/deployment/vagrant/singlenode-vagrant/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.vagrant

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/singlenode-vagrant/Vagrantfile
----------------------------------------------------------------------
diff --git a/deployment/vagrant/singlenode-vagrant/Vagrantfile b/deployment/vagrant/singlenode-vagrant/Vagrantfile
deleted file mode 100644
index 98413d6..0000000
--- a/deployment/vagrant/singlenode-vagrant/Vagrantfile
+++ /dev/null
@@ -1,63 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-hosts = [{
-    hostname: "node1",
-    ip: "192.168.66.121",
-    memory: "8192",
-    cpus: 4,
-    promisc: 2  # enables promisc on the 'Nth' network interface
-}]
-
-Vagrant.configure(2) do |config|
-
-  # all hosts built on centos 6
-  config.vm.box = "bento/centos-6.7"
-  config.ssh.insert_key = "true"
-
-  # enable the hostmanager plugin
-  config.hostmanager.enabled = true
-  config.hostmanager.manage_host = true
-
-  # host definition
-  hosts.each_with_index do |host, index|
-    config.vm.define host[:hostname] do |node|
-
-      # host settings
-      node.vm.hostname = host[:hostname]
-      node.vm.network "private_network", ip: host[:ip]
-
-      # vm settings
-      node.vm.provider "virtualbox" do |vb|
-        vb.memory = host[:memory]
-        vb.cpus = host[:cpus]
-
-        # enable promisc mode on the network interface
-        if host.has_key?(:promisc)
-          vb.customize ["modifyvm", :id, "--nicpromisc#{host[:promisc]}", "allow-all"]
-        end
-      end
-    end
-  end
-
-  # provisioning
-  config.vm.provision :ansible do |ansible|
-    ansible.playbook = "../../playbooks/metron_full_install.yml"
-    ansible.sudo = true
-    ansible.inventory_path = "../../inventory/singlenode-vagrant"
-  end
-end

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/vagrant/singlenode-vagrant/ansible.cfg
----------------------------------------------------------------------
diff --git a/deployment/vagrant/singlenode-vagrant/ansible.cfg b/deployment/vagrant/singlenode-vagrant/ansible.cfg
deleted file mode 100644
index 7a41ec8..0000000
--- a/deployment/vagrant/singlenode-vagrant/ansible.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-[defaults]
-host_key_checking = false
-library = ../../extra_modules
-roles_path = ../../roles
-pipelining = True
-log_path = ./ansible.log

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/.gitignore
----------------------------------------------------------------------
diff --git a/metron-deployment/.gitignore b/metron-deployment/.gitignore
new file mode 100644
index 0000000..4dd9982
--- /dev/null
+++ b/metron-deployment/.gitignore
@@ -0,0 +1,2 @@
+keys/
+.vagrant
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/README.md b/metron-deployment/README.md
new file mode 100644
index 0000000..38ffb34
--- /dev/null
+++ b/metron-deployment/README.md
@@ -0,0 +1,97 @@
+# Overview
+This set of playbooks can be used to deploy an Ambari-managed Hadoop cluster, Metron services, or both using
+Ansible. These playbooks currently only target RHEL/CentOS 6.x operating
+systems.
+
+## Prerequisites
+The following tools are required to run these scripts:
+
+- Maven - https://maven.apache.org/
+- Git - https://git-scm.com/
+- Ansible - http://www.ansible.com/ (version 2.0 or greater)
+
+Currently Metron must be built from source.  Before running these scripts perform the following steps:
+
+1. Clone the Metron git repository with `git clone git@github.com:apache/incubator-metron.git`
+2. Navigate to `incubator-metron/metron-streaming` and run `mvn clean package`
+
+These scripts depend on two files for configuration:
+  
+- hosts - declares which Ansible roles will be run on which hosts
+- group_vars/all - various configuration settings needed to install Metron
+
+Examples can be found in the
+`incubator-metron/deployment/inventory/metron_example` directory and are a good starting point.  Copy this directory 
+into `incubator-metron/deployment/inventory/` and rename it to your `project_name`.  More information about Ansible files and directory 
+structure can be found at http://docs.ansible.com/ansible/playbooks_best_practices.html.
+
+## Ambari
+The Ambari playbook will install a Hadoop cluster with all the services and configuration required by Metron.  This
+section can be skipped if installing Metron on a pre-existing cluster.  
+
+Currently, this playbook supports building a local development cluster running on one node but options for other types
+ of clusters will be added in the future.
+
+### Setting up your inventory
+Make sure to update the hosts file in `incubator-metron/deployment/inventory/project_name/hosts` or provide an 
+alternate inventory file when you launch the playbooks, including the 
+ssh user(s) and ssh keyfile location(s). These playbooks expect two 
+host groups:
+
+- ambari_master
+- ambari_slaves
+
+### Running the playbook
+This playbook will install the Ambari server on the ambari_master, install the ambari agents on 
+the ambari_slaves, and create a cluster in Ambari with a blueprint for the required 
+Metron components.
+
+Navigate to `incubator-metron/deployment/playbooks` and run: 
+`ansible-playbook -i ../inventory/project_name ambari_install.yml`
+
+## Metron
+The Metron playbook will gather the necessary cluster settings from Ambari and install the Metron services.
+
+### Setting up your inventory
+Edit the hosts file at `incubator-metron/deployment/inventory/project_name/hosts`.  Declare which hosts the 
+Metron services will be installed on by updating these groups:
+
+- enrichment - submits the topology code to Storm and requires a storm client
+- search - host where Elasticsearch will be run
+- web - host where the Metron UI and underlying services will run
+- sensors - host where network data will be collected and published to Kafka
+
+The Metron topologies depend on Kafka topics and HBase tables being created beforehand.  Declare a host that has Kafka
+ and HBase clients installed by updating this group:
+
+- hadoop_client
+
+If only installing Metron, these groups can be ignored:
+
+- ambari_master
+- ambari_slaves
+
+### Configuring group variables
+The Metron Ansible scripts depend on a set of variables.  These variables can be found in the file at 
+`incubator-metron/deployment/inventory/project_name/group_vars/all`.  Edit the ambari* variables to match your Ambari
+instance and update the java_home variable to match the java path on your hosts.
+
+### Running the playbook
+Navigate to `incubator-metron/deployment/playbooks` and run: 
+`ansible-playbook -i ../inventory/project_name metron_install.yml`
+
+## Vagrant
+A VagrantFile is included and will install a working version of the entire Metron stack.  The following is required to
+run this:
+
+- Vagrant - https://www.vagrantup.com/
+- Hostmanager plugin for vagrant - Run `vagrant plugin install vagrant-hostmanager` on the machine where Vagrant is
+installed
+
+Navigate to `incubator-metron/deployment/vagrant/singlenode-vagrant` and run `vagrant up`.  This also provides a good
+example of how to run a full end-to-end Metron install.
+
+
+## TODO
+- migrate existing MySQL/GeoLite playbook
+- Support Ubuntu deployments

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/.gitignore
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/.gitignore b/metron-deployment/amazon-ec2/.gitignore
new file mode 100644
index 0000000..9c214d2
--- /dev/null
+++ b/metron-deployment/amazon-ec2/.gitignore
@@ -0,0 +1,4 @@
+*.pem
+*.secret
+*.log
+*.retry

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/README.md b/metron-deployment/amazon-ec2/README.md
new file mode 100644
index 0000000..b4dcc6f
--- /dev/null
+++ b/metron-deployment/amazon-ec2/README.md
@@ -0,0 +1,211 @@
+Apache Metron on Amazon EC2
+===========================
+
+This project fully automates the provisioning of Apache Metron on Amazon EC2 infrastructure.  Starting with only your Amazon EC2 credentials, this project will create a fully-functioning, end-to-end, multi-node cluster running Apache Metron.
+
+Getting Started
+---------------
+
+### Prerequisites
+
+The host that will drive the provisioning process will need to have [Ansible](https://github.com/ansible/ansible), Python and PIP installed.  In most cases, a development laptop serves this purpose just fine.  Also, install the Python library `boto` and its dependencies.  
+
+```
+pip install boto six
+```
+
+Ensure that an SSH key has been generated and stored at `~/.ssh/id_rsa.pub`.  In most cases this key will already exist and no further action will be needed.
+
+### Create User
+
+1. Use Amazon's [Identity and Access Management](https://console.aws.amazon.com/iam/) tool to create a user account by navigating to `Users > Create New User`.  
+
+2. Grant the user permission by clicking on `Permissions > Attach Policy` and add the following policies.
+
+  ```
+  AmazonEC2FullAccess
+  AmazonVPCFullAccess
+  ```
+
+3. Create an access key for the user by clicking on `Security Credentials > Create Access Key`.  Save the provided access key values in a safe place.  These values cannot be retrieved from the web console at a later time.
+
+4. Use the access key by exporting its values to the shell's environment.  This allows Ansible to authenticate with Amazon EC2.  For example:
+
+  ```
+  export AWS_ACCESS_KEY_ID="AKIAI6NRFEO27E5FFELQ"
+  export AWS_SECRET_ACCESS_KEY="vTDydWJQnAer7OWauUS150i+9Np7hfCXrrVVP6ed"
+  ```
+
+### Deploy Metron
+
+1. Ensure that Metron's streaming topology uber-jar has been built.
+
+  ```
+  cd ../../metron-streaming
+  mvn clean package -DskipTests
+  ```
+
+2. Start the Metron playbook.  A full Metron deployment can consume up to 60 minutes.  Grab a coffee, relax and practice mindfulness meditation.  If the playbook fails mid-stream for any reason, simply re-run it.  
+
+  ```
+  export EC2_INI_PATH=conf/ec2.ini
+  ansible-playbook -i ec2.py playbook.yml
+  ```
+
+### Explore Metron
+
+1. After the deployment has completed successfully, a message like the following will be displayed.  Navigate to the specified resources to explore your newly minted Apache Metron environment.
+
+  ```
+  TASK [debug] *******************************************************************
+  ok: [localhost] => {
+      "Success": [
+          "Apache Metron deployed successfully",
+          "   Metron  @  http://ec2-52-37-255-142.us-west-2.compute.amazonaws.com:5000",
+          "   Ambari  @  http://ec2-52-37-225-202.us-west-2.compute.amazonaws.com:8080",
+          "   Sensors @  ec2-52-37-225-202.us-west-2.compute.amazonaws.com on tap0",
+          "For additional information, see https://metron.incubator.apache.org/'"
+      ]
+  }
+  ```
+
+2. Each of the provisioned hosts will be accessible from the internet. Connecting to one over SSH as the user `centos` will not require a password as it will authenticate with the pre-defined SSH key.  
+
+  ```
+  ssh centos@ec2-52-91-215-174.compute-1.amazonaws.com
+  ```
+
+Advanced Usage
+--------------
+
+### Multiple Environments
+
+This process can support provisioning of multiple, isolated environments.  Simply change the `env` settings in `conf/defaults.yml`.  For example, you might provision separate development, test, and production environments.
+
+```
+env: metron-test
+```
+
+### Selective Provisioning
+
+To provision only subsets of the entire Metron deployment, Ansible tags can be specified.  For example, to only deploy the sensors on an Amazon EC2 environment, run the following command.
+
+```
+ansible-playbook -i ec2.py playbook.yml --tags "ec2,sensors"
+```
+
+### Custom SSH Key
+
+
+By default, the playbook will attempt to register your public SSH key `~/.ssh/id_rsa.pub` with each provisioned host.  This enables Ansible to communicate with each host using an SSH connection.  If you would prefer to use another key, simply add the path to the public key file to the `key_file` property in `conf/defaults.yml`.
+
+For example, generate a new SSH key for Metron that will be stored at `~/.ssh/my-metron-key`.
+
+```
+$ ssh-keygen -q -f ~/.ssh/my-metron-key
+Enter passphrase (empty for no passphrase):
+Enter same passphrase again:
+```
+
+Add the path to the newly created SSH public key to `conf/defaults.yml`.
+
+```
+key_file: ~/.ssh/my-metron-key.pub
+```
+
+Common Errors
+-------------
+
+### Error: 'No handler was ready to authenticate...Check your credentials'
+
+```
+TASK [Define keypair] **********************************************************
+failed: [localhost] => (item=ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXbcb1AlWsEPP
+  r9jEFrn0yun3PYNidJ/...david@hasselhoff.com) => {"failed": true, "item": "ssh-r
+  sa AAAAB3NzaC1yc2EAAAADAQABAAABAQDXbcb1AlWsEPPr9jEFr... david@hasselhoff.com",
+  "msg": "No handler was ready to authenticate. 1 handlers were checked.
+  ['HmacAuthV4Handler'] Check your credentials"}
+```
+
+#### Solution 1
+
+This occurs when Ansible does not have the correct AWS access keys.  The following commands must return a valid access key that is defined within Amazon's [Identity and Access Management](https://console.aws.amazon.com/iam/) console.  
+
+```
+$ echo $AWS_ACCESS_KEY_ID
+AKIAI6NRFEO27E5FFELQ
+
+$ echo $AWS_SECRET_ACCESS_KEY
+vTDydWJQnAer7OWauUS150i+9Np7hfCXrrVVP6ed
+```
+
+#### Solution 2
+
+This error can occur if you have exported the correct AWS access key, but you are using `sudo` to run the Ansible playbook.  Do not use the `sudo` command when running the Ansible playbook.
+
+### Error: 'OptInRequired: ... you need to accept terms and subscribe'
+
+```
+TASK [metron-test: Instantiate 1 host(s) as sensors,ambari_master,metron,ec2] **
+fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
+"Instance creation failed => OptInRequired: In order to use this AWS Marketplace
+product you need to accept terms and subscribe. To do so please visit
+http://aws.amazon.com/marketplace/pp?sku=6x5jmcajty9edm3f211pqjfn2"}
+to retry, use: --limit @playbook.retry
+```
+
+#### Solution
+
+Apache Metron uses the [official CentOS 6 Amazon Machine Image](https://aws.amazon.com/marketplace/pp?sku=6x5jmcajty9edm3f211pqjfn2) when provisioning hosts. Amazon requires that you accept certain terms and conditions when using any Amazon Machine Image (AMI).  Follow the link provided in the error message to accept the terms and conditions then re-run the playbook.  
+
+### Error: 'PendingVerification: Your account is currently being verified'
+
+```
+TASK [metron-test: Instantiate 1 host(s) as sensors,ambari_master,metron,ec2] **
+fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
+"Instance creation failed => PendingVerification: Your account is currently
+being verified. Verification normally takes less than 2 hours. Until your
+account is verified, you may not be able to launch additional instances or
+create additional volumes. If you are still receiving this message after more
+than 2 hours, please let us know by writing to aws-verification@amazon.com. We
+appreciate your patience."}
+to retry, use: --limit @playbook.retry
+```
+
+#### Solution
+
+This will occur if you are attempting to deploy Apache Metron using a newly created Amazon Web Services account.  Follow the advice of the message and wait until Amazon's verification process is complete.  Amazon has some additional [advice for dealing with this error and more](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html).
+
+> Your account is pending verification. Until the verification process is complete, you may not be able to carry out requests with this account. If you have questions, contact [AWS Support](http://console.aws.amazon.com/support/home#/).
+
+### Error: 'Instance creation failed => InstanceLimitExceeded'
+
+```
+TASK [metron-test: Instantiate 3 host(s) as search,metron,ec2] *****************
+fatal: [localhost]: FAILED! => {"changed": false, "failed": true, "msg":
+"Instance creation failed => InstanceLimitExceeded: You have requested more
+instances (11) than your current instance limit of 10 allows for the specified
+instance type. Please visit http://aws.amazon.com/contact-us/ec2-request to
+request an adjustment to this limit."}
+to retry, use: --limit @playbook.retry
+```
+
+#### Solution
+
+This will occur if Apache Metron attempts to deploy more host instances than allowed by your account.  The total number of instances required for Apache Metron can be reduced by editing `metron-deployment/amazon-ec2/playbook.yml`.  Perhaps a better alternative is to request of Amazon that this limit be increased.  Amazon has some additional [advice for dealing with this error and more](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html).
+
+> You've reached the limit on the number of instances you can run concurrently. The limit depends on the instance type. For more information, see [How many instances can I run in Amazon EC2](http://aws.amazon.com/ec2/faqs/#How_many_instances_can_I_run_in_Amazon_EC2). If you need additional instances, complete the [Amazon EC2 Instance Request Form](https://console.aws.amazon.com/support/home#/case/create?issueType=service-limit-increase&limitType=service-code-ec2-instances).
+
+### Error: 'SSH encountered an unknown error during the connection'
+
+```
+TASK [setup] *******************************************************************
+fatal: [ec2-52-26-113-221.us-west-2.compute.amazonaws.com]: UNREACHABLE! => {
+  "changed": false, "msg": "SSH encountered an unknown error during the
+  connection. We recommend you re-run the command using -vvvv, which will enable
+  SSH debugging output to help diagnose the issue", "unreachable": true}
+```
+
+#### Solution
+
+This most often indicates that Ansible cannot connect to the host with the SSH key that it has access to.  This could occur if hosts are provisioned with one SSH key, but the playbook is executed subsequently with a different SSH key.  The issue can be addressed by either altering the `key_file` variable to point to the key that was used to provision the hosts or by simply terminating all hosts and re-running the playbook.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/ansible.cfg
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/ansible.cfg b/metron-deployment/amazon-ec2/ansible.cfg
new file mode 100644
index 0000000..c8f26c4
--- /dev/null
+++ b/metron-deployment/amazon-ec2/ansible.cfg
@@ -0,0 +1,28 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+[defaults]
+host_key_checking = False
+library = ../extra_modules
+roles_path = ../roles
+pipelining = True
+remote_user = centos
+forks = 20
+log_path = ./ansible.log
+
+# fix for "ssh throws 'unix domain socket too long' " problem
+[ssh_connection]
+control_path = %(directory)s/%%h-%%p-%%r

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/conf/defaults.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/conf/defaults.yml b/metron-deployment/amazon-ec2/conf/defaults.yml
new file mode 100644
index 0000000..76c4b98
--- /dev/null
+++ b/metron-deployment/amazon-ec2/conf/defaults.yml
@@ -0,0 +1,80 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+# ec2
+env: metron-test
+region: us-west-2
+instance_type: m4.xlarge
+image: ami-05cf2265
+volume_type: standard
+key_name: metron-key
+xvda_vol_size: 50
+xvdb_vol_size: 100
+xvdc_vol_size: 100
+
+# ambari
+ambari_host: "{{ groups.ambari_master[0] }}"
+ambari_port: 8080
+ambari_user: admin
+ambari_password: admin
+cluster_type: small_cluster
+
+# hbase
+pcap_hbase_table: pcap
+tracker_hbase_table: access_tracker
+threatintel_hbase_table: threatintel
+enrichment_hbase_table: enrichment
+
+# kafka
+num_partitions: 3
+retention_in_gb: 25
+
+# metron variables
+metron_version: 0.1BETA
+java_home: /usr/jdk64/jdk1.8.0_40
+pcapservice_port: 8081
+
+# sensors
+sensor_test_mode: True
+sniff_interface: tap0
+snort_alert_csv_path: "/var/log/snort/alert.csv"
+pcap_replay: True
+pcap_replay_interface: tap0
+install_tap: True
+pcap_path: /opt/pcap-replay
+
+# data directories
+zookeeper_data_dir: "/data1/hadoop/zookeeper"
+namenode_checkpoint_dir: "/data1/hadoop/hdfs/namesecondary"
+namenode_name_dir: "/data1/hadoop/hdfs/namenode"
+datanode_data_dir: "/data1/hadoop/hdfs/data,/data2/hadoop/hdfs/data"
+journalnode_edits_dir: "/data1/hadoop/hdfs/journalnode"
+nodemanager_local_dirs: "/data1/hadoop/yarn/local"
+timeline_ldb_store_path: "/data1/hadoop/yarn/timeline"
+timeline_ldb_state_path: "/data1/hadoop/yarn/timeline"
+nodemanager_log_dirs: "/data1/hadoop/yarn/log"
+jhs_recovery_store_ldb_path: "/data1/hadoop/mapreduce/jhs"
+storm_local_dir: "/data1/hadoop/storm"
+kafka_log_dirs: "/data2/kafka-log"
+elasticsearch_data_dir: "/data1/elasticsearch,/data2/elasticsearch"
+
+#Search
+install_elasticsearch: True
+install_solr: False
+elasticsearch_transport_port: 9300
+elasticsearch_network_interface: eth0
+elasticsearch_web_port: 9200

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/conf/ec2.ini
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/conf/ec2.ini b/metron-deployment/amazon-ec2/conf/ec2.ini
new file mode 100755
index 0000000..646ffaf
--- /dev/null
+++ b/metron-deployment/amazon-ec2/conf/ec2.ini
@@ -0,0 +1,105 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+#
+# Ansible EC2 external inventory script settings.
+#
+# Refer to https://github.com/ansible/ansible/blob/devel/contrib/inventory/ec2.ini
+# for additional information on available settings
+#
+
+[ec2]
+
+# AWS regions to make calls to. Set this to 'all' to make request to all regions
+# in AWS and merge the results together. Alternatively, set this to a comma
+# separated list of regions. E.g. 'us-east-1,us-west-1,us-west-2'
+regions = all
+
+# When generating inventory, Ansible needs to know how to address a server.
+# Each EC2 instance has a lot of variables associated with it. Here is the list:
+#   http://docs.pythonboto.org/en/latest/ref/ec2.html#module-boto.ec2.instance
+# Below are 2 variables that are used as the address of a server:
+#   - destination_variable
+#   - vpc_destination_variable
+
+# This is the normal destination variable to use. If you are running Ansible
+# from outside EC2, then 'public_dns_name' makes the most sense. If you are
+# running Ansible from within EC2, then perhaps you want to use the internal
+# address, and should set this to 'private_dns_name'. The key of an EC2 tag
+# may optionally be used; however the boto instance variables hold precedence
+# in the event of a collision.
+destination_variable = public_dns_name
+
+# For server inside a VPC, using DNS names may not make sense. When an instance
+# has 'subnet_id' set, this variable is used. If the subnet is public, setting
+# this to 'ip_address' will return the public IP address. For instances in a
+# private subnet, this should be set to 'private_ip_address', and Ansible must
+# be run from within EC2. The key of an EC2 tag may optionally be used; however
+# the boto instance variables hold precedence in the event of a collision.
+# WARNING: - instances that are in the private vpc, _without_ public ip address
+# will not be listed in the inventory until You set:
+# vpc_destination_variable = private_ip_address
+#vpc_destination_variable = ip_address
+vpc_destination_variable = public_dns_name
+
+# To exclude RDS instances from the inventory, uncomment and set to False.
+#rds = False
+
+# To exclude ElastiCache instances from the inventory, uncomment and set to False.
+#elasticache = False
+
+# API calls to EC2 are slow. For this reason, we cache the results of an API
+# call. Set this to the path you want cache files to be written to. Two files
+# will be written to this directory:
+#   - ansible-ec2.cache
+#   - ansible-ec2.index
+cache_path = ~/.ansible/tmp
+
+# The number of seconds a cache file is considered valid. After this many
+# seconds, a new API call will be made, and the cache file will be updated.
+# To disable the cache, set this value to 0
+cache_max_age = 300
+
+# Organize groups into a nested/hierarchy instead of a flat namespace.
+nested_groups = False
+
+# Replace - tags when creating groups to avoid issues with ansible
+replace_dash_in_groups = True
+
+# If set to true, any tag of the form "a,b,c" is expanded into a list
+# and the results are used to create additional tag_* inventory groups.
+expand_csv_tags = False
+
+# The EC2 inventory output can become very large. To manage its size,
+# configure which groups should be created.
+group_by_instance_id = True
+group_by_region = True
+group_by_availability_zone = True
+group_by_ami_id = True
+group_by_instance_type = True
+group_by_key_pair = True
+group_by_vpc_id = True
+group_by_security_group = True
+group_by_tag_keys = True
+group_by_tag_none = True
+group_by_route53_names = True
+group_by_rds_engine = True
+group_by_rds_parameter_group = True
+group_by_elasticache_engine = True
+group_by_elasticache_cluster = True
+group_by_elasticache_parameter_group = True
+group_by_elasticache_replication_group = True

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/playbook.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/playbook.yml b/metron-deployment/amazon-ec2/playbook.yml
new file mode 100644
index 0000000..16d281d
--- /dev/null
+++ b/metron-deployment/amazon-ec2/playbook.yml
@@ -0,0 +1,80 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+#
+# instantiate the hosts on amazon ec2
+#
+- hosts: localhost
+  vars_files:
+    - conf/defaults.yml
+  tasks:
+    - include: tasks/create-keypair.yml
+    - include: tasks/create-vpc.yml
+    - include: tasks/create-open-inbound-security-group.yml
+    - include: tasks/create-open-outbound-security-group.yml
+    - include: tasks/create-hosts.yml host_count=1 host_type=sensors,ambari_master,metron,ec2
+    - include: tasks/create-hosts.yml host_count=4 host_type=ambari_slave,ec2
+    - include: tasks/create-hosts.yml host_count=1 host_type=ambari_slave,hadoop_client,metron,ec2
+    - include: tasks/create-hosts.yml host_count=1 host_type=ambari_slave,enrichment,metron,ec2
+    - include: tasks/create-hosts.yml host_count=2 host_type=search,metron,ec2
+    - include: tasks/create-hosts.yml host_count=1 host_type=web,mysql,metron,ec2
+  tags:
+    - ec2
+
+#
+# wait for all ec2 hosts to come up
+#
+- hosts: ec2
+  become: True
+  vars_files:
+    - conf/defaults.yml
+  gather_facts: False
+  tasks:
+    - include: tasks/check-hosts.yml
+  tags:
+    - ec2
+    - wait
+
+#
+# mount additional data volumes on all ec2 hosts
+#
+- hosts: ec2
+  become: True
+  vars_files:
+    - conf/defaults.yml
+  tasks:
+    - include: tasks/mount-volume.yml vol_src=/dev/xvdb vol_mnt=/data1
+    - include: tasks/mount-volume.yml vol_src=/dev/xvdc vol_mnt=/data2
+    - include: tasks/check-volume.yml vol_name=xvda vol_src=/dev/xvda vol_size={{ xvda_vol_size }}
+  tags:
+    - ec2
+
+#
+# build the metron cluster
+#
+- include: ../playbooks/metron_full_install.yml
+
+#
+# provisioning report
+#
+- hosts: localhost
+  vars_files:
+    - conf/defaults.yml
+  tasks:
+    - include: tasks/provisioning-report.yml
+  tags:
+    - ec2

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/check-hosts.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/check-hosts.yml b/metron-deployment/amazon-ec2/tasks/check-hosts.yml
new file mode 100644
index 0000000..1a4b2c7
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/check-hosts.yml
@@ -0,0 +1,20 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Wait for connectivity to host(s)
+  local_action: wait_for host={{ inventory_hostname }} state=started timeout=300 delay=10
+  become: False

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/check-volume.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/check-volume.yml b/metron-deployment/amazon-ec2/tasks/check-volume.yml
new file mode 100644
index 0000000..b7ac63d
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/check-volume.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "Check size of volume {{ vol_src }}"
+  shell: "lsblk | grep part | grep {{ vol_name }} | awk '{ print $4}' | sed 's/[^0-9]//g'"
+  register: current_size
+
+- name: "Status of {{ vol_src }} volume"
+  debug: msg="volume={{ vol_src }} current={{ current_size.stdout|int }} expected={{ vol_size|int }}"
+
+- include: expand-volume.yml vol_src={{ vol_src }}
+  when: current_size.stdout|int < vol_size|int

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-hosts.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-hosts.yml b/metron-deployment/amazon-ec2/tasks/create-hosts.yml
new file mode 100644
index 0000000..39bae3a
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-hosts.yml
@@ -0,0 +1,54 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "{{ env }}: Instantiate {{ host_count }} host(s) as {{ host_type }}"
+  ec2:
+    region: "{{ region }}"
+    instance_type: "{{ instance_type }}"
+    image: "{{ image }}"
+    key_name: "{{ env }}-{{ key_name }}"
+    assign_public_ip: True
+    group: ["{{ env }}-vpc-all-inbound","{{ env }}-vpc-all-outbound"]
+    vpc_subnet_id: "{{ vpc.subnets[0].id }}"
+    instance_tags:
+      Name: "[{{ env }}] {{ host_type }}"
+      type: "{{ host_type }}"
+      env: "{{ env }}"
+    exact_count: "{{ host_count }}"
+    count_tag:
+      type: "{{ host_type }}"
+      env: "{{ env }}"
+    volumes:
+    - device_name: /dev/sda1
+      volume_type: "{{ volume_type }}"
+      volume_size: "{{ xvda_vol_size }}"
+      delete_on_termination: true
+    - device_name: /dev/xvdb
+      volume_type: "{{ volume_type }}"
+      volume_size: "{{ xvdb_vol_size }}"
+      delete_on_termination: true
+    - device_name: /dev/xvdc
+      volume_type: "{{ volume_type }}"
+      volume_size: "{{ xvdc_vol_size }}"
+      delete_on_termination: true
+    wait: yes
+  register: ec2
+
+- name: Add host(s) to a hostgroup
+  add_host: hostname={{ item.public_dns_name }} groups={{ host_type }}
+  with_items: "{{ ec2.tagged_instances }}"
+  when: item.public_dns_name is defined

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-keypair.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-keypair.yml b/metron-deployment/amazon-ec2/tasks/create-keypair.yml
new file mode 100644
index 0000000..693039e
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-keypair.yml
@@ -0,0 +1,29 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- set_fact:
+    the_key_file: "{{ key_file | default('~/.ssh/id_rsa.pub') }}"
+
+- name: Define keypair
+  ec2_key:
+    name: "{{ env }}-{{ key_name }}"
+    region: "{{ region }}"
+    key_material: "{{ item }}"
+  with_file: "{{ the_key_file }}"
+
+- debug: msg="Created keypair '{{ env }}-{{ key_name }}' from '{{ the_key_file }}'"
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml b/metron-deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
new file mode 100644
index 0000000..67e89c8
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-open-inbound-security-group.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "{{ env }}: Define open inbound security group"
+  ec2_group:
+    name: "{{ env }}-vpc-all-inbound"
+    description: WARNING allow all inbound connections from the internet
+    region: "{{ region }}"
+    vpc_id: "{{ vpc_id }}"
+    rules:
+      - proto: all
+        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml b/metron-deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
new file mode 100644
index 0000000..53f505f
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-open-outbound-security-group.yml
@@ -0,0 +1,26 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "{{ env }}: Define open outbound security group"
+  ec2_group:
+    name: "{{ env }}-vpc-all-outbound"
+    description: allow all outbound connections to the internet
+    region: "{{ region }}"
+    vpc_id: "{{ vpc_id }}"
+    rules_egress:
+      - proto: all
+        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-security-group.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-security-group.yml b/metron-deployment/amazon-ec2/tasks/create-security-group.yml
new file mode 100644
index 0000000..1c9b909
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-security-group.yml
@@ -0,0 +1,28 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "{{ env }}: Define the {{ name }} security group"
+  ec2_group:
+    name: "{{ env }}-{{ name }}"
+    region: "{{ region }}"
+    description: "[{{env}}] {{ name }}/{{ proto }}/{{ port }}"
+    vpc_id: "{{ vpc_id }}"
+    rules:
+      - proto: "{{ proto }}"
+        from_port: "{{ port }}"
+        to_port: "{{ port }}"
+        cidr_ip: 0.0.0.0/0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/create-vpc.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/create-vpc.yml b/metron-deployment/amazon-ec2/tasks/create-vpc.yml
new file mode 100644
index 0000000..7fc31e7
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/create-vpc.yml
@@ -0,0 +1,50 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+  - name: "{{ env }}:  Create virtual private cloud"
+    ec2_vpc:
+      region: "{{ region }}"
+      internet_gateway: True
+      resource_tags:
+        Name: "{{ env }}-virtual-private-cloud"
+        env: "{{ env }}"
+      cidr_block: 10.0.0.0/16
+      dns_hostnames: yes
+      dns_support: yes
+      subnets:
+        - cidr: 10.0.0.0/24
+          resource_tags:
+            tier: web
+        - cidr: 10.0.1.0/24
+          resource_tags:
+            tier: hdp
+        - cidr: 10.0.2.0/24
+          resource_tags:
+            tier: sensors
+      route_tables:
+        - subnets:
+          - 10.0.0.0/24
+          - 10.0.1.0/24
+          - 10.0.2.0/24
+          routes:
+          - dest: 0.0.0.0/0
+            gw: igw
+    register: vpc
+
+  - name: "[{{ env }}] Created vpc with id={{ vpc.vpc_id }}"
+    set_fact:
+      vpc_id: "{{ vpc.vpc_id }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/expand-volume.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/expand-volume.yml b/metron-deployment/amazon-ec2/tasks/expand-volume.yml
new file mode 100644
index 0000000..1e25e27
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/expand-volume.yml
@@ -0,0 +1,30 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: "Expand {{ vol_src }} volume"
+#          sectors  delete 1  new      primary  first    past mbr to end  bootable     write and exit
+  shell: "(echo u s; echo d 1; echo n; echo p; echo 1; echo 2048 ; echo ;echo a; echo 1; echo w) | fdisk {{ vol_src }} || true"
+  args:
+    executable: /bin/bash
+
+- name: Restart host(s)
+  command: shutdown -r now "Trigger volume changes"
+  async: 0
+  poll: 0
+  ignore_errors: True
+
+- include: tasks/check-hosts.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/mount-volume.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/mount-volume.yml b/metron-deployment/amazon-ec2/tasks/mount-volume.yml
new file mode 100644
index 0000000..11259a5
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/mount-volume.yml
@@ -0,0 +1,32 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Install xfsprogs
+  yum:
+    name: xfsprogs
+    state: present
+    update_cache: yes
+  register: result
+  until: result.rc == 0
+  retries: 5
+  delay: 10
+
+- name: Format data volume(s)
+  filesystem: fstype=xfs dev={{ vol_src }}
+
+- name: Mount the volume
+  mount: name={{ vol_mnt }} src={{ vol_src }} opts=noatime fstype=xfs state=mounted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/amazon-ec2/tasks/provisioning-report.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/tasks/provisioning-report.yml b/metron-deployment/amazon-ec2/tasks/provisioning-report.yml
new file mode 100644
index 0000000..d2abec0
--- /dev/null
+++ b/metron-deployment/amazon-ec2/tasks/provisioning-report.yml
@@ -0,0 +1,35 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the 'License'); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an 'AS IS' BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+---
+- name: Known hosts groups
+  debug: var=groups
+
+- name: Sanity check Metron web
+  local_action: wait_for host="{{ groups.web[0] }}" port=5000 timeout=20
+
+- name: Sanity check Ambari web
+  local_action: wait_for host="{{ groups.ambari_master[0] }}" port="{{ ambari_port }}" timeout=20
+
+- set_fact:
+    Success:
+      - "Apache Metron deployed successfully"
+      - "   Metron  @  http://{{ groups.web[0] }}:5000"
+      - "   Ambari  @  http://{{ groups.ambari_master[0] }}:{{ ambari_port }}"
+      - "   Sensors @  {{ groups.sensors[0] }} on {{ sniff_interface }}"
+      - "For additional information, see https://metron.incubator.apache.org/"
+
+- debug: var=Success

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-deployment/ansible.cfg
----------------------------------------------------------------------
diff --git a/metron-deployment/ansible.cfg b/metron-deployment/ansible.cfg
new file mode 100644
index 0000000..9b3916b
--- /dev/null
+++ b/metron-deployment/ansible.cfg
@@ -0,0 +1,23 @@
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+[defaults]
+host_key_checking = false
+library = extra_modules
+roles_path = ./roles
+
+[ssh_connection]
+control_path = %(directory)s/%%h-%%p-%%r
\ No newline at end of file


[46/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/files/snort.conf
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/files/snort.conf b/deployment/roles/snort/files/snort.conf
deleted file mode 100644
index 8a24e0c..0000000
--- a/deployment/roles/snort/files/snort.conf
+++ /dev/null
@@ -1,726 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-###################################################
-# This file contains a sample snort configuration.
-# You should take the following steps to create your own custom configuration:
-#
-#  1) Set the network variables.
-#  2) Configure the decoder
-#  3) Configure the base detection engine
-#  4) Configure dynamic loaded libraries
-#  5) Configure preprocessors
-#  6) Configure output plugins
-#  7) Customize your rule set
-#  8) Customize preprocessor and decoder rule set
-#  9) Customize shared object rule set
-###################################################
-
-###################################################
-# Step #1: Set the network variables.  For more information, see README.variables
-###################################################
-
-# Setup the network addresses you are protecting
-ipvar HOME_NET 10.0.0.16
-
-# Set up the external network addresses. Leave as "any" in most situations
-ipvar EXTERNAL_NET any
-
-# List of DNS servers on your network
-ipvar DNS_SERVERS $HOME_NET
-
-# List of SMTP servers on your network
-ipvar SMTP_SERVERS $HOME_NET
-
-# List of web servers on your network
-ipvar HTTP_SERVERS $HOME_NET
-
-# List of sql servers on your network
-ipvar SQL_SERVERS $HOME_NET
-
-# List of telnet servers on your network
-ipvar TELNET_SERVERS $HOME_NET
-
-# List of ssh servers on your network
-ipvar SSH_SERVERS $HOME_NET
-
-# List of ftp servers on your network
-ipvar FTP_SERVERS $HOME_NET
-
-# List of sip servers on your network
-ipvar SIP_SERVERS $HOME_NET
-
-# List of ports you run web servers on
-portvar HTTP_PORTS [36,80,81,82,83,84,85,86,87,88,89,90,311,383,555,591,593,631,801,808,818,901,972,1158,1220,1414,1533,1741,1830,1942,2231,2301,2381,2578,2809,2980,3029,3037,3057,3128,3443,3702,4000,4343,4848,5000,5117,5250,5600,5814,6080,6173,6988,7000,7001,7005,7071,7144,7145,7510,7770,7777,7778,7779,8000,8001,8008,8014,8015,8020,8028,8040,8080,8081,8082,8085,8088,8090,8118,8123,8180,8181,8182,8222,8243,8280,8300,8333,8344,8400,8443,8500,8509,8787,8800,8888,8899,8983,9000,9002,9060,9080,9090,9091,9111,9290,9443,9447,9710,9788,9999,10000,11371,12601,13014,15489,19980,29991,33300,34412,34443,34444,40007,41080,44449,50000,50002,51423,53331,55252,55555,56712]
-
-# List of ports you want to look for SHELLCODE on.
-portvar SHELLCODE_PORTS !80
-
-# List of ports you might see oracle attacks on
-portvar ORACLE_PORTS 1024:
-
-# List of ports you want to look for SSH connections on:
-portvar SSH_PORTS 22
-
-# List of ports you run ftp servers on
-portvar FTP_PORTS [21,2100,3535]
-
-# List of ports you run SIP servers on
-portvar SIP_PORTS [5060,5061,5600]
-
-# List of file data ports for file inspection
-portvar FILE_DATA_PORTS [$HTTP_PORTS,110,143]
-
-# List of GTP ports for GTP preprocessor
-portvar GTP_PORTS [2123,2152,3386]
-
-# other variables, these should not be modified
-ipvar AIM_SERVERS [64.12.24.0/23,64.12.28.0/23,64.12.161.0/24,64.12.163.0/24,64.12.200.0/24,205.188.3.0/24,205.188.5.0/24,205.188.7.0/24,205.188.9.0/24,205.188.153.0/24,205.188.179.0/24,205.188.248.0/24]
-
-# Path to your rules files (this can be a relative path)
-# Note for Windows users:  You are advised to make this an absolute path,
-# such as:  c:\snort\rules
-var RULE_PATH rules
-var SO_RULE_PATH so_rules
-var PREPROC_RULE_PATH preproc_rules
-
-# If you are using reputation preprocessor set these
-var WHITE_LIST_PATH /etc/snort/rules
-var BLACK_LIST_PATH /etc/snort/rules
-
-###################################################
-# Step #2: Configure the decoder.  For more information, see README.decode
-###################################################
-
-# Stop generic decode events:
-config disable_decode_alerts
-
-# Stop Alerts on experimental TCP options
-config disable_tcpopt_experimental_alerts
-
-# Stop Alerts on obsolete TCP options
-config disable_tcpopt_obsolete_alerts
-
-# Stop Alerts on T/TCP alerts
-config disable_tcpopt_ttcp_alerts
-
-# Stop Alerts on all other TCPOption type events:
-config disable_tcpopt_alerts
-
-# Stop Alerts on invalid ip options
-config disable_ipopt_alerts
-
-# Alert if value in length field (IP, TCP, UDP) is greater th elength of the packet
-# config enable_decode_oversized_alerts
-
-# Same as above, but drop packet if in Inline mode (requires enable_decode_oversized_alerts)
-# config enable_decode_oversized_drops
-
-# Configure IP / TCP checksum mode
-config checksum_mode: all
-
-# Configure maximum number of flowbit references.  For more information, see README.flowbits
-# config flowbits_size: 64
-
-# Configure ports to ignore
-# config ignore_ports: tcp 21 6667:6671 1356
-# config ignore_ports: udp 1:17 53
-
-# Configure active response for non inline operation. For more information, see REAMDE.active
-# config response: eth0 attempts 2
-
-# Configure DAQ related options for inline operation. For more information, see README.daq
-#
-# config daq: <type>
-# config daq_dir: <dir>
-# config daq_mode: <mode>
-# config daq_var: <var>
-#
-# <type> ::= pcap | afpacket | dump | nfq | ipq | ipfw
-# <mode> ::= read-file | passive | inline
-# <var> ::= arbitrary <name>=<value passed to DAQ
-# <dir> ::= path as to where to look for DAQ module so's
-
-# Configure specific UID and GID to run snort as after dropping privs. For more information see snort -h command line options
-#
-# config set_gid:
-# config set_uid:
-
-# Configure default snaplen. Snort defaults to MTU of in use interface. For more information see README
-#
-# config snaplen:
-#
-
-# Configure default bpf_file to use for filtering what traffic reaches snort. For more information see snort -h command line options (-F)
-#
-# config bpf_file:
-#
-
-# Configure default log directory for snort to log to.  For more information see snort -h command line options (-l)
-#
-# config logdir:
-
-
-###################################################
-# Step #3: Configure the base detection engine.  For more information, see  README.decode
-###################################################
-
-# Configure PCRE match limitations
-config pcre_match_limit: 3500
-config pcre_match_limit_recursion: 1500
-
-# Configure the detection engine  See the Snort Manual, Configuring Snort - Includes - Config
-config detection: search-method ac-split search-optimize max-pattern-len 20
-
-# Configure the event queue.  For more information, see README.event_queue
-config event_queue: max_queue 8 log 5 order_events content_length
-
-###################################################
-## Configure GTP if it is to be used.
-## For more information, see README.GTP
-####################################################
-
-# config enable_gtp
-
-###################################################
-# Per packet and rule latency enforcement
-# For more information see README.ppm
-###################################################
-
-# Per Packet latency configuration
-#config ppm: max-pkt-time 250, \
-#   fastpath-expensive-packets, \
-#   pkt-log
-
-# Per Rule latency configuration
-#config ppm: max-rule-time 200, \
-#   threshold 3, \
-#   suspend-expensive-rules, \
-#   suspend-timeout 20, \
-#   rule-log alert
-
-###################################################
-# Configure Perf Profiling for debugging
-# For more information see README.PerfProfiling
-###################################################
-
-#config profile_rules: print all, sort avg_ticks
-#config profile_preprocs: print all, sort avg_ticks
-
-###################################################
-# Configure protocol aware flushing
-# For more information see README.stream5
-###################################################
-config paf_max: 16000
-
-###################################################
-# Step #4: Configure dynamic loaded libraries.
-# For more information, see Snort Manual, Configuring Snort - Dynamic Modules
-###################################################
-
-# path to dynamic preprocessor libraries
-dynamicpreprocessor directory /usr/lib64/snort-2.9.8.0_dynamicpreprocessor
-
-# path to base preprocessor engine
-dynamicengine /usr/lib64/snort-2.9.8.0_dynamicengine/libsf_engine.so
-
-# path to dynamic rules libraries
-#dynamicdetection directory /usr/local/lib/snort_dynamicrules
-
-###################################################
-# Step #5: Configure preprocessors
-# For more information, see the Snort Manual, Configuring Snort - Preprocessors
-###################################################
-
-# GTP Control Channle Preprocessor. For more information, see README.GTP
-# preprocessor gtp: ports { 2123 3386 2152 }
-
-# Inline packet normalization. For more information, see README.normalize
-# Does nothing in IDS mode
-preprocessor normalize_ip4
-preprocessor normalize_tcp: ips ecn stream
-preprocessor normalize_icmp4
-preprocessor normalize_ip6
-preprocessor normalize_icmp6
-
-# Target-based IP defragmentation.  For more inforation, see README.frag3
-preprocessor frag3_global: max_frags 65536
-preprocessor frag3_engine: policy windows detect_anomalies overlap_limit 10 min_fragment_length 100 timeout 180
-
-# Target-Based stateful inspection/stream reassembly.  For more inforation, see README.stream5
-preprocessor stream5_global: track_tcp yes, \
-   track_udp yes, \
-   track_icmp no, \
-   max_tcp 262144, \
-   max_udp 131072, \
-   max_active_responses 2, \
-   min_response_seconds 5
-preprocessor stream5_tcp: policy windows, detect_anomalies, require_3whs 180, \
-   overlap_limit 10, small_segments 3 bytes 150, timeout 180, \
-    ports client 21 22 23 25 42 53 70 79 109 110 111 113 119 135 136 137 139 143 \
-        161 445 513 514 587 593 691 1433 1521 1741 2100 3306 6070 6665 6666 6667 6668 6669 \
-        7000 8181 32770 32771 32772 32773 32774 32775 32776 32777 32778 32779, \
-    ports both 36 80 81 82 83 84 85 86 87 88 89 90 110 311 383 443 465 563 555 591 593 631 636 801 808 818 901 972 989 992 993 994 995 1158 1220 1414 1533 1741 1830 1942 2231 2301 2381 2578 2809 2980 3029 3037 3057 3128 3443 3702 4000 4343 4848 5000 5117 5250 5600 5814 6080 6173 6988 7907 7000 7001 7005 7071 7144 7145 7510 7802 7770 7777 7778 7779 \
-        7801 7900 7901 7902 7903 7904 7905 7906 7908 7909 7910 7911 7912 7913 7914 7915 7916 \
-        7917 7918 7919 7920 8000 8001 8008 8014 8015 8020 8028 8040 8080 8081 8082 8085 8088 8090 8118 8123 8180 8181 8182 8222 8243 8280 8300 8333 8344 8400 8443 8500 8509 8787 8800 8888 8899 8983 9000 9002 9060 9080 9090 9091 9111 9290 9443 9447 9710 9788 9999 10000 11371 12601 13014 15489 19980 29991 33300 34412 34443 34444 40007 41080 44449 50000 50002 51423 53331 55252 55555 56712
-preprocessor stream5_udp: timeout 180
-
-# performance statistics.  For more information, see the Snort Manual, Configuring Snort - Preprocessors - Performance Monitor
-# preprocessor perfmonitor: time 300 file /var/snort/snort.stats pktcnt 10000
-
-# HTTP normalization and anomaly detection.  For more information, see README.http_inspect
-preprocessor http_inspect: global iis_unicode_map unicode.map 1252 compress_depth 65535 decompress_depth 65535
-preprocessor http_inspect_server: server default \
-    http_methods { GET POST PUT SEARCH MKCOL COPY MOVE LOCK UNLOCK NOTIFY POLL BCOPY BDELETE BMOVE LINK UNLINK OPTIONS HEAD DELETE TRACE TRACK CONNECT SOURCE SUBSCRIBE UNSUBSCRIBE PROPFIND PROPPATCH BPROPFIND BPROPPATCH RPC_CONNECT PROXY_SUCCESS BITS_POST CCM_POST SMS_POST RPC_IN_DATA RPC_OUT_DATA RPC_ECHO_DATA } \
-    chunk_length 500000 \
-    server_flow_depth 0 \
-    client_flow_depth 0 \
-    post_depth 65495 \
-    oversize_dir_length 500 \
-    max_header_length 750 \
-    max_headers 100 \
-    max_spaces 200 \
-    small_chunk_length { 10 5 } \
-    ports { 36 80 81 82 83 84 85 86 87 88 89 90 311 383 555 591 593 631 801 808 818 901 972 1158 1220 1414 1533 1741 1830 1942 2231 2301 2381 2578 2809 2980 3029 3037 3057 3128 3443 3702 4000 4343 4848 5000 5117 5250 5600 5814 6080 6173 6988 7000 7001 7005 7071 7144 7145 7510 7770 7777 7778 7779 8000 8001 8008 8014 8015 8020 8028 8040 8080 8081 8082 8085 8088 8090 8118 8123 8180 8181 8182 8222 8243 8280 8300 8333 8344 8400 8443 8500 8509 8787 8800 8888 8899 8983 9000 9002 9060 9080 9090 9091 9111 9290 9443 9447 9710 9788 9999 10000 11371 12601 13014 15489 19980 29991 33300 34412 34443 34444 40007 41080 44449 50000 50002 51423 53331 55252 55555 56712 } \
-    non_rfc_char { 0x00 0x01 0x02 0x03 0x04 0x05 0x06 0x07 } \
-    enable_cookie \
-    extended_response_inspection \
-    inspect_gzip \
-    normalize_utf \
-    unlimited_decompress \
-    normalize_javascript \
-    apache_whitespace no \
-    ascii no \
-    bare_byte no \
-    directory no \
-    double_decode no \
-    iis_backslash no \
-    iis_delimiter no \
-    iis_unicode no \
-    multi_slash no \
-    utf_8 no \
-    u_encode yes \
-    webroot no
-
-# ONC-RPC normalization and anomaly detection.  For more information, see the Snort Manual, Configuring Snort - Preprocessors - RPC Decode
-preprocessor rpc_decode: 111 32770 32771 32772 32773 32774 32775 32776 32777 32778 32779 no_alert_multiple_requests no_alert_large_fragments no_alert_incomplete
-
-# Back Orifice detection.
-preprocessor bo
-
-# FTP / Telnet normalization and anomaly detection.  For more information, see README.ftptelnet
-preprocessor ftp_telnet: global inspection_type stateful encrypted_traffic no check_encrypted
-preprocessor ftp_telnet_protocol: telnet \
-    ayt_attack_thresh 20 \
-    normalize ports { 23 } \
-    detect_anomalies
-preprocessor ftp_telnet_protocol: ftp server default \
-    def_max_param_len 100 \
-    ports { 21 2100 3535 } \
-    telnet_cmds yes \
-    ignore_telnet_erase_cmds yes \
-    ftp_cmds { ABOR ACCT ADAT ALLO APPE AUTH CCC CDUP } \
-    ftp_cmds { CEL CLNT CMD CONF CWD DELE ENC EPRT } \
-    ftp_cmds { EPSV ESTA ESTP FEAT HELP LANG LIST LPRT } \
-    ftp_cmds { LPSV MACB MAIL MDTM MIC MKD MLSD MLST } \
-    ftp_cmds { MODE NLST NOOP OPTS PASS PASV PBSZ PORT } \
-    ftp_cmds { PROT PWD QUIT REIN REST RETR RMD RNFR } \
-    ftp_cmds { RNTO SDUP SITE SIZE SMNT STAT STOR STOU } \
-    ftp_cmds { STRU SYST TEST TYPE USER XCUP XCRC XCWD } \
-    ftp_cmds { XMAS XMD5 XMKD XPWD XRCP XRMD XRSQ XSEM } \
-    ftp_cmds { XSEN XSHA1 XSHA256 } \
-    alt_max_param_len 0 { ABOR CCC CDUP ESTA FEAT LPSV NOOP PASV PWD QUIT REIN STOU SYST XCUP XPWD } \
-    alt_max_param_len 200 { ALLO APPE CMD HELP NLST RETR RNFR STOR STOU XMKD } \
-    alt_max_param_len 256 { CWD RNTO } \
-    alt_max_param_len 400 { PORT } \
-    alt_max_param_len 512 { SIZE } \
-    chk_str_fmt { ACCT ADAT ALLO APPE AUTH CEL CLNT CMD } \
-    chk_str_fmt { CONF CWD DELE ENC EPRT EPSV ESTP HELP } \
-    chk_str_fmt { LANG LIST LPRT MACB MAIL MDTM MIC MKD } \
-    chk_str_fmt { MLSD MLST MODE NLST OPTS PASS PBSZ PORT } \
-    chk_str_fmt { PROT REST RETR RMD RNFR RNTO SDUP SITE } \
-    chk_str_fmt { SIZE SMNT STAT STOR STRU TEST TYPE USER } \
-    chk_str_fmt { XCRC XCWD XMAS XMD5 XMKD XRCP XRMD XRSQ } \
-    chk_str_fmt { XSEM XSEN XSHA1 XSHA256 } \
-    cmd_validity ALLO < int [ char R int ] > \
-    cmd_validity EPSV < [ { char 12 | char A char L char L } ] > \
-    cmd_validity MACB < string > \
-    cmd_validity MDTM < [ date nnnnnnnnnnnnnn[.n[n[n]]] ] string > \
-    cmd_validity MODE < char ASBCZ > \
-    cmd_validity PORT < host_port > \
-    cmd_validity PROT < char CSEP > \
-    cmd_validity STRU < char FRPO [ string ] > \
-    cmd_validity TYPE < { char AE [ char NTC ] | char I | char L [ number ] } >
-preprocessor ftp_telnet_protocol: ftp client default \
-    max_resp_len 256 \
-    bounce yes \
-    ignore_telnet_erase_cmds yes \
-    telnet_cmds yes
-
-
-# SMTP normalization and anomaly detection.  For more information, see README.SMTP
-preprocessor smtp: ports { 25 465 587 691 } \
-    inspection_type stateful \
-    b64_decode_depth 0 \
-    qp_decode_depth 0 \
-    bitenc_decode_depth 0 \
-    uu_decode_depth 0 \
-    log_mailfrom \
-    log_rcptto \
-    log_filename \
-    log_email_hdrs \
-    normalize cmds \
-    normalize_cmds { ATRN AUTH BDAT CHUNKING DATA DEBUG EHLO EMAL ESAM ESND ESOM ETRN EVFY } \
-    normalize_cmds { EXPN HELO HELP IDENT MAIL NOOP ONEX QUEU QUIT RCPT RSET SAML SEND SOML } \
-    normalize_cmds { STARTTLS TICK TIME TURN TURNME VERB VRFY X-ADAT X-DRCP X-ERCP X-EXCH50 } \
-    normalize_cmds { X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
-    max_command_line_len 512 \
-    max_header_line_len 1000 \
-    max_response_line_len 512 \
-    alt_max_command_line_len 260 { MAIL } \
-    alt_max_command_line_len 300 { RCPT } \
-    alt_max_command_line_len 500 { HELP HELO ETRN EHLO } \
-    alt_max_command_line_len 255 { EXPN VRFY ATRN SIZE BDAT DEBUG EMAL ESAM ESND ESOM EVFY IDENT NOOP RSET } \
-    alt_max_command_line_len 246 { SEND SAML SOML AUTH TURN ETRN DATA RSET QUIT ONEX QUEU STARTTLS TICK TIME TURNME VERB X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
-    valid_cmds { ATRN AUTH BDAT CHUNKING DATA DEBUG EHLO EMAL ESAM ESND ESOM ETRN EVFY } \
-    valid_cmds { EXPN HELO HELP IDENT MAIL NOOP ONEX QUEU QUIT RCPT RSET SAML SEND SOML } \
-    valid_cmds { STARTTLS TICK TIME TURN TURNME VERB VRFY X-ADAT X-DRCP X-ERCP X-EXCH50 } \
-    valid_cmds { X-EXPS X-LINK2STATE XADR XAUTH XCIR XEXCH50 XGEN XLICENSE XQUE XSTA XTRN XUSR } \
-    xlink2state { enabled }
-
-# Portscan detection.  For more information, see README.sfportscan
-preprocessor sfportscan: proto  { all } memcap { 10000000 } sense_level { low }
-
-# ARP spoof detection.  For more information, see the Snort Manual - Configuring Snort - Preprocessors - ARP Spoof Preprocessor
-# preprocessor arpspoof
-# preprocessor arpspoof_detect_host: 192.168.40.1 f0:0f:00:f0:0f:00
-
-# SSH anomaly detection.  For more information, see README.ssh
-preprocessor ssh: server_ports { 22 } \
-                  autodetect \
-                  max_client_bytes 19600 \
-                  max_encrypted_packets 20 \
-                  max_server_version_len 100 \
-                  enable_respoverflow enable_ssh1crc32 \
-                  enable_srvoverflow enable_protomismatch
-
-# SMB / DCE-RPC normalization and anomaly detection.  For more information, see README.dcerpc2
-preprocessor dcerpc2: memcap 102400, events [co ]
-preprocessor dcerpc2_server: default, policy WinXP, \
-    detect [smb [139,445], tcp 135, udp 135, rpc-over-http-server 593], \
-    autodetect [tcp 1025:, udp 1025:, rpc-over-http-server 1025:], \
-    smb_max_chain 3, smb_invalid_shares ["C$", "D$", "ADMIN$"]
-
-# DNS anomaly detection.  For more information, see README.dns
-preprocessor dns: ports { 53 } enable_rdata_overflow
-
-# SSL anomaly detection and traffic bypass.  For more information, see README.ssl
-preprocessor ssl: ports { 443 465 563 636 989 992 993 994 995 5061 7801 7802 7900 7901 7902 7903 7904 7905 7906 7907 7908 7909 7910 7911 7912 7913 7914 7915 7916 7917 7918 7919 7920 }, trustservers, noinspect_encrypted
-
-# SDF sensitive data preprocessor.  For more information see README.sensitive_data
-preprocessor sensitive_data: alert_threshold 25
-
-# SIP Session Initiation Protocol preprocessor.  For more information see README.sip
-preprocessor sip: max_sessions 40000, \
-   ports { 5060 5061 5600 }, \
-   methods { invite \
-             cancel \
-             ack \
-             bye \
-             register \
-             options \
-             refer \
-             subscribe \
-             update \
-             join \
-             info \
-             message \
-             notify \
-             benotify \
-             do \
-             qauth \
-             sprack \
-             publish \
-             service \
-             unsubscribe \
-             prack }, \
-   max_uri_len 512, \
-   max_call_id_len 80, \
-   max_requestName_len 20, \
-   max_from_len 256, \
-   max_to_len 256, \
-   max_via_len 1024, \
-   max_contact_len 512, \
-   max_content_len 2048
-
-# IMAP preprocessor.  For more information see README.imap
-preprocessor imap: \
-   ports { 143 } \
-   b64_decode_depth 0 \
-   qp_decode_depth 0 \
-   bitenc_decode_depth 0 \
-   uu_decode_depth 0
-
-# POP preprocessor. For more information see README.pop
-preprocessor pop: \
-   ports { 110 } \
-   b64_decode_depth 0 \
-   qp_decode_depth 0 \
-   bitenc_decode_depth 0 \
-   uu_decode_depth 0
-
-# Modbus preprocessor. For more information see README.modbus
-preprocessor modbus: ports { 502 }
-
-# DNP3 preprocessor. For more information see README.dnp3
-preprocessor dnp3: ports { 20000 } \
-   memcap 262144 \
-   check_crc
-
-# Reputation preprocessor. For more information see README.reputation
-preprocessor reputation: \
-   memcap 500, \
-   priority whitelist, \
-   nested_ip inner, \
-   whitelist $WHITE_LIST_PATH/white_list.rules, \
-   blacklist $BLACK_LIST_PATH/black_list.rules
-
-###################################################
-# Step #6: Configure output plugins
-# For more information, see Snort Manual, Configuring Snort - Output Modules
-###################################################
-
-# unified2
-# Recommended for most installs
-# output unified2: filename merged.log, limit 128, nostamp, mpls_event_types, vlan_event_types
-
-# Additional configuration for specific types of installs
-# output alert_unified2: filename snort.alert, limit 128, nostamp
-# output log_unified2: filename snort.log, limit 128, nostamp
-
-# syslog
-# output alert_syslog: LOG_AUTH LOG_ALERT
-
-# pcap
-# output log_tcpdump: tcpdump.log
-
-# metadata reference data.  do not modify these lines
-include classification.config
-include reference.config
-
-
-###################################################
-# Step #7: Customize your rule set
-# For more information, see Snort Manual, Writing Snort Rules
-#
-# NOTE: All categories are enabled in this conf file
-###################################################
-
-include $RULE_PATH/community.rules
-
-# site specific rules
-# include $RULE_PATH/local.rules
-# include $RULE_PATH/app-detect.rules
-# include $RULE_PATH/attack-responses.rules
-# include $RULE_PATH/backdoor.rules
-# include $RULE_PATH/bad-traffic.rules
-# include $RULE_PATH/blacklist.rules
-# include $RULE_PATH/botnet-cnc.rules
-# include $RULE_PATH/browser-chrome.rules
-# include $RULE_PATH/browser-firefox.rules
-# include $RULE_PATH/browser-ie.rules
-# include $RULE_PATH/browser-other.rules
-# include $RULE_PATH/browser-plugins.rules
-# include $RULE_PATH/browser-webkit.rules
-# include $RULE_PATH/chat.rules
-# include $RULE_PATH/content-replace.rules
-# include $RULE_PATH/ddos.rules
-# include $RULE_PATH/dns.rules
-# include $RULE_PATH/dos.rules
-# include $RULE_PATH/experimental.rules
-# include $RULE_PATH/exploit-kit.rules
-# include $RULE_PATH/exploit.rules
-# include $RULE_PATH/file-executable.rules
-# include $RULE_PATH/file-flash.rules
-# include $RULE_PATH/file-identify.rules
-# include $RULE_PATH/file-image.rules
-# include $RULE_PATH/file-java.rules
-# include $RULE_PATH/file-multimedia.rules
-# include $RULE_PATH/file-office.rules
-# include $RULE_PATH/file-other.rules
-# include $RULE_PATH/file-pdf.rules
-# include $RULE_PATH/finger.rules
-# include $RULE_PATH/ftp.rules
-# include $RULE_PATH/icmp-info.rules
-# include $RULE_PATH/icmp.rules
-# include $RULE_PATH/imap.rules
-# include $RULE_PATH/indicator-compromise.rules
-# include $RULE_PATH/indicator-obfuscation.rules
-# include $RULE_PATH/indicator-scan.rules
-# include $RULE_PATH/indicator-shellcode.rules
-# include $RULE_PATH/info.rules
-# include $RULE_PATH/malware-backdoor.rules
-# include $RULE_PATH/malware-cnc.rules
-# include $RULE_PATH/malware-other.rules
-# include $RULE_PATH/malware-tools.rules
-# include $RULE_PATH/misc.rules
-# include $RULE_PATH/multimedia.rules
-# include $RULE_PATH/mysql.rules
-# include $RULE_PATH/netbios.rules
-# include $RULE_PATH/nntp.rules
-# include $RULE_PATH/oracle.rules
-# include $RULE_PATH/os-linux.rules
-# include $RULE_PATH/os-mobile.rules
-# include $RULE_PATH/os-other.rules
-# include $RULE_PATH/os-solaris.rules
-# include $RULE_PATH/os-windows.rules
-# include $RULE_PATH/other-ids.rules
-# include $RULE_PATH/p2p.rules
-# include $RULE_PATH/phishing-spam.rules
-# include $RULE_PATH/policy-multimedia.rules
-# include $RULE_PATH/policy-other.rules
-# include $RULE_PATH/policy.rules
-# include $RULE_PATH/policy-social.rules
-# include $RULE_PATH/policy-spam.rules
-# include $RULE_PATH/pop2.rules
-# include $RULE_PATH/pop3.rules
-# include $RULE_PATH/protocol-dns.rules
-# include $RULE_PATH/protocol-finger.rules
-# include $RULE_PATH/protocol-ftp.rules
-# include $RULE_PATH/protocol-icmp.rules
-# include $RULE_PATH/protocol-imap.rules
-# include $RULE_PATH/protocol-nntp.rules
-# include $RULE_PATH/protocol-other.rules
-# include $RULE_PATH/protocol-pop.rules
-# include $RULE_PATH/protocol-rpc.rules
-# include $RULE_PATH/protocol-scada.rules
-# include $RULE_PATH/protocol-services.rules
-# include $RULE_PATH/protocol-snmp.rules
-# include $RULE_PATH/protocol-telnet.rules
-# include $RULE_PATH/protocol-tftp.rules
-# include $RULE_PATH/protocol-voip.rules
-# include $RULE_PATH/pua-adware.rules
-# include $RULE_PATH/pua-other.rules
-# include $RULE_PATH/pua-p2p.rules
-# include $RULE_PATH/pua-toolbars.rules
-# include $RULE_PATH/rpc.rules
-# include $RULE_PATH/rservices.rules
-# include $RULE_PATH/scada.rules
-# include $RULE_PATH/scan.rules
-# include $RULE_PATH/server-apache.rules
-# include $RULE_PATH/server-iis.rules
-# include $RULE_PATH/server-mail.rules
-# include $RULE_PATH/server-mssql.rules
-# include $RULE_PATH/server-mysql.rules
-# include $RULE_PATH/server-oracle.rules
-# include $RULE_PATH/server-other.rules
-# include $RULE_PATH/server-samba.rules
-# include $RULE_PATH/server-webapp.rules
-# include $RULE_PATH/shellcode.rules
-# include $RULE_PATH/smtp.rules
-# include $RULE_PATH/snmp.rules
-# include $RULE_PATH/specific-threats.rules
-# include $RULE_PATH/spyware-put.rules
-# include $RULE_PATH/sql.rules
-# include $RULE_PATH/telnet.rules
-# include $RULE_PATH/tftp.rules
-# include $RULE_PATH/virus.rules
-# include $RULE_PATH/voip.rules
-# include $RULE_PATH/web-activex.rules
-# include $RULE_PATH/web-attacks.rules
-# include $RULE_PATH/web-cgi.rules
-# include $RULE_PATH/web-client.rules
-# include $RULE_PATH/web-coldfusion.rules
-# include $RULE_PATH/web-frontpage.rules
-# include $RULE_PATH/web-iis.rules
-# include $RULE_PATH/web-misc.rules
-# include $RULE_PATH/web-php.rules
-# include $RULE_PATH/x11.rules
-
-###################################################
-# Step #8: Customize your preprocessor and decoder alerts
-# For more information, see README.decoder_preproc_rules
-###################################################
-
-# decoder and preprocessor event rules
-# include $PREPROC_RULE_PATH/preprocessor.rules
-# include $PREPROC_RULE_PATH/decoder.rules
-# include $PREPROC_RULE_PATH/sensitive-data.rules
-
-###################################################
-# Step #9: Customize your Shared Object Snort Rules
-# For more information, see http://vrt-blog.snort.org/2009/01/using-vrt-certified-shared-object-rules.html
-###################################################
-
-# dynamic library rules
-# include $SO_RULE_PATH/browser-ie.rules
-# include $SO_RULE_PATH/browser-other.rules
-# include $SO_RULE_PATH/exploit-kit.rules
-# include $SO_RULE_PATH/file-flash.rules
-# include $SO_RULE_PATH/file-image.rules
-# include $SO_RULE_PATH/file-java.rules
-# include $SO_RULE_PATH/file-multimedia.rules
-# include $SO_RULE_PATH/file-office.rules
-# include $SO_RULE_PATH/file-other.rules
-# include $SO_RULE_PATH/file-pdf.rules
-# include $SO_RULE_PATH/indicator-shellcode.rules
-# include $SO_RULE_PATH/malware-cnc.rules
-# include $SO_RULE_PATH/malware-other.rules
-# include $SO_RULE_PATH/netbios.rules
-# include $SO_RULE_PATH/os-linux.rules
-# include $SO_RULE_PATH/os-other.rules
-# include $SO_RULE_PATH/os-windows.rules
-# include $SO_RULE_PATH/policy-social.rules
-# include $SO_RULE_PATH/protocol-dns.rules
-# include $SO_RULE_PATH/protocol-nntp.rules
-# include $SO_RULE_PATH/protocol-other.rules
-# include $SO_RULE_PATH/protocol-snmp.rules
-# include $SO_RULE_PATH/protocol-voip.rules
-# include $SO_RULE_PATH/pua-p2p.rules
-# include $SO_RULE_PATH/server-apache.rules
-# include $SO_RULE_PATH/server-iis.rules
-# include $SO_RULE_PATH/server-mail.rules
-# include $SO_RULE_PATH/server-mysql.rules
-# include $SO_RULE_PATH/server-oracle.rules
-# include $SO_RULE_PATH/server-other.rules
-# include $SO_RULE_PATH/server-webapp.rules
-
-# legacy dynamic library rule files
-# include $SO_RULE_PATH/bad-traffic.rules
-# include $SO_RULE_PATH/browser-ie.rules
-# include $SO_RULE_PATH/chat.rules
-# include $SO_RULE_PATH/dos.rules
-# include $SO_RULE_PATH/exploit.rules
-# include $SO_RULE_PATH/file-flash.rules
-# include $SO_RULE_PATH/icmp.rules
-# include $SO_RULE_PATH/imap.rules
-# include $SO_RULE_PATH/misc.rules
-# include $SO_RULE_PATH/multimedia.rules
-# include $SO_RULE_PATH/netbios.rules
-# include $SO_RULE_PATH/nntp.rules
-# include $SO_RULE_PATH/p2p.rules
-# include $SO_RULE_PATH/smtp.rules
-# include $SO_RULE_PATH/snmp.rules
-# include $SO_RULE_PATH/specific-threats.rules
-# include $SO_RULE_PATH/web-activex.rules
-# include $SO_RULE_PATH/web-client.rules
-# include $SO_RULE_PATH/web-iis.rules
-# include $SO_RULE_PATH/web-misc.rules
-
-# Event thresholding or suppression commands. See threshold.conf
-include threshold.conf

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/meta/main.yml b/deployment/roles/snort/meta/main.yml
deleted file mode 100644
index f742973..0000000
--- a/deployment/roles/snort/meta/main.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-  - epel
-  - libselinux-python
-  - build-tools
-  - kafka-client
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/tasks/daq.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/tasks/daq.yml b/deployment/roles/snort/tasks/daq.yml
deleted file mode 100644
index c8bd4b0..0000000
--- a/deployment/roles/snort/tasks/daq.yml
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download daq
-  get_url:
-    url: "{{ daq_src_url }}"
-    dest: "/tmp/daq-{{ daq_version }}.src.rpm"
-
-- name: Build daq
-  shell: "rpmbuild --rebuild daq-{{ daq_version }}.src.rpm"
-  args:
-    chdir: /tmp
-    creates: /root/rpmbuild/RPMS/x86_64/daq-{{ daq_version }}.x86_64.rpm
-
-- name: Install daq
-  yum:
-    name: /root/rpmbuild/RPMS/x86_64/daq-{{ daq_version }}.x86_64.rpm
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/tasks/flume.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/tasks/flume.yml b/deployment/roles/snort/tasks/flume.yml
deleted file mode 100644
index e5bd593..0000000
--- a/deployment/roles/snort/tasks/flume.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install flume configurations
-  copy: src={{ item.src }} dest={{ item.dest }}
-  with_items:
-    - { src: flume-snort.conf, dest: /etc/flume/conf/flume-snort.conf }
-
-- name: Configure destination for snort alerts
-  lineinfile: dest=/etc/flume/conf/flume-snort.conf regexp={{ item.regexp }} line={{ item.line }}
-  with_items:
-    - { regexp: '^snort\.sinks\.kafka-sink\.brokerList.*$',
-        line: 'snort.sinks.kafka-sink.brokerList = {{ kafka_broker_url }}' }
-    - { regexp: '^snort\.sinks\.kafka-sink\.topic.*$',
-        line: 'snort.sinks.kafka-sink.topic = {{ snort_topic }}'}
-    - { regexp: '^snort.sources.exec-source.command.*$',
-        line: 'snort.sources.exec-source.command = tail -F {{ snort_alert_csv_path }}' }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/tasks/main.yml b/deployment/roles/snort/tasks/main.yml
deleted file mode 100644
index 80755be..0000000
--- a/deployment/roles/snort/tasks/main.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: daq.yml
-
-- include: snort.yml
-
-- include: flume.yml
-
-- name: Turn on promiscuous mode for {{ sniff_interface }}
-  shell: "ip link set {{ sniff_interface }} promisc on"
-
-- name: Start snort
-  service: name=snortd state=restarted
-
-- name: Start flume service to consume snort alerts
-  service: name=flume-agent state=restarted args=snort

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/tasks/snort.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/tasks/snort.yml b/deployment/roles/snort/tasks/snort.yml
deleted file mode 100644
index 6bfecc2..0000000
--- a/deployment/roles/snort/tasks/snort.yml
+++ /dev/null
@@ -1,85 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download snort
-  get_url:
-    url: "{{ snort_src_url }}"
-    dest: "/tmp/snort-{{ snort_version }}.src.rpm"
-
-- name: Build snort
-  shell: "rpmbuild --rebuild snort-{{ snort_version }}.src.rpm"
-  args:
-    chdir: /tmp
-    creates: /root/rpmbuild/RPMS/x86_64/snort-{{ snort_version }}.x86_64.rpm
-
-- name: Install snort
-  yum:
-    name: /root/rpmbuild/RPMS/x86_64/snort-{{ snort_version }}.x86_64.rpm
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Download snort community rules
-  get_url:
-    url: "{{ snort_community_rules_url }}"
-    dest: "/tmp/community-rules.tar.gz"
-
-- name: Extract tarball
-  unarchive:
-    src: "/tmp/community-rules.tar.gz"
-    dest: /tmp
-    copy: no
-    creates: "/tmp/community-rules"
-
-- name: Install snort rules
-  shell: "{{ item }}"
-  args:
-    chdir: /tmp
-  with_items:
-    - cp -r community-rules/community.rules /etc/snort/rules
-    - touch /etc/snort/rules/white_list.rules
-    - touch /etc/snort/rules/black_list.rules
-    - touch /var/log/snort/alerts
-    - chown -R snort:snort /etc/snort
-
-- name: Uncomment all snort community rules
-  shell: sed -i 's/^# alert/alert/' /etc/snort/rules/community.rules
-
-- name: Download snort configuration
-  copy: src=snort.conf dest=/etc/snort/snort.conf
-
-- name: Configure network
-  lineinfile:
-    dest: /etc/snort/snort.conf
-    regexp: "^ipvar HOME_NET.*$"
-    line: "ipvar HOME_NET {{ ansible_eth0.ipv4.address }}"
-
-- name: Configure alerting
-  lineinfile:
-    dest: /etc/snort/snort.conf
-    line: "output alert_csv: {{ snort_alert_csv_path }} default"
-
-- name: Configure sysconfig
-  lineinfile:
-    dest: /etc/sysconfig/snort
-    regexp: "{{ item.regexp }}"
-    line: "{{ item.line }}"
-  with_items:
-    - { regexp: "^ALERTMODE=.*$",     line: "ALERTMODE=" }
-    - { regexp: "^NO_PACKET_LOG=.*$", line: "NO_PACKET_LOG=1" }
-    - { regexp: "^INTERFACE=.*$",     line: "INTERFACE={{ sniff_interface }}" }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/defaults/main.yml b/deployment/roles/solr/defaults/main.yml
deleted file mode 100644
index b40d534..0000000
--- a/deployment/roles/solr/defaults/main.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-rhel_hdp_utils_install_url: http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6/hdp-util.repo
-solr_install_path: /opt/lucidworks-hdpsearch/solr
-solr_user: solr
-solr_collection_name: Metron
-solr_config_dir: "{{ solr_install_path }}/server/solr/configsets/basic_configs/conf"
-solr_bin_dir: "/opt/lucidworks-hdpsearch/solr/bin"
-solr_config_name: "metron_conf"
-solr_number_shards: "{{ groups['search'] | length }}"
-solr_replication_factor: 1
-solr_autoSoftCommit_maxTime: 60
-solr_cmd: "{{ solr_bin_dir}}/solr create_collection -c  {{ solr_collection_name }} -d {{ solr_config_dir }} -n {{ solr_config_name }} -shards {{ solr_number_shards }} -replicationFactor {{ solr_replication_factor }}"
-hdp_utils_repo_path: /etc/yum.repos.d/HDP-UTILS.repo
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/files/schema.xml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/files/schema.xml b/deployment/roles/solr/files/schema.xml
deleted file mode 100644
index 43452a2..0000000
--- a/deployment/roles/solr/files/schema.xml
+++ /dev/null
@@ -1,191 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<schema name="metron" version="1.5">
-
-    <field name="_version_" type="long" indexed="true" stored="true"/>
-    <field name="id" type="string" indexed="true" stored="true" required="true" multiValued="false"/>
-    <field name="sensorType" type="string" indexed="true" stored="true" required="true"/>;
-
-    <dynamicField name="*_i" type="int" indexed="true" stored="true"/>
-    <dynamicField name="*_is" type="int" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_s" type="string" indexed="true" stored="true"/>
-    <dynamicField name="*_ss" type="string" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_l" type="long" indexed="true" stored="true"/>
-    <dynamicField name="*_ls" type="long" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_t" type="text_general" indexed="true" stored="true"/>
-    <dynamicField name="*_txt" type="text_general" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_en" type="text_en" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
-    <dynamicField name="*_bs" type="boolean" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_f" type="float" indexed="true" stored="true"/>
-    <dynamicField name="*_fs" type="float" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_d" type="double" indexed="true" stored="true"/>
-    <dynamicField name="*_ds" type="double" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_coordinate" type="tdouble" indexed="true" stored="false"/>
-    <dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
-    <dynamicField name="*_dts" type="date" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="*_p" type="location" indexed="true" stored="true"/>
-    <dynamicField name="*_ti" type="tint" indexed="true" stored="true"/>
-    <dynamicField name="*_tl" type="tlong" indexed="true" stored="true"/>
-    <dynamicField name="*_tf" type="tfloat" indexed="true" stored="true"/>
-    <dynamicField name="*_td" type="tdouble" indexed="true" stored="true"/>
-    <dynamicField name="*_tdt" type="tdate" indexed="true" stored="true"/>
-    <dynamicField name="*_c" type="currency" indexed="true" stored="true"/>
-    <dynamicField name="ignored_*" type="ignored" multiValued="true"/>
-    <dynamicField name="attr_*" type="text_general" indexed="true" stored="true" multiValued="true"/>
-    <dynamicField name="random_*" type="random"/>
-
-    <uniqueKey>id</uniqueKey>
-
-    <fieldType name="string" class="solr.StrField" sortMissingLast="true"/>
-    <fieldType name="boolean" class="solr.BoolField" sortMissingLast="true"/>
-    <fieldType name="int" class="solr.TrieIntField" precisionStep="0" positionIncrementGap="0"/>
-    <fieldType name="float" class="solr.TrieFloatField" precisionStep="0" positionIncrementGap="0"/>
-    <fieldType name="long" class="solr.TrieLongField" precisionStep="0" positionIncrementGap="0"/>
-    <fieldType name="double" class="solr.TrieDoubleField" precisionStep="0" positionIncrementGap="0"/>
-    <fieldType name="tint" class="solr.TrieIntField" precisionStep="8" positionIncrementGap="0"/>
-    <fieldType name="tfloat" class="solr.TrieFloatField" precisionStep="8" positionIncrementGap="0"/>
-    <fieldType name="tlong" class="solr.TrieLongField" precisionStep="8" positionIncrementGap="0"/>
-    <fieldType name="tdouble" class="solr.TrieDoubleField" precisionStep="8" positionIncrementGap="0"/>
-    <fieldType name="date" class="solr.TrieDateField" precisionStep="0" positionIncrementGap="0"/>
-    <fieldType name="tdate" class="solr.TrieDateField" precisionStep="6" positionIncrementGap="0"/>
-    <fieldType name="binary" class="solr.BinaryField"/>
-    <fieldType name="random" class="solr.RandomSortField" indexed="true"/>
-    <fieldType name="text_ws" class="solr.TextField" positionIncrementGap="100">
-        <analyzer>
-            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="text_general" class="solr.TextField" positionIncrementGap="100">
-        <analyzer type="index">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-        </analyzer>
-        <analyzer type="query">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
-            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="text_en" class="solr.TextField" positionIncrementGap="100">
-        <analyzer type="index">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.StopFilterFactory"
-                    ignoreCase="true"
-                    words="lang/stopwords_en.txt"
-            />
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.EnglishPossessiveFilterFactory"/>
-            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-            <filter class="solr.PorterStemFilterFactory"/>
-        </analyzer>
-        <analyzer type="query">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
-            <filter class="solr.StopFilterFactory"
-                    ignoreCase="true"
-                    words="lang/stopwords_en.txt"
-            />
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.EnglishPossessiveFilterFactory"/>
-            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-            <filter class="solr.PorterStemFilterFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="text_en_splitting" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
-        <analyzer type="index">
-            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-            <filter class="solr.StopFilterFactory"
-                    ignoreCase="true"
-                    words="lang/stopwords_en.txt"
-            />
-            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1" catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-            <filter class="solr.PorterStemFilterFactory"/>
-        </analyzer>
-        <analyzer type="query">
-            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
-            <filter class="solr.StopFilterFactory"
-                    ignoreCase="true"
-                    words="lang/stopwords_en.txt"
-            />
-            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0" catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-            <filter class="solr.PorterStemFilterFactory"/>
-        </analyzer>
-    </fieldType>
-
-    <fieldType name="text_en_splitting_tight" class="solr.TextField" positionIncrementGap="100" autoGeneratePhraseQueries="true">
-        <analyzer>
-            <tokenizer class="solr.WhitespaceTokenizerFactory"/>
-            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="false"/>
-            <filter class="solr.StopFilterFactory" ignoreCase="true" words="lang/stopwords_en.txt"/>
-            <filter class="solr.WordDelimiterFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1" catenateNumbers="1" catenateAll="0"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.KeywordMarkerFilterFactory" protected="protwords.txt"/>
-            <filter class="solr.EnglishMinimalStemFilterFactory"/>
-            <filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="text_general_rev" class="solr.TextField" positionIncrementGap="100">
-        <analyzer type="index">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.ReversedWildcardFilterFactory" withOriginal="true"
-                    maxPosAsterisk="3" maxPosQuestion="2" maxFractionAsterisk="0.33"/>
-        </analyzer>
-        <analyzer type="query">
-            <tokenizer class="solr.StandardTokenizerFactory"/>
-            <filter class="solr.SynonymFilterFactory" synonyms="synonyms.txt" ignoreCase="true" expand="true"/>
-            <filter class="solr.StopFilterFactory" ignoreCase="true" words="stopwords.txt"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="alphaOnlySort" class="solr.TextField" sortMissingLast="true" omitNorms="true">
-        <analyzer>
-            <tokenizer class="solr.KeywordTokenizerFactory"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-            <filter class="solr.TrimFilterFactory"/>
-            <filter class="solr.PatternReplaceFilterFactory"
-                    pattern="([^a-z])" replacement="" replace="all"
-            />
-        </analyzer>
-    </fieldType>
-    <fieldType name="lowercase" class="solr.TextField" positionIncrementGap="100">
-        <analyzer>
-            <tokenizer class="solr.KeywordTokenizerFactory"/>
-            <filter class="solr.LowerCaseFilterFactory"/>
-        </analyzer>
-    </fieldType>
-    <fieldType name="ignored" stored="false" indexed="false" multiValued="true" class="solr.StrField"/>
-    <fieldType name="point" class="solr.PointType" dimension="2" subFieldSuffix="_d"/>
-    <fieldType name="location" class="solr.LatLonType" subFieldSuffix="_coordinate"/>
-    <fieldType name="location_rpt" class="solr.SpatialRecursivePrefixTreeFieldType"
-               geo="true" distErrPct="0.025" maxDistErr="0.001" distanceUnits="kilometers"/>
-    <fieldType name="bbox" class="solr.BBoxField"
-               geo="true" distanceUnits="kilometers" numberType="_bbox_coord"/>
-    <fieldType name="_bbox_coord" class="solr.TrieDoubleField" precisionStep="8" docValues="true" stored="false"/>
-    <fieldType name="currency" class="solr.CurrencyField" precisionStep="8" defaultCurrency="USD" currencyConfig="currency.xml"/>
-</schema>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/meta/main.yml b/deployment/roles/solr/meta/main.yml
deleted file mode 100644
index 454dd37..0000000
--- a/deployment/roles/solr/meta/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-  - java_jdk
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/tasks/main.yml b/deployment/roles/solr/tasks/main.yml
deleted file mode 100644
index cfbb6b5..0000000
--- a/deployment/roles/solr/tasks/main.yml
+++ /dev/null
@@ -1,74 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Check for Metron jar path
-  stat: path={{ hdp_utils_repo_path }}
-  register: hdp_utils
-
-
-- name: Install HDP-UTILs Repo
-  get_url:
-    url: "{{ rhel_hdp_utils_install_url }}"
-    dest: /etc/yum.repos.d/HDP-UTILS.repo
-  when: hdp_utils.stat.exists == False
-
-- name: Install HDP-UTIL gpg key
-  rpm_key:
-    state: present
-    key: http://pgp.mit.edu/pks/lookup?op=get&search=0xB9733A7A07513CAD
-  when: hdp_utils.stat.exists == False
-
-- name: Install Solr
-  yum:
-    name: lucidworks-hdpsearch
-    state: present
-
-- name: Create solr.xml from template
-  template:
-    src: solr.xml
-    dest: "{{ solr_install_path }}/server/solr"
-    mode: 0644
-    owner: "{{ solr_user }}"
-    group: "{{ solr_user }}"
-
-- name: Copy solrschema.xml to {{ inventory_hostname }}
-  copy:
-    src: schema.xml
-    dest: "{{ solr_config_dir }}"
-    mode: 0644
-    owner: "{{ solr_user }}"
-    group: "{{ solr_user }}"
-
-- name: Create solrconfig.xml from template
-  template:
-    src: solrconfig.xml
-    dest: "{{ solr_config_dir }}"
-    mode: 0644
-    owner: "{{ solr_user }}"
-    group: "{{ solr_user }}"
-
-- name: Start Solr
-  service:
-    name: solr
-    state: restarted
-    enabled: yes
-
-- name: Create Collection {{ solr_collection_name }} with {{ solr_number_shards }} shard(s) and replication factor {{ solr_replication_factor }}
-  shell: "{{ solr_cmd }}"
-  ignore_errors: yes
-  register: result
-  failed_when: result.rc == 1 and result.stderr.find("already exists!") == -1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/templates/solr.xml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/templates/solr.xml b/deployment/roles/solr/templates/solr.xml
deleted file mode 100644
index 407df13..0000000
--- a/deployment/roles/solr/templates/solr.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!--
-   This is an example of a simple "solr.xml" file for configuring one or 
-   more Solr Cores, as well as allowing Cores to be added, removed, and 
-   reloaded via HTTP requests.
-
-   More information about options available in this configuration file, 
-   and Solr Core administration can be found online:
-   http://wiki.apache.org/solr/CoreAdmin
--->
-
-<solr>
-
-  <solrcloud>
-
-    <str name="host">${host:}</str>
-    <int name="hostPort">${jetty.port:8983}</int>
-    <str name="hostContext">${hostContext:solr}</str>
-
-    <bool name="genericCoreNodeNames">${genericCoreNodeNames:true}</bool>
-
-    <str name="zkHost">{{ zookeeper_url }}</str>
-    <int name="zkClientTimeout">${zkClientTimeout:30000}</int>
-    <int name="distribUpdateSoTimeout">${distribUpdateSoTimeout:600000}</int>
-    <int name="distribUpdateConnTimeout">${distribUpdateConnTimeout:60000}</int>
-
-  </solrcloud>
-
-  <shardHandlerFactory name="shardHandlerFactory"
-    class="HttpShardHandlerFactory">
-    <int name="socketTimeout">${socketTimeout:600000}</int>
-    <int name="connTimeout">${connTimeout:60000}</int>
-  </shardHandlerFactory>
-
-</solr>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/solr/templates/solrconfig.xml
----------------------------------------------------------------------
diff --git a/deployment/roles/solr/templates/solrconfig.xml b/deployment/roles/solr/templates/solrconfig.xml
deleted file mode 100644
index b00af0f..0000000
--- a/deployment/roles/solr/templates/solrconfig.xml
+++ /dev/null
@@ -1,583 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!-- 
-     For more details about configurations options that may appear in
-     this file, see http://wiki.apache.org/solr/SolrConfigXml. 
--->
-<config>
-  <!-- In all configuration below, a prefix of "solr." for class names
-       is an alias that causes solr to search appropriate packages,
-       including org.apache.solr.(search|update|request|core|analysis)
-
-       You may also specify a fully qualified Java classname if you
-       have your own custom plugins.
-    -->
-
-  <!-- Controls what version of Lucene various components of Solr
-       adhere to.  Generally, you want to use the latest version to
-       get all bug fixes and improvements. It is highly recommended
-       that you fully re-index after changing this setting as it can
-       affect both how text is indexed and queried.
-  -->
-  <luceneMatchVersion>5.2.1</luceneMatchVersion>
-
-  <!-- Data Directory
-
-       Used to specify an alternate directory to hold all index data
-       other than the default ./data under the Solr home.  If
-       replication is in use, this should match the replication
-       configuration.
-    -->
-  <dataDir>${solr.data.dir:}</dataDir>
-
-
-  <!-- The DirectoryFactory to use for indexes.
-       
-       solr.StandardDirectoryFactory is filesystem
-       based and tries to pick the best implementation for the current
-       JVM and platform.  solr.NRTCachingDirectoryFactory, the default,
-       wraps solr.StandardDirectoryFactory and caches small files in memory
-       for better NRT performance.
-
-       One can force a particular implementation via solr.MMapDirectoryFactory,
-       solr.NIOFSDirectoryFactory, or solr.SimpleFSDirectoryFactory.
-
-       solr.RAMDirectoryFactory is memory based, not
-       persistent, and doesn't work with replication.
-    -->
-  <directoryFactory name="DirectoryFactory" 
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}">
-  </directoryFactory> 
-
-  <!-- The CodecFactory for defining the format of the inverted index.
-       The default implementation is SchemaCodecFactory, which is the official Lucene
-       index format, but hooks into the schema to provide per-field customization of
-       the postings lists and per-document values in the fieldType element
-       (postingsFormat/docValuesFormat). Note that most of the alternative implementations
-       are experimental, so if you choose to customize the index format, it's a good
-       idea to convert back to the official format e.g. via IndexWriter.addIndexes(IndexReader)
-       before upgrading to a newer version to avoid unnecessary reindexing.
-  -->
-  <codecFactory class="solr.SchemaCodecFactory"/>
-
-  <schemaFactory class="ClassicIndexSchemaFactory"/>
-
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Index Config - These settings control low-level behavior of indexing
-       Most example settings here show the default value, but are commented
-       out, to more easily see where customizations have been made.
-       
-       Note: This replaces <indexDefaults> and <mainIndex> from older versions
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <indexConfig>
-
-    <!-- LockFactory 
-
-         This option specifies which Lucene LockFactory implementation
-         to use.
-      
-         single = SingleInstanceLockFactory - suggested for a
-                  read-only index or when there is no possibility of
-                  another process trying to modify the index.
-         native = NativeFSLockFactory - uses OS native file locking.
-                  Do not use when multiple solr webapps in the same
-                  JVM are attempting to share a single index.
-         simple = SimpleFSLockFactory  - uses a plain file for locking
-
-         Defaults: 'native' is default for Solr3.6 and later, otherwise
-                   'simple' is the default
-
-         More details on the nuances of each LockFactory...
-         http://wiki.apache.org/lucene-java/AvailableLockFactories
-    -->
-    <lockType>${solr.lock.type:native}</lockType>
-
-    <!-- Lucene Infostream
-       
-         To aid in advanced debugging, Lucene provides an "InfoStream"
-         of detailed information when indexing.
-
-         Setting the value to true will instruct the underlying Lucene
-         IndexWriter to write its info stream to solr's log. By default,
-         this is enabled here, and controlled through log4j.properties.
-      -->
-     <infoStream>true</infoStream>
-  </indexConfig>
-
-
-  <!-- JMX
-       
-       This example enables JMX if and only if an existing MBeanServer
-       is found, use this if you want to configure JMX through JVM
-       parameters. Remove this to disable exposing Solr configuration
-       and statistics to JMX.
-
-       For more details see http://wiki.apache.org/solr/SolrJmx
-    -->
-  <jmx />
-  <!-- If you want to connect to a particular server, specify the
-       agentId 
-    -->
-  <!-- <jmx agentId="myAgent" /> -->
-  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
-  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
-    -->
-
-  <!-- The default high-performance update handler -->
-  <updateHandler class="solr.DirectUpdateHandler2">
-
-    <!-- Enables a transaction log, used for real-time get, durability, and
-         and solr cloud replica recovery.  The log can grow as big as
-         uncommitted changes to the index, so use of a hard autoCommit
-         is recommended (see below).
-         "dir" - the target directory for transaction logs, defaults to the
-                solr data directory.
-         "numVersionBuckets" - sets the number of buckets used to keep
-                track of max version values when checking for re-ordered
-                updates; increase this value to reduce the cost of
-                synchronizing access to version buckets during high-volume
-                indexing, this requires 8 bytes (long) * numVersionBuckets
-                of heap space per Solr core.
-    -->
-    <updateLog>
-      <str name="dir">${solr.ulog.dir:}</str>
-      <int name="numVersionBuckets">${solr.ulog.numVersionBuckets:65536}</int>
-    </updateLog>
- 
-    <!-- AutoCommit
-
-         Perform a hard commit automatically under certain conditions.
-         Instead of enabling autoCommit, consider using "commitWithin"
-         when adding documents. 
-
-         http://wiki.apache.org/solr/UpdateXmlMessages
-
-         maxDocs - Maximum number of documents to add since the last
-                   commit before automatically triggering a new commit.
-
-         maxTime - Maximum amount of time in ms that is allowed to pass
-                   since a document was added before automatically
-                   triggering a new commit. 
-         openSearcher - if false, the commit causes recent index changes
-           to be flushed to stable storage, but does not cause a new
-           searcher to be opened to make those changes visible.
-
-         If the updateLog is enabled, then it's highly recommended to
-         have some sort of hard autoCommit to limit the log size.
-      -->
-     <autoCommit> 
-       <maxTime>${solr.autoCommit.maxTime:15000}</maxTime> 
-       <openSearcher>false</openSearcher> 
-     </autoCommit>
-
-    <!-- softAutoCommit is like autoCommit except it causes a
-         'soft' commit which only ensures that changes are visible
-         but does not ensure that data is synced to disk.  This is
-         faster and more near-realtime friendly than a hard commit.
-      -->
-     <autoSoftCommit>
-       <maxTime>${solr.autoSoftCommit.maxTime:{{ solr_autoSoftCommit_maxTime }}}</maxTime>
-     </autoSoftCommit>
-
-  </updateHandler>
-  
-  <!-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-       Query section - these settings control query time things like caches
-       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -->
-  <query>
-    <!-- Max Boolean Clauses
-
-         Maximum number of clauses in each BooleanQuery,  an exception
-         is thrown if exceeded.
-
-         ** WARNING **
-         
-         This option actually modifies a global Lucene property that
-         will affect all SolrCores.  If multiple solrconfig.xml files
-         disagree on this property, the value at any given moment will
-         be based on the last SolrCore to be initialized.
-         
-      -->
-    <maxBooleanClauses>1024</maxBooleanClauses>
-
-
-    <!-- Solr Internal Query Caches
-
-         There are two implementations of cache available for Solr,
-         LRUCache, based on a synchronized LinkedHashMap, and
-         FastLRUCache, based on a ConcurrentHashMap.  
-
-         FastLRUCache has faster gets and slower puts in single
-         threaded operation and thus is generally faster than LRUCache
-         when the hit ratio of the cache is high (> 75%), and may be
-         faster under other scenarios on multi-cpu systems.
-    -->
-
-    <!-- Filter Cache
-
-         Cache used by SolrIndexSearcher for filters (DocSets),
-         unordered sets of *all* documents that match a query.  When a
-         new searcher is opened, its caches may be prepopulated or
-         "autowarmed" using data from caches in the old searcher.
-         autowarmCount is the number of items to prepopulate.  For
-         LRUCache, the autowarmed items will be the most recently
-         accessed items.
-
-         Parameters:
-           class - the SolrCache implementation to use
-               (LRUCache or FastLRUCache)
-           size - the maximum number of entries in the cache
-           initialSize - the initial capacity (number of entries) of
-               the cache.  (see java.util.HashMap)
-           autowarmCount - the number of entries to prepopulate from
-               an old cache.  
-      -->
-    <filterCache class="solr.FastLRUCache"
-                 size="512"
-                 initialSize="512"
-                 autowarmCount="0"/>
-
-    <!-- Query Result Cache
-
-        Caches results of searches - ordered lists of document ids
-        (DocList) based on a query, a sort, and the range of documents requested.
-        Additional supported parameter by LRUCache:
-           maxRamMB - the maximum amount of RAM (in MB) that this cache is allowed
-                      to occupy
-     -->
-    <queryResultCache class="solr.LRUCache"
-                     size="512"
-                     initialSize="512"
-                     autowarmCount="0"/>
-   
-    <!-- Document Cache
-
-         Caches Lucene Document objects (the stored fields for each
-         document).  Since Lucene internal document ids are transient,
-         this cache will not be autowarmed.  
-      -->
-    <documentCache class="solr.LRUCache"
-                   size="512"
-                   initialSize="512"
-                   autowarmCount="0"/>
-    
-    <!-- custom cache currently used by block join --> 
-    <cache name="perSegFilter"
-      class="solr.search.LRUCache"
-      size="10"
-      initialSize="0"
-      autowarmCount="10"
-      regenerator="solr.NoOpRegenerator" />
-
-    <!-- Lazy Field Loading
-
-         If true, stored fields that are not requested will be loaded
-         lazily.  This can result in a significant speed improvement
-         if the usual case is to not load all stored fields,
-         especially if the skipped fields are large compressed text
-         fields.
-    -->
-    <enableLazyFieldLoading>true</enableLazyFieldLoading>
-
-   <!-- Result Window Size
-
-        An optimization for use with the queryResultCache.  When a search
-        is requested, a superset of the requested number of document ids
-        are collected.  For example, if a search for a particular query
-        requests matching documents 10 through 19, and queryWindowSize is 50,
-        then documents 0 through 49 will be collected and cached.  Any further
-        requests in that range can be satisfied via the cache.  
-     -->
-   <queryResultWindowSize>20</queryResultWindowSize>
-
-   <!-- Maximum number of documents to cache for any entry in the
-        queryResultCache. 
-     -->
-   <queryResultMaxDocsCached>200</queryResultMaxDocsCached>
-
-    <!-- Use Cold Searcher
-
-         If a search request comes in and there is no current
-         registered searcher, then immediately register the still
-         warming searcher and use it.  If "false" then all requests
-         will block until the first searcher is done warming.
-      -->
-    <useColdSearcher>false</useColdSearcher>
-
-    <!-- Max Warming Searchers
-         
-         Maximum number of searchers that may be warming in the
-         background concurrently.  An error is returned if this limit
-         is exceeded.
-
-         Recommend values of 1-2 for read-only slaves, higher for
-         masters w/o cache warming.
-      -->
-    <maxWarmingSearchers>2</maxWarmingSearchers>
-
-  </query>
-
-
-  <!-- Request Dispatcher
-
-       This section contains instructions for how the SolrDispatchFilter
-       should behave when processing requests for this SolrCore.
-
-       handleSelect is a legacy option that affects the behavior of requests
-       such as /select?qt=XXX
-
-       handleSelect="true" will cause the SolrDispatchFilter to process
-       the request and dispatch the query to a handler specified by the 
-       "qt" param, assuming "/select" isn't already registered.
-
-       handleSelect="false" will cause the SolrDispatchFilter to
-       ignore "/select" requests, resulting in a 404 unless a handler
-       is explicitly registered with the name "/select"
-
-       handleSelect="true" is not recommended for new users, but is the default
-       for backwards compatibility
-    -->
-  <requestDispatcher handleSelect="false" >
-    <!-- Request Parsing
-
-         These settings indicate how Solr Requests may be parsed, and
-         what restrictions may be placed on the ContentStreams from
-         those requests
-
-         enableRemoteStreaming - enables use of the stream.file
-         and stream.url parameters for specifying remote streams.
-
-         multipartUploadLimitInKB - specifies the max size (in KiB) of
-         Multipart File Uploads that Solr will allow in a Request.
-         
-         formdataUploadLimitInKB - specifies the max size (in KiB) of
-         form data (application/x-www-form-urlencoded) sent via
-         POST. You can use POST to pass request parameters not
-         fitting into the URL.
-         
-         addHttpRequestToContext - if set to true, it will instruct
-         the requestParsers to include the original HttpServletRequest
-         object in the context map of the SolrQueryRequest under the 
-         key "httpRequest". It will not be used by any of the existing
-         Solr components, but may be useful when developing custom 
-         plugins.
-         
-         *** WARNING ***
-         The settings below authorize Solr to fetch remote files, You
-         should make sure your system has some authentication before
-         using enableRemoteStreaming="true"
-
-      --> 
-    <requestParsers enableRemoteStreaming="true" 
-                    multipartUploadLimitInKB="2048000"
-                    formdataUploadLimitInKB="2048"
-                    addHttpRequestToContext="false"/>
-
-    <!-- HTTP Caching
-
-         Set HTTP caching related parameters (for proxy caches and clients).
-
-         The options below instruct Solr not to output any HTTP Caching
-         related headers
-      -->
-    <httpCaching never304="true" />
-
-  </requestDispatcher>
-
-  <!-- Request Handlers 
-
-       http://wiki.apache.org/solr/SolrRequestHandler
-
-       Incoming queries will be dispatched to a specific handler by name
-       based on the path specified in the request.
-
-       Legacy behavior: If the request path uses "/select" but no Request
-       Handler has that name, and if handleSelect="true" has been specified in
-       the requestDispatcher, then the Request Handler is dispatched based on
-       the qt parameter.  Handlers without a leading '/' are accessed this way
-       like so: http://host/app/[core/]select?qt=name  If no qt is
-       given, then the requestHandler that declares default="true" will be
-       used or the one named "standard".
-
-       If a Request Handler is declared with startup="lazy", then it will
-       not be initialized until the first request that uses it.
-
-    -->
-  <!-- SearchHandler
-
-       http://wiki.apache.org/solr/SearchHandler
-
-       For processing Search Queries, the primary Request Handler
-       provided with Solr is "SearchHandler" It delegates to a sequence
-       of SearchComponents (see below) and supports distributed
-       queries across multiple shards
-    -->
-  <requestHandler name="/select" class="solr.SearchHandler">
-    <!-- default values for query parameters can be specified, these
-         will be overridden by parameters in the request
-      -->
-     <lst name="defaults">
-       <str name="echoParams">explicit</str>
-       <int name="rows">10</int>
-     </lst>
-
-    </requestHandler>
-
-  <!-- A request handler that returns indented JSON by default -->
-  <requestHandler name="/query" class="solr.SearchHandler">
-     <lst name="defaults">
-       <str name="echoParams">explicit</str>
-       <str name="wt">json</str>
-       <str name="indent">true</str>
-       <str name="df">text</str>
-     </lst>
-  </requestHandler>
-
-  <!--
-    The export request handler is used to export full sorted result sets.
-    Do not change these defaults.
-  -->
-  <requestHandler name="/export" class="solr.SearchHandler">
-    <lst name="invariants">
-      <str name="rq">{!xport}</str>
-      <str name="wt">xsort</str>
-      <str name="distrib">false</str>
-    </lst>
-
-    <arr name="components">
-      <str>query</str>
-    </arr>
-  </requestHandler>
-
-
-  <initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell">
-    <lst name="defaults">
-      <str name="df">text</str>
-    </lst>
-  </initParams>
-
-  <!-- Field Analysis Request Handler
-
-       RequestHandler that provides much the same functionality as
-       analysis.jsp. Provides the ability to specify multiple field
-       types and field names in the same request and outputs
-       index-time and query-time analysis for each of them.
-
-       Request parameters are:
-       analysis.fieldname - field name whose analyzers are to be used
-
-       analysis.fieldtype - field type whose analyzers are to be used
-       analysis.fieldvalue - text for index-time analysis
-       q (or analysis.q) - text for query time analysis
-       analysis.showmatch (true|false) - When set to true and when
-           query analysis is performed, the produced tokens of the
-           field value analysis will be marked as "matched" for every
-           token that is produced by the query analysis
-   -->
-  <requestHandler name="/analysis/field" 
-                  startup="lazy"
-                  class="solr.FieldAnalysisRequestHandler" />
-
-
-  <!-- Document Analysis Handler
-
-       http://wiki.apache.org/solr/AnalysisRequestHandler
-
-       An analysis handler that provides a breakdown of the analysis
-       process of provided documents. This handler expects a (single)
-       content stream with the following format:
-
-       <docs>
-         <doc>
-           <field name="id">1</field>
-           <field name="name">The Name</field>
-           <field name="text">The Text Value</field>
-         </doc>
-         <doc>...</doc>
-         <doc>...</doc>
-         ...
-       </docs>
-
-    Note: Each document must contain a field which serves as the
-    unique key. This key is used in the returned response to associate
-    an analysis breakdown to the analyzed document.
-
-    Like the FieldAnalysisRequestHandler, this handler also supports
-    query analysis by sending either an "analysis.query" or "q"
-    request parameter that holds the query text to be analyzed. It
-    also supports the "analysis.showmatch" parameter which when set to
-    true, all field tokens that match the query tokens will be marked
-    as a "match". 
-  -->
-  <requestHandler name="/analysis/document" 
-                  class="solr.DocumentAnalysisRequestHandler" 
-                  startup="lazy" />
-
-  <!-- Echo the request contents back to the client -->
-  <requestHandler name="/debug/dump" class="solr.DumpRequestHandler" >
-    <lst name="defaults">
-     <str name="echoParams">explicit</str> 
-     <str name="echoHandler">true</str>
-    </lst>
-  </requestHandler>
-  
-
-
-  <!-- Search Components
-
-       Search components are registered to SolrCore and used by 
-       instances of SearchHandler (which can access them by name)
-       
-       By default, the following components are available:
-       
-       <searchComponent name="query"     class="solr.QueryComponent" />
-       <searchComponent name="facet"     class="solr.FacetComponent" />
-       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
-       <searchComponent name="highlight" class="solr.HighlightComponent" />
-       <searchComponent name="stats"     class="solr.StatsComponent" />
-       <searchComponent name="debug"     class="solr.DebugComponent" />
-       
-     -->
-
-  <!-- Terms Component
-
-       http://wiki.apache.org/solr/TermsComponent
-
-       A component to return terms and document frequency of those
-       terms
-    -->
-  <searchComponent name="terms" class="solr.TermsComponent"/>
-
-  <!-- A request handler for demonstrating the terms component -->
-  <requestHandler name="/terms" class="solr.SearchHandler" startup="lazy">
-     <lst name="defaults">
-      <bool name="terms">true</bool>
-      <bool name="distrib">false</bool>
-    </lst>     
-    <arr name="components">
-      <str>terms</str>
-    </arr>
-  </requestHandler>
-
-  <!-- Legacy config for the admin interface -->
-  <admin>
-    <defaultQuery>*:*</defaultQuery>
-  </admin>
-
-</config>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/tap_interface/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/tap_interface/defaults/main.yml b/deployment/roles/tap_interface/defaults/main.yml
deleted file mode 100644
index ca752b4..0000000
--- a/deployment/roles/tap_interface/defaults/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-tap_if: tap0
-tap_ip: 10.0.0.1

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/tap_interface/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/tap_interface/tasks/main.yml b/deployment/roles/tap_interface/tasks/main.yml
deleted file mode 100644
index 1de3abe..0000000
--- a/deployment/roles/tap_interface/tasks/main.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install tunctl
-  yum:
-    name: tunctl
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Create {{ tap_if }}
-  command: tunctl -p
-
-- name: Bring up {{ tap_if }} on {{ tap_ip }}
-  command: ifconfig {{ tap_if }} {{ tap_ip }} up
-
-- name:  Put {{ tap_if }} in PROMISC
-  command: ip link set {{ tap_if }} promisc on
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/yaf/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/yaf/defaults/main.yml b/deployment/roles/yaf/defaults/main.yml
deleted file mode 100644
index d0b53c3..0000000
--- a/deployment/roles/yaf/defaults/main.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-#
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-fixbuf_version: 1.7.1
-yaf_version: 2.8.0
-yaf_home: /opt/yaf
-yaf_topic: yaf
-hdp_repo_def: http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.3.2.0/hdp.repo
-yaf_bin: /usr/local/bin/yaf
-yafscii_bin: /usr/local/bin/yafscii
-yaf_log: /var/log/yaf.log
-kafka_prod: /usr/hdp/current/kafka-broker/bin/kafka-console-producer.sh
-daemon_bin: /usr/local/bin/airdaemon
-yaf_start: /opt/yaf/start-yaf.sh
-yaf_args:



[47/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_server/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_server/tasks/main.yml b/deployment/roles/mysql_server/tasks/main.yml
deleted file mode 100644
index a484ed0..0000000
--- a/deployment/roles/mysql_server/tasks/main.yml
+++ /dev/null
@@ -1,93 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create temporary directories
-  file:
-    path: "/tmp/geoip"
-    state: directory
-    mode: 0755
-
-- name: Install Mysql Community Release Repo Def
-  get_url:
-    dest: /tmp/{{ mysql_rpm_version }}.rpm
-    url: "{{ mysql_yum_repo_url }}"
-
-- name: Install Mysql Community Release Repo
-  yum:
-    pkg: /tmp/{{ mysql_rpm_version }}.rpm
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Install MySQL
-  yum:
-    name: "{{ item }}"
-    state: latest
-  with_items:
-    - "mysql-community-server"
-    - "MySQL-python"
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Start MySQL
-  service:
-    name: mysqld
-    state: started
-    enabled: yes
-
-- name: Retrieve temporary root password
-  shell: "grep 'temporary password' /var/log/mysqld.log | sed 's/.*root@localhost: //'"
-  args:
-    creates: ~/.my.cnf
-  register: temp_root_password
-
-- name: Update mysql root password
-  command: "mysqladmin --user=root --password='{{ temp_root_password.stdout }}' password '{{ mysql_root_password }}'"
-  ignore_errors: yes
-  args:
-    creates: ~/.my.cnf
-
-- name: Create .my.cnf
-  template:
-    src: "../roles/mysql_server/templates/.my.cnf"
-    dest: ~/.my.cnf
-
-
-- name: Download GeoIP databases
-  unarchive:
-    src:  http://geolite.maxmind.com/download/geoip/database/GeoLiteCity_CSV/GeoLiteCity-latest.tar.xz
-    dest: /tmp/geoip
-    copy: no
-    creates: /tmp/geoip/*/GeoLiteCity-Blocks.csv
-
-- name: Copy to MySQL import directory
-  shell: "cp /tmp/geoip/*/*.csv /var/lib/mysql-files/"
-
-- name: Copy DDL
-  copy:
-    src: geoip_ddl.sql
-    dest: /tmp/geoip_ddl.sql
-
-- name: Import GeoIP DDL
-  mysql_db:
-    name: all
-    state: import
-    target: /tmp/geoip_ddl.sql

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/mysql_server/templates/.my.cnf
----------------------------------------------------------------------
diff --git a/deployment/roles/mysql_server/templates/.my.cnf b/deployment/roles/mysql_server/templates/.my.cnf
deleted file mode 100644
index d5c0825..0000000
--- a/deployment/roles/mysql_server/templates/.my.cnf
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-[client]
-user=root
-password={{ mysql_root_password }}
-host=localhost
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/ntp/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/ntp/tasks/main.yml b/deployment/roles/ntp/tasks/main.yml
deleted file mode 100644
index 7b1b9a8..0000000
--- a/deployment/roles/ntp/tasks/main.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install ntp
-  yum:
-    name: ntp
-    state: present
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Ensure ntp is running and enabled
-  service:
-    name: ntpd
-    state: started
-    enabled: yes

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/defaults/main.yml b/deployment/roles/packet-capture/defaults/main.yml
deleted file mode 100644
index 3e6358c..0000000
--- a/deployment/roles/packet-capture/defaults/main.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-# dpdk
-dpdk_home: "/usr/local/dpdk"
-dpdk_version: "2.2.0"
-dpdk_sdk: "/root/dpdk-{{ dpdk_version }}"
-dpdk_target: "x86_64-native-linuxapp-gcc"
-num_huge_pages: 512
-extra_cflags: -g
-
-# pcapture
-pcapture_work_dir: /root/packet-capture
-pcapture_prefix: /usr/local/bin
-pcapture_ld_library_path: /usr/local/lib
-pcapture_portmask: 0x01
-pcapture_kafka_config: /etc/pcapture.conf
-pcapture_bin: pcapture

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/meta/main.yml b/deployment/roles/packet-capture/meta/main.yml
deleted file mode 100644
index d253e88..0000000
--- a/deployment/roles/packet-capture/meta/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - librdkafka

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/debug.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/debug.yml b/deployment/roles/packet-capture/tasks/debug.yml
deleted file mode 100644
index 06f1526..0000000
--- a/deployment/roles/packet-capture/tasks/debug.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - name: Install debug utilities
-    yum: name=yum-utils
-    tags:
-      - debug
-
-  - name: Install debug symbols
-    shell: debuginfo-install -y glibc glib2 zlib
-    tags:
-      - debug

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/dependencies.yml b/deployment/roles/packet-capture/tasks/dependencies.yml
deleted file mode 100644
index 4d6edc4..0000000
--- a/deployment/roles/packet-capture/tasks/dependencies.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - name: Install dependencies
-    yum: name={{ item }}
-    with_items:
-      - "@Development tools"
-      - pciutils
-      - net-tools
-      - glib2
-      - glib2-devel
-      - git
-
-  #
-  # install prerequisite packages and the latest kernel headers.  need to
-  # ensure that the kernel headers match the current running kernel version.
-  # if this is not the case, the DPDK build process will fail
-  #
-  - name: Install latest kernel headers and source
-    yum: name={{ item }} state=latest
-    with_items:
-      - kernel
-      - kernel-devel
-      - kernel-headers

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/dpdk.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/dpdk.yml b/deployment/roles/packet-capture/tasks/dpdk.yml
deleted file mode 100644
index 3780be7..0000000
--- a/deployment/roles/packet-capture/tasks/dpdk.yml
+++ /dev/null
@@ -1,59 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - name: "Download DPDK version {{ dpdk_version }}"
-    unarchive:
-      src: "http://dpdk.org/browse/dpdk/snapshot/dpdk-{{ dpdk_version }}.tar.gz"
-      dest: "/root"
-      creates: "{{ dpdk_sdk }}"
-      copy: no
-
-  - name: "Configure DPDK for the target environment: {{ dpdk_target }}"
-    shell: "make config T={{ dpdk_target }} DESTDIR={{ dpdk_home }}"
-    args:
-      chdir: "{{ dpdk_sdk }}"
-      creates: "{{ dpdk_home }}"
-
-  - name: "Turn on debug flags"
-    lineinfile:
-      dest: "{{ dpdk_sdk }}/config/common_linuxapp"
-      regexp: 'DEBUG=n'
-      line: 'DEBUG=y'
-    tags:
-      - debug
-
-  - name: "Build DPDK for the target environment: {{ dpdk_target }}"
-    shell: "make install T={{ dpdk_target }} DESTDIR={{ dpdk_home }} EXTRA_CFLAGS={{ extra_cflags }}"
-    args:
-      chdir: "{{ dpdk_sdk }}"
-      creates: "{{ dpdk_home }}"
-
-  - name: Load kernel modules to enable userspace IO
-    shell: "{{ item }}"
-    with_items:
-      - modprobe uio_pci_generic
-      - modprobe vfio-pci
-
-  - name: Bind the device to the loaded kernel module(s)
-    shell: "{{ dpdk_home }}/sbin/dpdk_nic_bind --force --bind=uio_pci_generic {{ item }}"
-    with_items: "{{ dpdk_device }}"
-
-  - name: Set useful environment variables
-    lineinfile: "dest=/root/.bash_profile line={{ item }}"
-    with_items:
-      - "export RTE_SDK={{ dpdk_sdk }}"
-      - "export RTE_TARGET={{ dpdk_target }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/kernel.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/kernel.yml b/deployment/roles/packet-capture/tasks/kernel.yml
deleted file mode 100644
index cd4abe6..0000000
--- a/deployment/roles/packet-capture/tasks/kernel.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-#
-# DPDK requires specific kernel boot parameters.  set the params and reboot
-# the host, if the actual params differ from what is expected.
-#
----
-  - set_fact:
-      expected_kernel_params: "default_hugepagesz=1G hugepagesz=1G hugepages={{ num_huge_pages }} iommu=pt intel_iommu=on"
-
-  - name: Check kernel boot parameters
-    shell: "cat /proc/cmdline"
-    register: actual_kernel_params
-
-  - name: Alter kernel boot parameters
-    lineinfile:
-      dest: /etc/default/grub
-      regexp:  '^(GRUB_CMDLINE_LINUX=\"[^\"]+)\"$'
-      line: '\1 {{ expected_kernel_params }}"'
-      backrefs: yes
-    when: not expected_kernel_params in actual_kernel_params.stdout
-
-  - name: Update grub with kernel boot parameters
-    shell: /sbin/grub2-mkconfig -o /boot/grub2/grub.cfg
-    when: not expected_kernel_params in actual_kernel_params.stdout
-
-  - name: Restart for modified kernel params
-    command: shutdown -r now "modified kernel params"
-    async: 0
-    poll: 0
-    ignore_errors: true
-    when: not expected_kernel_params in actual_kernel_params.stdout
-    
-  - name: Wait for reboot of '{{ inventory_hostname }}'
-    local_action: wait_for host={{ inventory_hostname }} state=started port=22 timeout=300 delay=10
-    become: false
-    when: not expected_kernel_params in actual_kernel_params.stdout

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/main.yml b/deployment/roles/packet-capture/tasks/main.yml
deleted file mode 100644
index f096178..0000000
--- a/deployment/roles/packet-capture/tasks/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-  - include: dependencies.yml
-  - include: kernel.yml
-  - include: dpdk.yml
-  - include: pcapture.yml
-  - include: debug.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/tasks/pcapture.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/tasks/pcapture.yml b/deployment/roles/packet-capture/tasks/pcapture.yml
deleted file mode 100644
index d00d379..0000000
--- a/deployment/roles/packet-capture/tasks/pcapture.yml
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Distribute pcapture
-  copy: src=../../../metron-sensors/packet-capture dest={{ pcapture_work_dir | dirname }} mode=0755
-
-- name: Build pcapture
-  shell: "{{ item }}"
-  args:
-    chdir: "{{ pcapture_work_dir }}"
-  with_items:
-    - make
-  environment:
-    RTE_SDK: "{{ dpdk_sdk }}"
-    RTE_TARGET: "{{ dpdk_target }}"
-    LD_LIBRARY_PATH: "{{ pcapture_ld_library_path }}"
-
-- name: Install pcapture
-  shell: "cp {{ pcapture_work_dir }}/src/build/app/{{ pcapture_bin }} {{ pcapture_prefix }}"
-  args:
-    chdir: "{{ pcapture_work_dir }}"
-    creates: "{{ pcapture_prefix }}/{{ pcapture_bin }}"
-
-- name: Deploy configuration
-  template: src=pcapture.conf dest={{ pcapture_kafka_config }} mode=0755
-
-- name: Deploy service
-  template: src=pcapture dest=/etc/init.d/ mode=0755
-
-- name: Register the service with systemd
-  shell: systemctl enable pcapture
-  when: ansible_distribution == "CentOS" and ansible_distribution_major_version == "7"
-
-- name: Run pcapture
-  service: name=pcapture state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/templates/pcapture
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/templates/pcapture b/deployment/roles/packet-capture/templates/pcapture
deleted file mode 100644
index 8c2221a..0000000
--- a/deployment/roles/packet-capture/templates/pcapture
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pcapture daemon
-# chkconfig: 345 20 80
-# description: Packet capture probe
-# processname: pcapture
-#
-
-export RTE_SDK="{{ dpdk_sdk }}"
-export RTE_TARGET="{{ dpdk_target }}"
-export LD_LIBRARY_PATH="{{ pcapture_ld_library_path }}"
-
-DAEMON_PATH="{{ dpdk_sdk }}"
-DAEMON="{{ pcapture_prefix }}/{{ pcapture_bin }}"
-DAEMONOPTS+=" -- "
-DAEMONOPTS+="-p {{ pcapture_portmask }} "
-DAEMONOPTS+="-t {{ pcapture_topic }} "
-DAEMONOPTS+="-c {{ pcapture_kafka_config }} "
-
-NAME="pcapture"
-DESC="Metron network packet capture probe"
-PIDFILE=/var/run/$NAME.pid
-SCRIPTNAME=/etc/init.d/$NAME
-DAEMONLOG=/var/log/$NAME.log
-NOW=`date`
-
-case "$1" in
-  start)
-    printf "%-50s" "Starting $NAME..."
-    echo "$NOW:  Starting $NAME..." >> $DAEMONLOG
-    cd $DAEMON_PATH
-    PID=`$DAEMON $DAEMONOPTS >> $DAEMONLOG 2>&1 & echo $!`
-    if [ -z $PID ]; then
-        printf "%s\n" "Fail"
-    else
-        echo $PID > $PIDFILE
-        printf "%s\n" "Ok"
-    fi
-  ;;
-
-  status)
-    printf "%-50s" "Checking $NAME..."
-    if [ -f $PIDFILE ]; then
-      PID=`cat $PIDFILE`
-      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-        printf "%s\n" "Process dead but pidfile exists"
-      else
-        echo "Running"
-      fi
-    else
-      printf "%s\n" "Service not running"
-    fi
-  ;;
-
-  stop)
-    printf "%-50s" "Stopping $NAME"
-    PID=`cat $PIDFILE`
-    cd $DAEMON_PATH
-    if [ -f $PIDFILE ]; then
-        echo "$NOW:  Stopping $NAME with pid=$PID" >> $DAEMONLOG
-        kill -HUP $PID
-        printf "%s\n" "Ok"
-        rm -f $PIDFILE
-    else
-        printf "%s\n" "pidfile not found"
-    fi
-  ;;
-
-  restart)
-    $0 stop
-    $0 start
-  ;;
-
-  *)
-    echo "Usage: $0 {status|start|stop|restart}"
-    exit 1
-esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/packet-capture/templates/pcapture.conf
----------------------------------------------------------------------
diff --git a/deployment/roles/packet-capture/templates/pcapture.conf b/deployment/roles/packet-capture/templates/pcapture.conf
deleted file mode 100644
index e404476..0000000
--- a/deployment/roles/packet-capture/templates/pcapture.conf
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-#
-# kafka global settings
-#
-[kafka-global]
-
-# initial list of kafka brokers
-metadata.broker.list = {{ kafka_broker_url }}
-
-# identifies the client to kafka
-client.id = metron-packet-capture
-
-# max number of messages allowed on the producer queue
-queue.buffering.max.messages = 1000
-
-# maximum time, in milliseconds, for buffering data on the producer queue
-queue.buffering.max.ms = 3000
-
-# compression codec = none, gzip or snappy
-compression.codec = snappy
-
-# maximum number of messages batched in one MessageSet (increase for better compression)
-batch.num.messages = 10
-
-# max times to retry sending a failed message set
-message.send.max.retries = 5
-
-# backoff time before retrying a message send
-retry.backoff.ms = 250
-
-# how often statistics are emitted; 0 = never
-statistics.interval.ms = 0
-
-# only provide delivery reports for failed messages
-delivery.report.only.error = false
-
-#
-# kafka topic settings
-#
-[kafka-topic]
-
-# broker acks { 1 = leader ack, 0 = no acks, -1 = in sync replica ack }
-request.required.acks = 1
-
-# local message timeout. This value is only enforced locally and limits the time a
-# produced message waits for successful delivery. A time of 0 is infinite.
-message.timeout.ms = 10000
-
-# report offset of produced message back to application. The application must be
-# use the dr_msg_cb to retrieve the offset from rd_kafka_message_t.offset
-produce.offset.report = false

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/README.md
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/README.md b/deployment/roles/pcap_replay/README.md
deleted file mode 100644
index 8bc92c9..0000000
--- a/deployment/roles/pcap_replay/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-Pcap Replay
-===========
-
-This project enables packet capture data to be replayed through a network interface to simulate live network traffic.  This can be used to support functional, performance, and load testing of Apache Metron.
-
-Getting Started
----------------
-
-To replay packet capture data, simply start the `pcap-replay` SysV service.  To do this run the following command.
-
-```
-service pcap-replay start
-```
-
-All additional options accepted by `tcpreplay` can be passed to the service script to modify how the network data is replayed.  For example, this makes it simple to control the amount and rate of data replayed during functional, performance and load testing.
-
-Example: Replay data at a rate of 10 mbps.
-
-```
-service pcap-replay start --mbps 10
-```
-
-Example: Replay data at a rate of 10 packets per second.
-
-```
-service pcap-replay start --pps 10
-```
-
-All nodes on the same subnet with their network interface set to promiscuous mode will then be able to capture the network traffic being replayed.  To validate, simply run something like the following.
-
-```
-tcpdump -i eth1
-```
-
-Data
-----
-
-An example packet capture file has been installed at `/opt/pcap-replay/example.pcap`.  By default, the network traffic contained within this file is continually replayed.   
-
-To replay your own packet capture data, simply add any number of files containing `libpcap` formatted packet capture data to `/opt/pcap-replay`.  The files must end with the `.pcap` extension.  To pick up newly installed files, simply restart the service.
-
-```
-service pcap-replay restart
-```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/defaults/main.yml b/deployment/roles/pcap_replay/defaults/main.yml
deleted file mode 100644
index b1fae1e..0000000
--- a/deployment/roles/pcap_replay/defaults/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-pcap_replay_interface: eth0
-pcap_path: /opt/pcap-replay
-tcpreplay_version: 4.1.1
-tcpreplay_prefix: /opt

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/meta/main.yml b/deployment/roles/pcap_replay/meta/main.yml
deleted file mode 100644
index 0c47853..0000000
--- a/deployment/roles/pcap_replay/meta/main.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - libselinux-python
-  - build-tools
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/tasks/main.yml b/deployment/roles/pcap_replay/tasks/main.yml
deleted file mode 100644
index 06919ed..0000000
--- a/deployment/roles/pcap_replay/tasks/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: tcpreplay.yml
-- include: service.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/tasks/service.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/tasks/service.yml b/deployment/roles/pcap_replay/tasks/service.yml
deleted file mode 100644
index 9e13e7f..0000000
--- a/deployment/roles/pcap_replay/tasks/service.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Create pcap directory
-  file: path={{ pcap_path }} state=directory mode=0755
-
-- name: Install init.d service script
-  template: src=pcap-replay dest=/etc/init.d/pcap-replay mode=0755

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/tasks/tcpreplay.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/tasks/tcpreplay.yml b/deployment/roles/pcap_replay/tasks/tcpreplay.yml
deleted file mode 100644
index e24dcf1..0000000
--- a/deployment/roles/pcap_replay/tasks/tcpreplay.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Download tcpreplay
-  get_url:
-    url: "https://github.com/appneta/tcpreplay/releases/download/v{{ tcpreplay_version }}/tcpreplay-{{ tcpreplay_version }}.tar.gz"
-    dest: "/tmp/tcpreplay-{{ tcpreplay_version }}.tar.gz"
-
-- name: Extract tcpreplay tarball
-  unarchive:
-    src: "/tmp/tcpreplay-{{ tcpreplay_version }}.tar.gz"
-    dest: /opt
-    copy: no
-    creates: "/opt/tcpreplay-{{ tcpreplay_version }}"
-
-- name: Compile and install tcpreplay
-  shell: "{{ item }}"
-  args:
-    chdir: "/opt/tcpreplay-{{ tcpreplay_version }}"
-    creates: "{{ tcpreplay_prefix }}/bin/tcpreplay"
-  with_items:
-    - "./configure --prefix={{ tcpreplay_prefix }}"
-    - make
-    - make install

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pcap_replay/templates/pcap-replay
----------------------------------------------------------------------
diff --git a/deployment/roles/pcap_replay/templates/pcap-replay b/deployment/roles/pcap_replay/templates/pcap-replay
deleted file mode 100644
index b9ae0c3..0000000
--- a/deployment/roles/pcap_replay/templates/pcap-replay
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pcap replay daemon
-# chkconfig: 345 20 80
-# description: Replays packet capture data stored in libpcap format
-# processname: pcap-replay
-#
-
-DAEMON_PATH="{{ pcap_path }}"
-PCAPIN=`ls $DAEMON_PATH/*.pcap 2> /dev/null`
-IFACE="{{ pcap_replay_interface }}"
-EXTRA_ARGS="${@:2}"
-DAEMON="{{ tcpreplay_prefix }}/bin/tcpreplay"
-DAEMONOPTS="--intf1=$IFACE --loop=0 $EXTRA_ARGS $PCAPIN"
-
-NAME=pcap-replay
-DESC="Replay packet capture data"
-PIDFILE=/var/run/$NAME.pid
-SCRIPTNAME=/etc/init.d/$NAME
-
-case "$1" in
-  start)
-    printf "%-50s" "Starting $NAME..."
-
-    # ensure that a pcap file exists to replay
-    if [ -z "$PCAPIN" ]; then
-      printf "%s: %s\n" "Fail: No pcap files found at " $DAEMON_PATH
-    else
-      # kick-off the daemon
-      cd $DAEMON_PATH
-      PID=`$DAEMON $DAEMONOPTS > /dev/null 2>&1 & echo $!`
-      if [ -z $PID ]; then
-          printf "%s\n" "Fail"
-      else
-          echo $PID > $PIDFILE
-          printf "%s\n" "Ok"
-      fi
-    fi
-  ;;
-
-  status)
-    printf "%-50s" "Checking $NAME..."
-    if [ -f $PIDFILE ]; then
-      PID=`cat $PIDFILE`
-      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-        printf "%s\n" "Process dead but pidfile exists"
-      else
-        echo "Running"
-      fi
-    else
-      printf "%s\n" "Service not running"
-    fi
-  ;;
-
-  stop)
-    printf "%-50s" "Stopping $NAME"
-    PID=`cat $PIDFILE`
-    cd $DAEMON_PATH
-    if [ -f $PIDFILE ]; then
-        kill -HUP $PID
-        printf "%s\n" "Ok"
-        rm -f $PIDFILE
-    else
-        printf "%s\n" "pidfile not found"
-    fi
-  ;;
-
-  restart)
-    $0 stop
-    $0 start
-  ;;
-
-  *)
-    echo "Usage: $0 {status|start|stop|restart}"
-    exit 1
-esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/meta/main.yml b/deployment/roles/pycapa/meta/main.yml
deleted file mode 100644
index 3aaa18d..0000000
--- a/deployment/roles/pycapa/meta/main.yml
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - ambari_gather_facts
-  - epel
-  - python-pip
-  - kafka-client

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/tasks/dependencies.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/tasks/dependencies.yml b/deployment/roles/pycapa/tasks/dependencies.yml
deleted file mode 100644
index 19efdcd..0000000
--- a/deployment/roles/pycapa/tasks/dependencies.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install epel-release repository
-  yum: name=epel-release
-
-- name: Install python and tcpdump
-  yum:
-    name: "{{item}}"
-  with_items:
-    - python
-    - tcpdump
-    - git
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10
-
-- name: Install pip
-  easy_install: name=pip

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/tasks/main.yml b/deployment/roles/pycapa/tasks/main.yml
deleted file mode 100644
index 76bdc1c..0000000
--- a/deployment/roles/pycapa/tasks/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- include: dependencies.yml
-- include: pycapa.yml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/tasks/pycapa.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/tasks/pycapa.yml b/deployment/roles/pycapa/tasks/pycapa.yml
deleted file mode 100644
index 4d1a64e..0000000
--- a/deployment/roles/pycapa/tasks/pycapa.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Clone pycapa repo
-  git: repo={{ pycapa_repo }} dest={{ pycapa_home }}
-
-- name: Build pycapa
-  shell: "{{ item }}"
-  args:
-    chdir: "{{ pycapa_home }}"
-  with_items:
-    - pip install -r requirements.txt
-    - pip install argparse
-    - python setup.py install
-
-- name: Turn on promiscuous mode for {{ pycapa_sniff_interface }}
-  shell: "ip link set {{ pycapa_sniff_interface }} promisc on"
-
-- name: Install service script
-  template: src=pycapa dest=/etc/init.d/pycapa mode=0755
-
-- name: Start pycapa
-  service: name=pycapa state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/templates/pycapa
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/templates/pycapa b/deployment/roles/pycapa/templates/pycapa
deleted file mode 100644
index fffa13f..0000000
--- a/deployment/roles/pycapa/templates/pycapa
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# metron pycapa service
-# chkconfig: 345 20 80
-# description: Metron Pycapa Packet Capture Daemon
-# processname: pycapa
-#
-NAME=pycapa
-DESC="Metron Pycapa Packet Capture"
-PIDFILE=/var/run/$NAME.pid
-SCRIPTNAME=/etc/init.d/$NAME
-LOGFILE="{{ pycapa_log }}"
-EXTRA_ARGS="${@:2}"
-DAEMON_PATH="{{ pycapa_home }}"
-DAEMON="/usr/bin/python"
-DAEMONOPTS="{{ pycapa_main }} -z {{ zookeeper_url }} -t {{ pycapa_topic }} -i {{ pycapa_sniff_interface }}"
-
-case "$1" in
-  start)
-    printf "%-50s" "Starting $NAME..."
-
-    # kick-off the daemon
-    cd $DAEMON_PATH
-    PID=`$DAEMON $DAEMONOPTS >> $LOGFILE 2>&1 & echo $!`
-    if [ -z $PID ]; then
-        printf "%s\n" "Fail"
-    else
-        echo $PID > $PIDFILE
-        printf "%s\n" "Ok"
-    fi
-  ;;
-
-  status)
-    printf "%-50s" "Checking $NAME..."
-    if [ -f $PIDFILE ]; then
-      PID=`cat $PIDFILE`
-      if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
-        printf "%s\n" "Process dead but pidfile exists"
-      else
-        echo "Running"
-      fi
-    else
-      printf "%s\n" "Service not running"
-    fi
-  ;;
-
-  stop)
-    printf "%-50s" "Stopping $NAME"
-    PID=`cat $PIDFILE`
-    cd $DAEMON_PATH
-    if [ -f $PIDFILE ]; then
-        kill -HUP $PID
-        printf "%s\n" "Ok"
-        rm -f $PIDFILE
-    else
-        printf "%s\n" "pidfile not found"
-    fi
-  ;;
-
-  restart)
-    $0 stop
-    $0 start
-  ;;
-
-  *)
-    echo "Usage: $0 {status|start|stop|restart}"
-    exit 1
-esac

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/pycapa/vars/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/pycapa/vars/main.yml b/deployment/roles/pycapa/vars/main.yml
deleted file mode 100644
index 5618a8e..0000000
--- a/deployment/roles/pycapa/vars/main.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-pycapa_repo: https://github.com/OpenSOC/pycapa.git
-pycapa_home: /usr/local/pycapa
-pycapa_main: "{{ pycapa_home }}/pycapa/pycapa_cli.py"
-pycapa_log: /var/log/pycapa.log
-pycapa_topic: pcap
-pycapa_sniff_interface: "{{ sniff_interface }}"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/python-pip/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/python-pip/tasks/main.yml b/deployment/roles/python-pip/tasks/main.yml
deleted file mode 100644
index 809aca4..0000000
--- a/deployment/roles/python-pip/tasks/main.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-- name: Install python-pip
-  yum:
-    name: python-pip
-    state: installed
-  register: result
-  until: result.rc == 0
-  retries: 5
-  delay: 10

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/sensor-test-mode/README.md
----------------------------------------------------------------------
diff --git a/deployment/roles/sensor-test-mode/README.md b/deployment/roles/sensor-test-mode/README.md
deleted file mode 100644
index 76a3220..0000000
--- a/deployment/roles/sensor-test-mode/README.md
+++ /dev/null
@@ -1,27 +0,0 @@
-Sensor Test Mode
-================
-
-A role that configures each of the sensors to produce the maximum amount of telemetry data.  This role is useful only for testing.  It can be useful to support functional, performance, and load testing of Apache Metron.
-
-The role does the following to maximize the amount of telemetry data produced by each Metron sensor.
-
-- Plays a packet capture file through a network interface to simulate live network traffic.
-- Configures [YAF](https://tools.netsa.cert.org/yaf/yaf.html) with `idle-timeout=0`.  This causes a flow record to be produced for every network packet received.
-- Configures [Snort](https://www.snort.org/) to produce an alert for every network packet received.
-
-Getting Started
----------------
-
-To enable the `sensor-test-mode` role apply the role to the `sensors` host group in your Ansible playbook.
-
-```
-- hosts: sensors
-  roles:
-    - role: sensor-test-mode
-```
-
-The role has also been added to the default `metron_install.yml` playbook so that it can be turned on/off with a property in both the local Virtualbox and the remote EC2 deployments.
-
-```
-sensor_test_mode: True
-```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/sensor-test-mode/files/example.pcap
----------------------------------------------------------------------
diff --git a/deployment/roles/sensor-test-mode/files/example.pcap b/deployment/roles/sensor-test-mode/files/example.pcap
deleted file mode 100644
index 06594ec..0000000
Binary files a/deployment/roles/sensor-test-mode/files/example.pcap and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/sensor-test-mode/meta/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/sensor-test-mode/meta/main.yml b/deployment/roles/sensor-test-mode/meta/main.yml
deleted file mode 100644
index 0e9e5b3..0000000
--- a/deployment/roles/sensor-test-mode/meta/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-dependencies:
-  - pcap_replay

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/sensor-test-mode/tasks/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/sensor-test-mode/tasks/main.yml b/deployment/roles/sensor-test-mode/tasks/main.yml
deleted file mode 100644
index 26b4e2a..0000000
--- a/deployment/roles/sensor-test-mode/tasks/main.yml
+++ /dev/null
@@ -1,56 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-#
-# load example pcap data to replay
-#
-- name: Install example pcap file
-  copy: src=example.pcap dest={{ pcap_path }}/
-
-- name: Start the pcap-replay service
-  service: name=pcap-replay state=restarted
-
-#
-# configure yaf to generate a flow record for every packet
-#
-- name: Stop running instances of yaf
-  become: True
-  service: name=yaf state=stopped
-
-- name: Configure yaf to generate a flow record for every network packet
-  become: True
-  service: name=yaf state=started args="--idle-timeout 0"
-
-#
-# configure snort to alert on every packet
-#
-- name: Configure snort to use a set of test rules
-  become: True
-  lineinfile:
-    dest: /etc/snort/snort.conf
-    line: "include $RULE_PATH/test.rules"
-
-- name: Create a snort alert for testing that alerts on every packet
-  become: True
-  lineinfile:
-    dest: /etc/snort/rules/test.rules
-    line: "alert tcp any any -> any any (msg:'snort test alert'; sid:999158; )"
-    create: yes
-
-- name: Restart snort
-  become: True
-  service: name=snortd state=restarted

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/defaults/main.yml
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/defaults/main.yml b/deployment/roles/snort/defaults/main.yml
deleted file mode 100644
index 6c6c0ea..0000000
--- a/deployment/roles/snort/defaults/main.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
----
-snort_version: 2.9.8.0-1
-daq_version: 2.0.6-1
-snort_topic: snort
-snort_alert_csv_path: /var/log/snort/alert.csv
-snort_src_url: "https://snort.org/downloads/archive/snort/snort-{{ snort_version }}.src.rpm"
-snort_community_rules_url: "https://www.snort.org/downloads/community/community-rules.tar.gz"
-dag_src_url: "https://snort.org/downloads/snort/daq-{{ daq_version }}.src.rpm"
-

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/deployment/roles/snort/files/flume-snort.conf
----------------------------------------------------------------------
diff --git a/deployment/roles/snort/files/flume-snort.conf b/deployment/roles/snort/files/flume-snort.conf
deleted file mode 100644
index 7dea516..0000000
--- a/deployment/roles/snort/files/flume-snort.conf
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one or more
-#  contributor license agreements.  See the NOTICE file distributed with
-#  this work for additional information regarding copyright ownership.
-#  The ASF licenses this file to You under the Apache License, Version 2.0
-#  (the "License"); you may not use this file except in compliance with
-#  the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-snort.sources = exec-source
-snort.channels = memory-channel
-snort.sinks = kafka-sink logger-sink
-
-# snort alerts are logged to a file
-snort.sources.exec-source.type = exec
-snort.sources.exec-source.command = tail -F /var/log/snort/alert
-snort.sources.exec-source.restart = true
-snort.sources.exec-source.logStdErr = true
-
-# snort alerts are sent to kafka
-snort.sinks.kafka-sink.type = org.apache.flume.sink.kafka.KafkaSink
-snort.sinks.kafka-sink.brokerList = localhost:9092
-snort.sinks.kafka-sink.topic = snort
-
-# also log events
-snort.sinks.logger-sink.type = logger
-
-# buffer events in memory
-snort.channels.memory-channel.type = memory
-snort.channels.memory-channel.capacity = 1000
-snort.channels.memory-channel.transactionCapacity = 100
-
-# bind the source and sink to the channel
-snort.sources.exec-source.channels = memory-channel
-snort.sinks.kafka-sink.channel = memory-channel
-snort.sinks.logger-sink.channel = memory-channel


[04/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/JavaCharStream.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/JavaCharStream.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/JavaCharStream.java
deleted file mode 100644
index eb3a999..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/JavaCharStream.java
+++ /dev/null
@@ -1,633 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 5.0 */
-/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=false */
-package org.apache.metron.ise.parser;
-
-/**
- * An implementation of interface CharStream, where the stream is assumed to
- * contain only ASCII characters (with java-like unicode escape processing).
- */
-
-class JavaCharStream
-{
-  /** Whether parser is static. */
-  public static final boolean staticFlag = false;
-
-  static final int hexval(char c) throws java.io.IOException {
-    switch(c)
-    {
-       case '0' :
-          return 0;
-       case '1' :
-          return 1;
-       case '2' :
-          return 2;
-       case '3' :
-          return 3;
-       case '4' :
-          return 4;
-       case '5' :
-          return 5;
-       case '6' :
-          return 6;
-       case '7' :
-          return 7;
-       case '8' :
-          return 8;
-       case '9' :
-          return 9;
-
-       case 'a' :
-       case 'A' :
-          return 10;
-       case 'b' :
-       case 'B' :
-          return 11;
-       case 'c' :
-       case 'C' :
-          return 12;
-       case 'd' :
-       case 'D' :
-          return 13;
-       case 'e' :
-       case 'E' :
-          return 14;
-       case 'f' :
-       case 'F' :
-          return 15;
-    }
-
-    throw new java.io.IOException(); // Should never come here
-  }
-
-/** Position in buffer. */
-  public int bufpos = -1;
-  int bufsize;
-  int available;
-  int tokenBegin;
-  protected int bufline[];
-  protected int bufcolumn[];
-
-  protected int column = 0;
-  protected int line = 1;
-
-  protected boolean prevCharIsCR = false;
-  protected boolean prevCharIsLF = false;
-
-  protected java.io.Reader inputStream;
-
-  protected char[] nextCharBuf;
-  protected char[] buffer;
-  protected int maxNextCharInd = 0;
-  protected int nextCharInd = -1;
-  protected int inBuf = 0;
-  protected int tabSize = 8;
-
-  protected void setTabSize(int i) { tabSize = i; }
-  protected int getTabSize(int i) { return tabSize; }
-
-  protected void ExpandBuff(boolean wrapAround)
-  {
-    char[] newbuffer = new char[bufsize + 2048];
-    int newbufline[] = new int[bufsize + 2048];
-    int newbufcolumn[] = new int[bufsize + 2048];
-
-    try
-    {
-      if (wrapAround)
-      {
-        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
-        System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
-        buffer = newbuffer;
-
-        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
-        System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
-        bufline = newbufline;
-
-        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
-        System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
-        bufcolumn = newbufcolumn;
-
-        bufpos += (bufsize - tokenBegin);
-    }
-    else
-    {
-        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
-        buffer = newbuffer;
-
-        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
-        bufline = newbufline;
-
-        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
-        bufcolumn = newbufcolumn;
-
-        bufpos -= tokenBegin;
-      }
-    }
-    catch (Throwable t)
-    {
-      throw new Error(t.getMessage());
-    }
-
-    available = (bufsize += 2048);
-    tokenBegin = 0;
-  }
-
-  protected void FillBuff() throws java.io.IOException
-  {
-    int i;
-    if (maxNextCharInd == 4096)
-      maxNextCharInd = nextCharInd = 0;
-
-    try {
-      if ((i = inputStream.read(nextCharBuf, maxNextCharInd,
-                                          4096 - maxNextCharInd)) == -1)
-      {
-        inputStream.close();
-        throw new java.io.IOException();
-      }
-      else
-         maxNextCharInd += i;
-      return;
-    }
-    catch(java.io.IOException e) {
-      if (bufpos != 0)
-      {
-        --bufpos;
-        backup(0);
-      }
-      else
-      {
-        bufline[bufpos] = line;
-        bufcolumn[bufpos] = column;
-      }
-      throw e;
-    }
-  }
-
-  protected char ReadByte() throws java.io.IOException
-  {
-    if (++nextCharInd >= maxNextCharInd)
-      FillBuff();
-
-    return nextCharBuf[nextCharInd];
-  }
-
-/** @return starting character for token. */
-  public char BeginToken() throws java.io.IOException
-  {
-    if (inBuf > 0)
-    {
-      --inBuf;
-
-      if (++bufpos == bufsize)
-        bufpos = 0;
-
-      tokenBegin = bufpos;
-      return buffer[bufpos];
-    }
-
-    tokenBegin = 0;
-    bufpos = -1;
-
-    return readChar();
-  }
-
-  protected void AdjustBuffSize()
-  {
-    if (available == bufsize)
-    {
-      if (tokenBegin > 2048)
-      {
-        bufpos = 0;
-        available = tokenBegin;
-      }
-      else
-        ExpandBuff(false);
-    }
-    else if (available > tokenBegin)
-      available = bufsize;
-    else if ((tokenBegin - available) < 2048)
-      ExpandBuff(true);
-    else
-      available = tokenBegin;
-  }
-
-  protected void UpdateLineColumn(char c)
-  {
-    column++;
-
-    if (prevCharIsLF)
-    {
-      prevCharIsLF = false;
-      line += (column = 1);
-    }
-    else if (prevCharIsCR)
-    {
-      prevCharIsCR = false;
-      if (c == '\n')
-      {
-        prevCharIsLF = true;
-      }
-      else
-        line += (column = 1);
-    }
-
-    switch (c)
-    {
-      case '\r' :
-        prevCharIsCR = true;
-        break;
-      case '\n' :
-        prevCharIsLF = true;
-        break;
-      case '\t' :
-        column--;
-        column += (tabSize - (column % tabSize));
-        break;
-      default :
-        break;
-    }
-
-    bufline[bufpos] = line;
-    bufcolumn[bufpos] = column;
-  }
-
-/** Read a character. */
-  public char readChar() throws java.io.IOException
-  {
-    if (inBuf > 0)
-    {
-      --inBuf;
-
-      if (++bufpos == bufsize)
-        bufpos = 0;
-
-      return buffer[bufpos];
-    }
-
-    char c;
-
-    if (++bufpos == available)
-      AdjustBuffSize();
-
-    if ((buffer[bufpos] = c = ReadByte()) == '\\')
-    {
-      UpdateLineColumn(c);
-
-      int backSlashCnt = 1;
-
-      for (;;) // Read all the backslashes
-      {
-        if (++bufpos == available)
-          AdjustBuffSize();
-
-        try
-        {
-          if ((buffer[bufpos] = c = ReadByte()) != '\\')
-          {
-            UpdateLineColumn(c);
-            // found a non-backslash char.
-            if ((c == 'u') && ((backSlashCnt & 1) == 1))
-            {
-              if (--bufpos < 0)
-                bufpos = bufsize - 1;
-
-              break;
-            }
-
-            backup(backSlashCnt);
-            return '\\';
-          }
-        }
-        catch(java.io.IOException e)
-        {
-	  // We are returning one backslash so we should only backup (count-1)
-          if (backSlashCnt > 1)
-            backup(backSlashCnt-1);
-
-          return '\\';
-        }
-
-        UpdateLineColumn(c);
-        backSlashCnt++;
-      }
-
-      // Here, we have seen an odd number of backslash's followed by a 'u'
-      try
-      {
-        while ((c = ReadByte()) == 'u')
-          ++column;
-
-        buffer[bufpos] = c = (char)(hexval(c) << 12 |
-                                    hexval(ReadByte()) << 8 |
-                                    hexval(ReadByte()) << 4 |
-                                    hexval(ReadByte()));
-
-        column += 4;
-      }
-      catch(java.io.IOException e)
-      {
-        throw new Error("Invalid escape character at line " + line +
-                                         " column " + column + ".");
-      }
-
-      if (backSlashCnt == 1)
-        return c;
-      else
-      {
-        backup(backSlashCnt - 1);
-        return '\\';
-      }
-    }
-    else
-    {
-      UpdateLineColumn(c);
-      return c;
-    }
-  }
-
-  @Deprecated
-  /**
-   * @deprecated
-   * @see #getEndColumn
-   */
-  public int getColumn() {
-    return bufcolumn[bufpos];
-  }
-
-  @Deprecated
-  /**
-   * @deprecated
-   * @see #getEndLine
-   */
-  public int getLine() {
-    return bufline[bufpos];
-  }
-
-/** Get end column. */
-  public int getEndColumn() {
-    return bufcolumn[bufpos];
-  }
-
-/** Get end line. */
-  public int getEndLine() {
-    return bufline[bufpos];
-  }
-
-/** @return column of token start */
-  public int getBeginColumn() {
-    return bufcolumn[tokenBegin];
-  }
-
-/** @return line number of token start */
-  public int getBeginLine() {
-    return bufline[tokenBegin];
-  }
-
-/** Retreat. */
-  public void backup(int amount) {
-
-    inBuf += amount;
-    if ((bufpos -= amount) < 0)
-      bufpos += bufsize;
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.Reader dstream,
-                 int startline, int startcolumn, int buffersize)
-  {
-    inputStream = dstream;
-    line = startline;
-    column = startcolumn - 1;
-
-    available = bufsize = buffersize;
-    buffer = new char[buffersize];
-    bufline = new int[buffersize];
-    bufcolumn = new int[buffersize];
-    nextCharBuf = new char[4096];
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.Reader dstream,
-                                        int startline, int startcolumn)
-  {
-    this(dstream, startline, startcolumn, 4096);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.Reader dstream)
-  {
-    this(dstream, 1, 1, 4096);
-  }
-/** Reinitialise. */
-  public void ReInit(java.io.Reader dstream,
-                 int startline, int startcolumn, int buffersize)
-  {
-    inputStream = dstream;
-    line = startline;
-    column = startcolumn - 1;
-
-    if (buffer == null || buffersize != buffer.length)
-    {
-      available = bufsize = buffersize;
-      buffer = new char[buffersize];
-      bufline = new int[buffersize];
-      bufcolumn = new int[buffersize];
-      nextCharBuf = new char[4096];
-    }
-    prevCharIsLF = prevCharIsCR = false;
-    tokenBegin = inBuf = maxNextCharInd = 0;
-    nextCharInd = bufpos = -1;
-  }
-
-/** Reinitialise. */
-  public void ReInit(java.io.Reader dstream,
-                                        int startline, int startcolumn)
-  {
-    ReInit(dstream, startline, startcolumn, 4096);
-  }
-
-/** Reinitialise. */
-  public void ReInit(java.io.Reader dstream)
-  {
-    ReInit(dstream, 1, 1, 4096);
-  }
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
-  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
-  {
-    this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream, int startline,
-  int startcolumn, int buffersize)
-  {
-    this(new java.io.InputStreamReader(dstream), startline, startcolumn, 4096);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
-                        int startcolumn) throws java.io.UnsupportedEncodingException
-  {
-    this(dstream, encoding, startline, startcolumn, 4096);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream, int startline,
-                        int startcolumn)
-  {
-    this(dstream, startline, startcolumn, 4096);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
-  {
-    this(dstream, encoding, 1, 1, 4096);
-  }
-
-/** Constructor. */
-  public JavaCharStream(java.io.InputStream dstream)
-  {
-    this(dstream, 1, 1, 4096);
-  }
-
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
-  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
-  {
-    ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
-  }
-
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, int startline,
-  int startcolumn, int buffersize)
-  {
-    ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
-  }
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
-                     int startcolumn) throws java.io.UnsupportedEncodingException
-  {
-    ReInit(dstream, encoding, startline, startcolumn, 4096);
-  }
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, int startline,
-                     int startcolumn)
-  {
-    ReInit(dstream, startline, startcolumn, 4096);
-  }
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
-  {
-    ReInit(dstream, encoding, 1, 1, 4096);
-  }
-
-/** Reinitialise. */
-  public void ReInit(java.io.InputStream dstream)
-  {
-    ReInit(dstream, 1, 1, 4096);
-  }
-
-  /** @return token image as String */
-  public String GetImage()
-  {
-    if (bufpos >= tokenBegin)
-      return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
-    else
-      return new String(buffer, tokenBegin, bufsize - tokenBegin) +
-                              new String(buffer, 0, bufpos + 1);
-  }
-
-  /** @return suffix */
-  public char[] GetSuffix(int len)
-  {
-    char[] ret = new char[len];
-
-    if ((bufpos + 1) >= len)
-      System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
-    else
-    {
-      System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
-                                                        len - bufpos - 1);
-      System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
-    }
-
-    return ret;
-  }
-
-  /** Set buffers back to null when finished. */
-  public void Done()
-  {
-    nextCharBuf = null;
-    buffer = null;
-    bufline = null;
-    bufcolumn = null;
-  }
-
-  /**
-   * Method to adjust line and column numbers for the start of a token.
-   */
-  public void adjustBeginLineColumn(int newLine, int newCol)
-  {
-    int start = tokenBegin;
-    int len;
-
-    if (bufpos >= tokenBegin)
-    {
-      len = bufpos - tokenBegin + inBuf + 1;
-    }
-    else
-    {
-      len = bufsize - tokenBegin + bufpos + 1 + inBuf;
-    }
-
-    int i = 0, j = 0, k = 0;
-    int nextColDiff = 0, columnDiff = 0;
-
-    while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
-    {
-      bufline[j] = newLine;
-      nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
-      bufcolumn[j] = newCol + columnDiff;
-      columnDiff = nextColDiff;
-      i++;
-    }
-
-    if (i < len)
-    {
-      bufline[j] = newLine++;
-      bufcolumn[j] = newCol + columnDiff;
-
-      while (i++ < len)
-      {
-        if (bufline[j = start % bufsize] != bufline[++start % bufsize])
-          bufline[j] = newLine++;
-        else
-          bufline[j] = newLine;
-      }
-    }
-
-    line = bufline[j];
-    column = bufcolumn[j];
-  }
-
-}
-/* JavaCC - OriginalChecksum=96a5b0b0fa09286690f250998f047719 (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ParseException.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ParseException.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ParseException.java
deleted file mode 100644
index 4143060..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ParseException.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 5.0 */
-/* JavaCCOptions:KEEP_LINE_COL=null */
-package org.apache.metron.ise.parser;
-
-/**
- * This exception is thrown when parse errors are encountered.
- * You can explicitly create objects of this exception type by
- * calling the method generateParseException in the generated
- * parser.
- *
- * You can modify this class to customize your error reporting
- * mechanisms so long as you retain the public fields.
- */
-public class ParseException extends Exception {
-
-  /**
-   * The version identifier for this Serializable class.
-   * Increment only if the <i>serialized</i> form of the
-   * class changes.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * This constructor is used by the method "generateParseException"
-   * in the generated parser.  Calling this constructor generates
-   * a new object of this type with the fields "currentToken",
-   * "expectedTokenSequences", and "tokenImage" set.
-   */
-  public ParseException(Token currentTokenVal,
-                        int[][] expectedTokenSequencesVal,
-                        String[] tokenImageVal
-                       )
-  {
-    super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal));
-    currentToken = currentTokenVal;
-    expectedTokenSequences = expectedTokenSequencesVal;
-    tokenImage = tokenImageVal;
-  }
-
-  /**
-   * The following constructors are for use by you for whatever
-   * purpose you can think of.  Constructing the exception in this
-   * manner makes the exception behave in the normal way - i.e., as
-   * documented in the class "Throwable".  The fields "errorToken",
-   * "expectedTokenSequences", and "tokenImage" do not contain
-   * relevant information.  The JavaCC generated code does not use
-   * these constructors.
-   */
-
-  public ParseException() {
-    super();
-  }
-
-  /** Constructor with message. */
-  public ParseException(String message) {
-    super(message);
-  }
-
-
-  /**
-   * This is the last token that has been consumed successfully.  If
-   * this object has been created due to a parse error, the token
-   * followng this token will (therefore) be the first error token.
-   */
-  public Token currentToken;
-
-  /**
-   * Each entry in this array is an array of integers.  Each array
-   * of integers represents a sequence of tokens (by their ordinal
-   * values) that is expected at this point of the parse.
-   */
-  public int[][] expectedTokenSequences;
-
-  /**
-   * This is a reference to the "tokenImage" array of the generated
-   * parser within which the parse error occurred.  This array is
-   * defined in the generated ...Constants interface.
-   */
-  public String[] tokenImage;
-
-  /**
-   * It uses "currentToken" and "expectedTokenSequences" to generate a parse
-   * error message and returns it.  If this object has been created
-   * due to a parse error, and you do not catch it (it gets thrown
-   * from the parser) the correct error message
-   * gets displayed.
-   */
-  private static String initialise(Token currentToken,
-                           int[][] expectedTokenSequences,
-                           String[] tokenImage) {
-    String eol = System.getProperty("line.separator", "\n");
-    StringBuffer expected = new StringBuffer();
-    int maxSize = 0;
-    for (int i = 0; i < expectedTokenSequences.length; i++) {
-      if (maxSize < expectedTokenSequences[i].length) {
-        maxSize = expectedTokenSequences[i].length;
-      }
-      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
-        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' ');
-      }
-      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
-        expected.append("...");
-      }
-      expected.append(eol).append("    ");
-    }
-    String retval = "Encountered \"";
-    Token tok = currentToken.next;
-    for (int i = 0; i < maxSize; i++) {
-      if (i != 0) retval += " ";
-      if (tok.kind == 0) {
-        retval += tokenImage[0];
-        break;
-      }
-      retval += " " + tokenImage[tok.kind];
-      retval += " \"";
-      retval += add_escapes(tok.image);
-      retval += " \"";
-      tok = tok.next;
-    }
-    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
-    retval += "." + eol;
-    if (expectedTokenSequences.length == 1) {
-      retval += "Was expecting:" + eol + "    ";
-    } else {
-      retval += "Was expecting one of:" + eol + "    ";
-    }
-    retval += expected.toString();
-    return retval;
-  }
-
-  /**
-   * The end of line string for this machine.
-   */
-  protected String eol = System.getProperty("line.separator", "\n");
-
-  /**
-   * Used to convert raw characters to their escaped version
-   * when these raw version cannot be used as part of an ASCII
-   * string literal.
-   */
-  static String add_escapes(String str) {
-      StringBuffer retval = new StringBuffer();
-      char ch;
-      for (int i = 0; i < str.length(); i++) {
-        switch (str.charAt(i))
-        {
-           case 0 :
-              continue;
-           case '\b':
-              retval.append("\\b");
-              continue;
-           case '\t':
-              retval.append("\\t");
-              continue;
-           case '\n':
-              retval.append("\\n");
-              continue;
-           case '\f':
-              retval.append("\\f");
-              continue;
-           case '\r':
-              retval.append("\\r");
-              continue;
-           case '\"':
-              retval.append("\\\"");
-              continue;
-           case '\'':
-              retval.append("\\\'");
-              continue;
-           case '\\':
-              retval.append("\\\\");
-              continue;
-           default:
-              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-                 String s = "0000" + Integer.toString(ch, 16);
-                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-              } else {
-                 retval.append(ch);
-              }
-              continue;
-        }
-      }
-      return retval.toString();
-   }
-
-}
-/* JavaCC - OriginalChecksum=f9f7217056f99de5708d01ebd497dede (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/Token.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/Token.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/Token.java
deleted file mode 100644
index 349baf4..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/Token.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. Token.java Version 5.0 */
-/* JavaCCOptions:TOKEN_EXTENDS=,KEEP_LINE_COL=null,SUPPORT_CLASS_VISIBILITY_PUBLIC=false */
-package org.apache.metron.ise.parser;
-
-/**
- * Describes the input token stream.
- */
-
-class Token implements java.io.Serializable {
-
-  /**
-   * The version identifier for this Serializable class.
-   * Increment only if the <i>serialized</i> form of the
-   * class changes.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * An integer that describes the kind of this token.  This numbering
-   * system is determined by JavaCCParser, and a table of these numbers is
-   * stored in the file ...Constants.java.
-   */
-  public int kind;
-
-  /** The line number of the first character of this Token. */
-  public int beginLine;
-  /** The column number of the first character of this Token. */
-  public int beginColumn;
-  /** The line number of the last character of this Token. */
-  public int endLine;
-  /** The column number of the last character of this Token. */
-  public int endColumn;
-
-  /**
-   * The string image of the token.
-   */
-  public String image;
-
-  /**
-   * A reference to the next regular (non-special) token from the input
-   * stream.  If this is the last token from the input stream, or if the
-   * token manager has not read tokens beyond this one, this field is
-   * set to null.  This is true only if this token is also a regular
-   * token.  Otherwise, see below for a description of the contents of
-   * this field.
-   */
-  public Token next;
-
-  /**
-   * This field is used to access special tokens that occur prior to this
-   * token, but after the immediately preceding regular (non-special) token.
-   * If there are no such special tokens, this field is set to null.
-   * When there are more than one such special token, this field refers
-   * to the last of these special tokens, which in turn refers to the next
-   * previous special token through its specialToken field, and so on
-   * until the first special token (whose specialToken field is null).
-   * The next fields of special tokens refer to other special tokens that
-   * immediately follow it (without an intervening regular token).  If there
-   * is no such token, this field is null.
-   */
-  public Token specialToken;
-
-  /**
-   * An optional attribute value of the Token.
-   * Tokens which are not used as syntactic sugar will often contain
-   * meaningful values that will be used later on by the compiler or
-   * interpreter. This attribute value is often different from the image.
-   * Any subclass of Token that actually wants to return a non-null value can
-   * override this method as appropriate.
-   */
-  public Object getValue() {
-    return null;
-  }
-
-  /**
-   * No-argument constructor
-   */
-  public Token() {}
-
-  /**
-   * Constructs a new token for the specified Image.
-   */
-  public Token(int kind)
-  {
-    this(kind, null);
-  }
-
-  /**
-   * Constructs a new token for the specified Image and Kind.
-   */
-  public Token(int kind, String image)
-  {
-    this.kind = kind;
-    this.image = image;
-  }
-
-  /**
-   * Returns the image.
-   */
-  public String toString()
-  {
-    return image;
-  }
-
-  /**
-   * Returns a new Token object, by default. However, if you want, you
-   * can create and return subclass objects based on the value of ofKind.
-   * Simply add the cases to the switch for all those special cases.
-   * For example, if you have a subclass of Token called IDToken that
-   * you want to create if ofKind is ID, simply add something like :
-   *
-   *    case MyParserConstants.ID : return new IDToken(ofKind, image);
-   *
-   * to the following switch statement. Then you can cast matchedToken
-   * variable to the appropriate type and use sit in your lexical actions.
-   */
-  public static Token newToken(int ofKind, String image)
-  {
-    switch(ofKind)
-    {
-      default : return new Token(ofKind, image);
-    }
-  }
-
-  public static Token newToken(int ofKind)
-  {
-    return newToken(ofKind, null);
-  }
-
-}
-/* JavaCC - OriginalChecksum=99daf0baa94b6c270eea5be0575db6aa (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/TokenMgrError.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/TokenMgrError.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/TokenMgrError.java
deleted file mode 100644
index 2e1f52c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/TokenMgrError.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
-/* JavaCCOptions: */
-package org.apache.metron.ise.parser;
-
-/** Token Manager Error. */
-class TokenMgrError extends Error
-{
-
-  /**
-   * The version identifier for this Serializable class.
-   * Increment only if the <i>serialized</i> form of the
-   * class changes.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /*
-   * Ordinals for various reasons why an Error of this type can be thrown.
-   */
-
-  /**
-   * Lexical error occurred.
-   */
-  static final int LEXICAL_ERROR = 0;
-
-  /**
-   * An attempt was made to create a second instance of a static token manager.
-   */
-  static final int STATIC_LEXER_ERROR = 1;
-
-  /**
-   * Tried to change to an invalid lexical state.
-   */
-  static final int INVALID_LEXICAL_STATE = 2;
-
-  /**
-   * Detected (and bailed out of) an infinite loop in the token manager.
-   */
-  static final int LOOP_DETECTED = 3;
-
-  /**
-   * Indicates the reason why the exception is thrown. It will have
-   * one of the above 4 values.
-   */
-  int errorCode;
-
-  /**
-   * Replaces unprintable characters by their escaped (or unicode escaped)
-   * equivalents in the given string
-   */
-  protected static final String addEscapes(String str) {
-    StringBuffer retval = new StringBuffer();
-    char ch;
-    for (int i = 0; i < str.length(); i++) {
-      switch (str.charAt(i))
-      {
-        case 0 :
-          continue;
-        case '\b':
-          retval.append("\\b");
-          continue;
-        case '\t':
-          retval.append("\\t");
-          continue;
-        case '\n':
-          retval.append("\\n");
-          continue;
-        case '\f':
-          retval.append("\\f");
-          continue;
-        case '\r':
-          retval.append("\\r");
-          continue;
-        case '\"':
-          retval.append("\\\"");
-          continue;
-        case '\'':
-          retval.append("\\\'");
-          continue;
-        case '\\':
-          retval.append("\\\\");
-          continue;
-        default:
-          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-            String s = "0000" + Integer.toString(ch, 16);
-            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-          } else {
-            retval.append(ch);
-          }
-          continue;
-      }
-    }
-    return retval.toString();
-  }
-
-  /**
-   * Returns a detailed message for the Error when it is thrown by the
-   * token manager to indicate a lexical error.
-   * Parameters :
-   *    EOFSeen     : indicates if EOF caused the lexical error
-   *    curLexState : lexical state in which this error occurred
-   *    errorLine   : line number when the error occurred
-   *    errorColumn : column number when the error occurred
-   *    errorAfter  : prefix that was seen before this error occurred
-   *    curchar     : the offending character
-   * Note: You can customize the lexical error message by modifying this method.
-   */
-  protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
-    return("Lexical error at line " +
-          errorLine + ", column " +
-          errorColumn + ".  Encountered: " +
-          (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
-          "after : \"" + addEscapes(errorAfter) + "\"");
-  }
-
-  /**
-   * You can also modify the body of this method to customize your error messages.
-   * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-   * of end-users concern, so you can return something like :
-   *
-   *     "Internal Error : Please file a bug report .... "
-   *
-   * from this method for such cases in the release version of your parser.
-   */
-  public String getMessage() {
-    return super.getMessage();
-  }
-
-  /*
-   * Constructors of various flavors follow.
-   */
-
-  /** No arg constructor. */
-  public TokenMgrError() {
-  }
-
-  /** Constructor with message and reason. */
-  public TokenMgrError(String message, int reason) {
-    super(message);
-    errorCode = reason;
-  }
-
-  /** Full Constructor. */
-  public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
-    this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
-  }
-}
-/* JavaCC - OriginalChecksum=5fbf6813c9d6a1d713f1d4a002af1322 (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONDecoderHelper.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONDecoderHelper.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONDecoderHelper.java
deleted file mode 100644
index 366e655..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONDecoderHelper.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.json.serialization;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-
-/**
- * Helper class used for decoding objects from byte arrays 
- *
- * @author kiran
- * 
- */
-public class JSONDecoderHelper {
-
-	public static String getString(DataInputStream data) throws IOException {
-
-		int strSize = data.readInt();
-
-		byte[] bytes = new byte[strSize];
-		data.read(bytes);
-		return new String(bytes);
-	}
-
-	public static Number getNumber(DataInputStream data) throws IOException {
-		// Treating all ints,shorts, long as long.
-		// Everything else as Double
-		int flag = data.readByte();
-		if (flag == 0)
-			return data.readDouble();
-
-		return data.readLong();
-	}
-
-	public static Boolean getBoolean(DataInputStream data) throws IOException {
-
-		return data.readBoolean();
-	}
-
-	@SuppressWarnings("unchecked")
-	public static JSONArray getArray(DataInputStream data) throws IOException {
-		// TODO Auto-generated method stub
-		JSONArray output = new JSONArray();
-		int size = data.readInt();
-
-		for (int i = 0; i < size; i++) {
-			Object value = getObject(data);
-			output.add(value);
-		}
-
-		return output;
-	}
-
-	@SuppressWarnings("unchecked")
-	public static JSONObject getJSON(DataInputStream data) throws IOException {
-		// TODO Auto-generated method stub
-		JSONObject output = new JSONObject();
-		int size = data.readInt();
-
-		for (int i = 0; i < size; i++) {
-			String key = (String) getObject(data);
-			Object value = getObject(data);
-			output.put(key, value);
-		}
-
-		return output;
-	}
-
-	public static Object getObject(DataInputStream data) throws IOException {
-		// TODO Auto-generated method stub
-		byte objID = data.readByte();
-
-		if (objID == JSONKafkaSerializer.StringID)
-			return getString(data);
-
-		if (objID == JSONKafkaSerializer.JSONObjectID)
-			return getJSON(data);
-
-		if (objID == JSONKafkaSerializer.NumberID)
-			return getNumber(data);
-
-		if (objID == JSONKafkaSerializer.BooleanID)
-			return getBoolean(data);
-
-		if (objID == JSONKafkaSerializer.NULLID)
-			return null;
-
-		if (objID == JSONKafkaSerializer.JSONArrayID)
-			return getArray(data);
-
-		return null;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONEncoderHelper.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONEncoderHelper.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONEncoderHelper.java
deleted file mode 100644
index 697b7cc..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONEncoderHelper.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.json.serialization;
-
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.commons.configuration.Configuration;
-import org.json.simple.JSONObject;
-
-/**
- * Helper class used for encoding objects into byte arrays 
- *
- * @author kiran
- * 
- */
-public class JSONEncoderHelper {
-
-	public static void putNull(DataOutputStream data, Object value)
-			throws IOException {
-		// TODO Auto-generated method stub
-		data.writeByte(JSONKafkaSerializer.NULLID);
-
-	}
-
-	public static void putBoolean(DataOutputStream data, Boolean value)
-			throws IOException {
-		// TODO Auto-generated method stub
-		data.writeByte(JSONKafkaSerializer.BooleanID);
-		data.writeBoolean(value);
-
-	}
-
-	public static void putNumber(DataOutputStream data, Number value)
-			throws IOException {
-		// TODO Auto-generated method stub
-		data.writeByte(JSONKafkaSerializer.NumberID);
-		if (value instanceof Double) {
-			data.writeByte(0);
-			data.writeDouble((Double) value);
-			return;
-		}
-		data.writeByte(1);
-		data.writeLong((Long) value);
-
-	}
-
-	public static void putString(DataOutputStream data, String str)
-			throws IOException {
-		// String ID is 1
-		data.writeByte(JSONKafkaSerializer.StringID);
-		data.writeInt(str.length());
-		data.write(str.getBytes());
-
-	}
-
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	public static JSONObject getJSON(Configuration config) {
-
-		JSONObject output = new JSONObject();
-
-		if (!config.isEmpty()) {
-			Iterator it = config.getKeys();
-			while (it.hasNext()) {
-				String k = (String) it.next();
-				// noinspection unchecked
-				String v = (String) config.getProperty(k);
-				output.put(k, v);
-			}
-		}
-		return output;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKafkaSerializer.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKafkaSerializer.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKafkaSerializer.java
deleted file mode 100644
index 1ecaefb..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKafkaSerializer.java
+++ /dev/null
@@ -1,266 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.json.serialization;
-
-import static org.apache.metron.json.serialization.JSONDecoderHelper.getObject;
-import static org.apache.metron.json.serialization.JSONEncoderHelper.putBoolean;
-import static org.apache.metron.json.serialization.JSONEncoderHelper.putNull;
-import static org.apache.metron.json.serialization.JSONEncoderHelper.putNumber;
-import static org.apache.metron.json.serialization.JSONEncoderHelper.putString;
-
-import java.io.BufferedReader;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import kafka.serializer.Decoder;
-import kafka.serializer.Encoder;
-import kafka.utils.VerifiableProperties;
-
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-
-/**
- * JSON Serailization class for kafka. Implements kafka Encoder and Decoder
- * String, JSONObject, Number, Boolean,JSONObject.NULL JSONArray
- * 
- * @author kiran
- * 
- */
-
-public class JSONKafkaSerializer implements Encoder<JSONObject>,
-		Decoder<JSONObject> {
-
-	// Object ID's for different types
-	public static final byte StringID = 1;
-	public static final byte JSONObjectID = 2;
-	public static final byte NumberID = 3;
-	public static final byte BooleanID = 4;
-	public static final byte NULLID = 5;
-	public static final byte JSONArrayID = 6;
-
-	public JSONKafkaSerializer() {
-		// Blank constructor needed by Storm
-
-	}
-
-	public JSONKafkaSerializer(VerifiableProperties props) {
-		// Do Nothing. constructor needed by Storm
-	}
-
-	/*
-	 * Main Method for unit testing
-	 */
-	public static void main(String args[]) throws IOException {
-
-		//String Input = "/home/kiran/git/metron-streaming/Metron-Common/BroExampleOutput";
-		String Input = "/tmp/test";
-
-		BufferedReader reader = new BufferedReader(new FileReader(Input));
-
-		// String jsonString =
-		// "{\"dns\":{\"ts\":[14.0,12,\"kiran\"],\"uid\":\"abullis@mail.csuchico.edu\",\"id.orig_h\":\"10.122.196.204\", \"endval\":null}}";
-		String jsonString ="";// reader.readLine();
-		JSONParser parser = new JSONParser();
-		JSONObject json = null;
-		int count = 1;
-
-		if (args.length > 0)
-			count = Integer.parseInt(args[0]);
-
-		//while ((jsonString = reader.readLine()) != null) 
-		jsonString = reader.readLine();
-		{
-			try {
-				json = (JSONObject) parser.parse(jsonString);
-				System.out.println(json);
-			} catch (ParseException e) {
-				// TODO Auto-generated catch block
-				e.printStackTrace();
-			}
-
-			String jsonString2 = null;
-
-			JSONKafkaSerializer ser = new JSONKafkaSerializer();
-
-			for (int i = 0; i < count; i++) {
-				byte[] bytes = ser.toBytes(json);
-
-				jsonString2 = ((JSONObject)ser.fromBytes(bytes)).toJSONString();
-			}
-			System.out.println((jsonString2));
-			System.out
-					.println(jsonString2.equalsIgnoreCase(json.toJSONString()));
-		}
-
-	}
-
-	@SuppressWarnings("unchecked")
-	public JSONObject fromBytes(byte[] input) {
-
-		ByteArrayInputStream inputBuffer = new ByteArrayInputStream(input);
-		DataInputStream data = new DataInputStream(inputBuffer);
-
-		JSONObject output = new JSONObject();
-
-		try {
-			int mapSize = data.readInt();
-
-			for (int i = 0; i < mapSize; i++) {
-				String key = (String) getObject(data);
-				// System.out.println("Key Found"+ key);
-				Object val = getObject(data);
-				output.put(key, val);
-			}
-
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-			return null;
-		}
-
-		return output;
-	}
-	
-	@SuppressWarnings("unchecked")
-	public JSONObject fromBytes1(DataInputStream data) {
-
-		//ByteArrayInputStream inputBuffer = new ByteArrayInputStream(input);
-		//DataInputStream data = new DataInputStream(inputBuffer);
-
-		JSONObject output = new JSONObject();
-
-		try {
-			int mapSize = data.readInt();
-
-			for (int i = 0; i < mapSize; i++) {
-				String key = (String) getObject(data);
-				// System.out.println("Key Found"+ key);
-				Object val = getObject(data);
-				output.put(key, val);
-			}
-
-		} catch (IOException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-			return null;
-		}
-
-		return output;
-	}
-
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public byte[] toBytes(JSONObject input) {
-
-		ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream();
-		DataOutputStream data = new DataOutputStream(outputBuffer);
-
-		Iterator it = input.entrySet().iterator();
-		try {
-
-			// write num of entries into output. 
-			//each KV pair is counted as an entry
-			data.writeInt(input.size());
-
-			// Write every single entry in hashmap
-			//Assuming key to be String.
-			while (it.hasNext()) {
-				Map.Entry<String, Object> entry = (Entry<String, Object>) it
-						.next();
-				putObject(data, entry.getKey());
-				putObject(data, entry.getValue());
-			}
-		} catch (Exception e) {
-			e.printStackTrace();
-			return null;
-		}
-
-		return outputBuffer.toByteArray();
-	}
-
-	private void putObject(DataOutputStream data, Object value)
-			throws IOException {
-
-		//Check object type and invoke appropriate method
-		if (value instanceof JSONObject) {
-			putJSON(data, (JSONObject) value);
-			return;
-
-		}
-
-		if (value instanceof String) {
-			putString(data, (String) value);
-			return;
-		}
-
-		if (value instanceof Number) {
-			putNumber(data, (Number) value);
-			return;
-		}
-
-		if (value instanceof Boolean) {
-			putBoolean(data, (Boolean) value);
-			return;
-		}
-
-		if (value == null) {
-			putNull(data, value);
-			return;
-		}
-
-		if (value instanceof JSONArray) {
-			putArray(data, (JSONArray) value);
-			return;
-		}
-
-	}
-
-	private void putJSON(DataOutputStream data, JSONObject value)
-			throws IOException {
-
-		// JSON ID is 2
-		data.writeByte(JSONKafkaSerializer.JSONObjectID);
-		data.write(toBytes(value));
-
-	}
-
-	public void putArray(DataOutputStream data, JSONArray array)
-			throws IOException {
-
-		data.writeByte(JSONKafkaSerializer.JSONArrayID);
-
-		data.writeInt(array.size());
-
-		for (Object o : array)
-			putObject(data, o);
-
-	}
-
-
-	
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKryoSerializer.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKryoSerializer.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKryoSerializer.java
deleted file mode 100644
index 37e2265..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/json/serialization/JSONKryoSerializer.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.metron.json.serialization;
-
-import org.json.simple.JSONObject;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-
-/**
- * @author kiran Custom Serializer to help Storm encode and decode JSONObjects
- */
-
-public class JSONKryoSerializer extends
-		com.esotericsoftware.kryo.Serializer<JSONObject> {
-
-	// JSONKafkaSerializer object actually does the heavy lifting.
-	private JSONKafkaSerializer jsonSerde = new JSONKafkaSerializer();
-
-	@Override
-	public void write(Kryo kryo, Output output, JSONObject json) {
-
-		byte[] bytes = jsonSerde.toBytes(json);
-		output.writeInt(bytes.length);
-		output.write(bytes);
-	}
-
-	@Override
-	public JSONObject read(Kryo kryo, Input input, Class<JSONObject> type) {
-
-		// Get number of Entries
-		int size = input.readInt();
-		byte[] bytes = input.readBytes(size);
-
-		JSONObject json = jsonSerde.fromBytes(bytes);
-
-		return json;
-
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MetricReporter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MetricReporter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MetricReporter.java
deleted file mode 100644
index a36fed7..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MetricReporter.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.metrics;
-
-import com.codahale.metrics.ConsoleReporter;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.JmxReporter;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.graphite.Graphite;
-import com.codahale.metrics.graphite.GraphiteReporter;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.log4j.Logger;
-
-public class MetricReporter {
-
-	final MetricRegistry metrics = new MetricRegistry();
-	private ConsoleReporter consoleReporter = null;
-	private JmxReporter jmxReporter = null;
-	private GraphiteReporter graphiteReporter = null;
-
-	private Class _klas;
-	private String _topologyname = "topology";
-
-	/** The Constant LOGGER. */
-	private static final Logger _Logger = Logger
-			.getLogger(MetricReporter.class);
-
-	public void initialize(Map config, Class klas) {
-
-		_Logger.debug("===========Initializing Reporter");
-		this._klas = klas;
-		if (config.get("topologyname")!=null)
-			_topologyname = (String) config.get("topologyname");
-			
-		this.start(config);
-
-	}
-
-	public Counter registerCounter(String countername) {
-		return metrics.counter(MetricRegistry.name(_topologyname,_klas.getCanonicalName(), countername));
-	}
-
-	public void start(Map config) {
-		try {
-			if (config.get("reporter.jmx").equals("true")) {
-				jmxReporter = JmxReporter.forRegistry(metrics).build();
-				jmxReporter.start();
-			}
-
-			if (config.get("reporter.console").equals("true")) {
-				consoleReporter = ConsoleReporter.forRegistry(metrics).build();
-				consoleReporter.start(1, TimeUnit.SECONDS);
-			}
-
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
-
-		try {
-			if (config.get("reporter.graphite").equals("true")) {
-				String address = (String) config.get("graphite.address");
-				int port = Integer.parseInt((String) config
-						.get("graphite.port"));
-
-				_Logger.debug("===========Graphite ADDRESS: " + address + ":"
-						+ port);
-
-				Graphite graphite = new Graphite(new InetSocketAddress(address,
-						port));
-				// Check if graphite connectivity works
-				graphite.connect();
-				graphite.close();
-
-				graphiteReporter = GraphiteReporter.forRegistry(metrics).build(
-						graphite);
-
-				_Logger.debug("---------******STARTING GRAPHITE*********---------");
-				graphiteReporter.start(1, TimeUnit.SECONDS);
-			}
-		}
-
-		catch (IOException io) {
-			_Logger.warn("Unable to Connect to Graphite");
-		}
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MyMetricReporter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MyMetricReporter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MyMetricReporter.java
deleted file mode 100644
index 8175487..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/MyMetricReporter.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.metrics;
-
-import com.codahale.metrics.ConsoleReporter;
-import com.codahale.metrics.JmxReporter;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.graphite.GraphiteReporter;
-
-public class MyMetricReporter extends MetricReporter {
-	
-	final MetricRegistry metrics = new MetricRegistry();
-	private ConsoleReporter consoleReporter = null;
-	private JmxReporter jmxReporter=null; 
-	private GraphiteReporter graphiteReporter = null;
-
-	
-	public MyMetricReporter(boolean withConsole, boolean withJMX, boolean witGraphite)
-	{
-		consoleReporter = ConsoleReporter.forRegistry(metrics).build();
-		jmxReporter = JmxReporter.forRegistry(metrics).build();
-		graphiteReporter = GraphiteReporter.forRegistry(metrics).build(null);
-	}
-	
-
-	public static void main(String[] args) {
-		// TODO Auto-generated method stub
-
-	}
-
-	public void report() {
-
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/NullReporter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/NullReporter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/NullReporter.java
deleted file mode 100644
index 148ea94..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/metrics/NullReporter.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.metrics;
-
-public class NullReporter extends MetricReporter {
-	
-	public void report()
-	{
-
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageFilter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageFilter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageFilter.java
deleted file mode 100644
index 2590570..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageFilter.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.parser.interfaces;
-
-public interface MessageFilter<T> {
-
-	boolean emitTuple(T message);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageParser.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageParser.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageParser.java
deleted file mode 100644
index 2db0db6..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/parser/interfaces/MessageParser.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.parser.interfaces;
-
-import java.util.List;
-
-public interface MessageParser<T> {
-
-	void init();
-	List<T> parse(byte[] rawMessage);
-	boolean validate(T message);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/Constants.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/Constants.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/Constants.java
deleted file mode 100644
index 99945cb..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/Constants.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-
-/**
-* The Interface Constants.
-* 
-* @author sheetal
-* @version $Revision: 1.0 $
-*/
-public interface Constants {
-
-/** The protocol tcp. */
-public static final int PROTOCOL_TCP = 6;
-
-/** The protocol udp. */
-public static final int PROTOCOL_UDP = 17;
-
-/** The document key separator. */
-public static final char DOCUMENT_KEY_SEPARATOR = '-';
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
deleted file mode 100644
index 27fae51..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-public class IEEE_802_1Q {
-
-	  int priorityCodePoint = 0;
-	  int dropEligibleIndicator = 0;
-	  int vLANIdentifier = 0;
-
-	  public IEEE_802_1Q(int priorityCodePoint, int dropEligibleIndicator,
-	      int vLANIdentifier) {
-	    this.priorityCodePoint = priorityCodePoint;
-	    this.dropEligibleIndicator = dropEligibleIndicator;
-	    this.vLANIdentifier = vLANIdentifier;
-	  }
-
-	  public int getPriorityCodePoint() {
-	    return priorityCodePoint;
-	  }
-
-	  public int getDropEligibleIndicator() {
-	    return dropEligibleIndicator;
-	  }
-
-	  public int getvLANIdentifier() {
-	    return vLANIdentifier;
-	  }
-	}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
deleted file mode 100644
index 1609887..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.pcap;
-
-import java.util.BitSet;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArraySet;
-
-import org.krakenapps.pcap.decoder.ethernet.EthernetDecoder;
-import org.krakenapps.pcap.decoder.ethernet.EthernetFrame;
-import org.krakenapps.pcap.decoder.ethernet.EthernetProcessor;
-import org.krakenapps.pcap.decoder.ethernet.MacAddress;
-import org.krakenapps.pcap.packet.PcapPacket;
-import org.krakenapps.pcap.util.Buffer;
-
-public class MetronEthernetDecoder extends EthernetDecoder {
-
-  private Set<EthernetProcessor> callbacks;
-  private Map<Integer, Set<EthernetProcessor>> typeCallbacks;
-
-  public MetronEthernetDecoder() {
-    callbacks = new CopyOnWriteArraySet<EthernetProcessor>();
-    typeCallbacks = new ConcurrentHashMap<Integer, Set<EthernetProcessor>>();
-  }
-
-  public void register(EthernetProcessor processor) {
-    this.callbacks.add(processor);
-  }
-
-  public void register(int type, EthernetProcessor processor) {
-    Set<EthernetProcessor> processors = typeCallbacks.get(type);
-    if (processors == null) {
-      processors = new HashSet<EthernetProcessor>();
-      typeCallbacks.put(type, processors);
-    }
-
-    processors.add(processor);
-  }
-
-  public void unregister(EthernetProcessor processor) {
-    this.callbacks.remove(processor);
-  }
-
-  public void unregister(int type, EthernetProcessor processor) {
-    Set<EthernetProcessor> processors = typeCallbacks.get(type);
-    if (processors == null)
-      return;
-
-    processors.remove(processor);
-  }
-
-  public void decode(PcapPacket packet) {
-    // do not reorder following codes (parse sequence)
-    MacAddress destination = getMacAddress(packet.getPacketData());
-    MacAddress source = getMacAddress(packet.getPacketData());
-    int type = getEtherType(packet.getPacketData());
-
-    if (type == 0x8100) {
-      // It is 802.1Q VLAN tag
-      IEEE_802_1Q iee802_1qTag = get802_1qTag(packet.getPacketData());
-      // Now get the type
-      type = getEtherType(packet.getPacketData());
-    }
-
-    Buffer buffer = packet.getPacketData();
-    buffer.discardReadBytes();
-
-    EthernetFrame frame = new EthernetFrame(source, destination, type, buffer);
-    frame.setPcapPacket(packet);
-    dispatch(frame);
-  }
-
-  private MacAddress getMacAddress(Buffer data) {
-    byte[] mac = new byte[6];
-    data.gets(mac, 0, 6);
-    return new MacAddress(mac);
-  }
-
-  private int getEtherType(Buffer data) {
-    return ((int) data.getShort()) & 0x0000FFFF;
-  }
-
-  private IEEE_802_1Q get802_1qTag(Buffer data) {
-
-    // reference http://en.wikipedia.org/wiki/EtherType &
-    // http://en.wikipedia.org/wiki/IEEE_802.1Q
-    byte[] b802_1qTag = new byte[2];
-    data.gets(b802_1qTag, 0, 2);
-    BitSet bits = BitSet.valueOf(b802_1qTag);
-    int pcp = convertBitToInt(bits.get(0, 3));
-    int dei = convertBitToInt(bits.get(3, 4));
-    int vid = convertBitToInt(bits.get(4, 16));
-
-    return new IEEE_802_1Q(pcp, dei, vid);
-  }
-
-  public static int convertBitToInt(BitSet bits) {
-    int value = 0;
-    for (int i = 0; i < bits.length(); ++i) {
-      value += bits.get(i) ? (1 << i) : 0;
-    }
-    return value;
-  }
-
-  private void dispatch(EthernetFrame frame) {
-    for (EthernetProcessor processor : callbacks)
-      processor.process(frame);
-
-    Set<EthernetProcessor> processors = typeCallbacks.get(frame.getType());
-    if (processors == null)
-      return;
-
-    for (EthernetProcessor processor : processors)
-      processor.process(frame.dup());
-  }
-}


[15/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/JavaCharStream.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/JavaCharStream.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/JavaCharStream.java
new file mode 100644
index 0000000..31f3c6f
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/JavaCharStream.java
@@ -0,0 +1,633 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. JavaCharStream.java Version 5.0 */
+/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=false */
+package org.apache.metron.parsers.ise;
+
+/**
+ * An implementation of interface CharStream, where the stream is assumed to
+ * contain only ASCII characters (with java-like unicode escape processing).
+ */
+
+class JavaCharStream
+{
+  /** Whether parser is static. */
+  public static final boolean staticFlag = false;
+
+  static final int hexval(char c) throws java.io.IOException {
+    switch(c)
+    {
+       case '0' :
+          return 0;
+       case '1' :
+          return 1;
+       case '2' :
+          return 2;
+       case '3' :
+          return 3;
+       case '4' :
+          return 4;
+       case '5' :
+          return 5;
+       case '6' :
+          return 6;
+       case '7' :
+          return 7;
+       case '8' :
+          return 8;
+       case '9' :
+          return 9;
+
+       case 'a' :
+       case 'A' :
+          return 10;
+       case 'b' :
+       case 'B' :
+          return 11;
+       case 'c' :
+       case 'C' :
+          return 12;
+       case 'd' :
+       case 'D' :
+          return 13;
+       case 'e' :
+       case 'E' :
+          return 14;
+       case 'f' :
+       case 'F' :
+          return 15;
+    }
+
+    throw new java.io.IOException(); // Should never come here
+  }
+
+/** Position in buffer. */
+  public int bufpos = -1;
+  int bufsize;
+  int available;
+  int tokenBegin;
+  protected int bufline[];
+  protected int bufcolumn[];
+
+  protected int column = 0;
+  protected int line = 1;
+
+  protected boolean prevCharIsCR = false;
+  protected boolean prevCharIsLF = false;
+
+  protected java.io.Reader inputStream;
+
+  protected char[] nextCharBuf;
+  protected char[] buffer;
+  protected int maxNextCharInd = 0;
+  protected int nextCharInd = -1;
+  protected int inBuf = 0;
+  protected int tabSize = 8;
+
+  protected void setTabSize(int i) { tabSize = i; }
+  protected int getTabSize(int i) { return tabSize; }
+
+  protected void ExpandBuff(boolean wrapAround)
+  {
+    char[] newbuffer = new char[bufsize + 2048];
+    int newbufline[] = new int[bufsize + 2048];
+    int newbufcolumn[] = new int[bufsize + 2048];
+
+    try
+    {
+      if (wrapAround)
+      {
+        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+        System.arraycopy(buffer, 0, newbuffer, bufsize - tokenBegin, bufpos);
+        buffer = newbuffer;
+
+        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+        System.arraycopy(bufline, 0, newbufline, bufsize - tokenBegin, bufpos);
+        bufline = newbufline;
+
+        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+        System.arraycopy(bufcolumn, 0, newbufcolumn, bufsize - tokenBegin, bufpos);
+        bufcolumn = newbufcolumn;
+
+        bufpos += (bufsize - tokenBegin);
+    }
+    else
+    {
+        System.arraycopy(buffer, tokenBegin, newbuffer, 0, bufsize - tokenBegin);
+        buffer = newbuffer;
+
+        System.arraycopy(bufline, tokenBegin, newbufline, 0, bufsize - tokenBegin);
+        bufline = newbufline;
+
+        System.arraycopy(bufcolumn, tokenBegin, newbufcolumn, 0, bufsize - tokenBegin);
+        bufcolumn = newbufcolumn;
+
+        bufpos -= tokenBegin;
+      }
+    }
+    catch (Throwable t)
+    {
+      throw new Error(t.getMessage());
+    }
+
+    available = (bufsize += 2048);
+    tokenBegin = 0;
+  }
+
+  protected void FillBuff() throws java.io.IOException
+  {
+    int i;
+    if (maxNextCharInd == 4096)
+      maxNextCharInd = nextCharInd = 0;
+
+    try {
+      if ((i = inputStream.read(nextCharBuf, maxNextCharInd,
+                                          4096 - maxNextCharInd)) == -1)
+      {
+        inputStream.close();
+        throw new java.io.IOException();
+      }
+      else
+         maxNextCharInd += i;
+      return;
+    }
+    catch(java.io.IOException e) {
+      if (bufpos != 0)
+      {
+        --bufpos;
+        backup(0);
+      }
+      else
+      {
+        bufline[bufpos] = line;
+        bufcolumn[bufpos] = column;
+      }
+      throw e;
+    }
+  }
+
+  protected char ReadByte() throws java.io.IOException
+  {
+    if (++nextCharInd >= maxNextCharInd)
+      FillBuff();
+
+    return nextCharBuf[nextCharInd];
+  }
+
+/** @return starting character for token. */
+  public char BeginToken() throws java.io.IOException
+  {
+    if (inBuf > 0)
+    {
+      --inBuf;
+
+      if (++bufpos == bufsize)
+        bufpos = 0;
+
+      tokenBegin = bufpos;
+      return buffer[bufpos];
+    }
+
+    tokenBegin = 0;
+    bufpos = -1;
+
+    return readChar();
+  }
+
+  protected void AdjustBuffSize()
+  {
+    if (available == bufsize)
+    {
+      if (tokenBegin > 2048)
+      {
+        bufpos = 0;
+        available = tokenBegin;
+      }
+      else
+        ExpandBuff(false);
+    }
+    else if (available > tokenBegin)
+      available = bufsize;
+    else if ((tokenBegin - available) < 2048)
+      ExpandBuff(true);
+    else
+      available = tokenBegin;
+  }
+
+  protected void UpdateLineColumn(char c)
+  {
+    column++;
+
+    if (prevCharIsLF)
+    {
+      prevCharIsLF = false;
+      line += (column = 1);
+    }
+    else if (prevCharIsCR)
+    {
+      prevCharIsCR = false;
+      if (c == '\n')
+      {
+        prevCharIsLF = true;
+      }
+      else
+        line += (column = 1);
+    }
+
+    switch (c)
+    {
+      case '\r' :
+        prevCharIsCR = true;
+        break;
+      case '\n' :
+        prevCharIsLF = true;
+        break;
+      case '\t' :
+        column--;
+        column += (tabSize - (column % tabSize));
+        break;
+      default :
+        break;
+    }
+
+    bufline[bufpos] = line;
+    bufcolumn[bufpos] = column;
+  }
+
+/** Read a character. */
+  public char readChar() throws java.io.IOException
+  {
+    if (inBuf > 0)
+    {
+      --inBuf;
+
+      if (++bufpos == bufsize)
+        bufpos = 0;
+
+      return buffer[bufpos];
+    }
+
+    char c;
+
+    if (++bufpos == available)
+      AdjustBuffSize();
+
+    if ((buffer[bufpos] = c = ReadByte()) == '\\')
+    {
+      UpdateLineColumn(c);
+
+      int backSlashCnt = 1;
+
+      for (;;) // Read all the backslashes
+      {
+        if (++bufpos == available)
+          AdjustBuffSize();
+
+        try
+        {
+          if ((buffer[bufpos] = c = ReadByte()) != '\\')
+          {
+            UpdateLineColumn(c);
+            // found a non-backslash char.
+            if ((c == 'u') && ((backSlashCnt & 1) == 1))
+            {
+              if (--bufpos < 0)
+                bufpos = bufsize - 1;
+
+              break;
+            }
+
+            backup(backSlashCnt);
+            return '\\';
+          }
+        }
+        catch(java.io.IOException e)
+        {
+	  // We are returning one backslash so we should only backup (count-1)
+          if (backSlashCnt > 1)
+            backup(backSlashCnt-1);
+
+          return '\\';
+        }
+
+        UpdateLineColumn(c);
+        backSlashCnt++;
+      }
+
+      // Here, we have seen an odd number of backslash's followed by a 'u'
+      try
+      {
+        while ((c = ReadByte()) == 'u')
+          ++column;
+
+        buffer[bufpos] = c = (char)(hexval(c) << 12 |
+                                    hexval(ReadByte()) << 8 |
+                                    hexval(ReadByte()) << 4 |
+                                    hexval(ReadByte()));
+
+        column += 4;
+      }
+      catch(java.io.IOException e)
+      {
+        throw new Error("Invalid escape character at line " + line +
+                                         " column " + column + ".");
+      }
+
+      if (backSlashCnt == 1)
+        return c;
+      else
+      {
+        backup(backSlashCnt - 1);
+        return '\\';
+      }
+    }
+    else
+    {
+      UpdateLineColumn(c);
+      return c;
+    }
+  }
+
+  @Deprecated
+  /**
+   * @deprecated
+   * @see #getEndColumn
+   */
+  public int getColumn() {
+    return bufcolumn[bufpos];
+  }
+
+  @Deprecated
+  /**
+   * @deprecated
+   * @see #getEndLine
+   */
+  public int getLine() {
+    return bufline[bufpos];
+  }
+
+/** Get end column. */
+  public int getEndColumn() {
+    return bufcolumn[bufpos];
+  }
+
+/** Get end line. */
+  public int getEndLine() {
+    return bufline[bufpos];
+  }
+
+/** @return column of token start */
+  public int getBeginColumn() {
+    return bufcolumn[tokenBegin];
+  }
+
+/** @return line number of token start */
+  public int getBeginLine() {
+    return bufline[tokenBegin];
+  }
+
+/** Retreat. */
+  public void backup(int amount) {
+
+    inBuf += amount;
+    if ((bufpos -= amount) < 0)
+      bufpos += bufsize;
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.Reader dstream,
+                 int startline, int startcolumn, int buffersize)
+  {
+    inputStream = dstream;
+    line = startline;
+    column = startcolumn - 1;
+
+    available = bufsize = buffersize;
+    buffer = new char[buffersize];
+    bufline = new int[buffersize];
+    bufcolumn = new int[buffersize];
+    nextCharBuf = new char[4096];
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.Reader dstream,
+                                        int startline, int startcolumn)
+  {
+    this(dstream, startline, startcolumn, 4096);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.Reader dstream)
+  {
+    this(dstream, 1, 1, 4096);
+  }
+/** Reinitialise. */
+  public void ReInit(java.io.Reader dstream,
+                 int startline, int startcolumn, int buffersize)
+  {
+    inputStream = dstream;
+    line = startline;
+    column = startcolumn - 1;
+
+    if (buffer == null || buffersize != buffer.length)
+    {
+      available = bufsize = buffersize;
+      buffer = new char[buffersize];
+      bufline = new int[buffersize];
+      bufcolumn = new int[buffersize];
+      nextCharBuf = new char[4096];
+    }
+    prevCharIsLF = prevCharIsCR = false;
+    tokenBegin = inBuf = maxNextCharInd = 0;
+    nextCharInd = bufpos = -1;
+  }
+
+/** Reinitialise. */
+  public void ReInit(java.io.Reader dstream,
+                                        int startline, int startcolumn)
+  {
+    ReInit(dstream, startline, startcolumn, 4096);
+  }
+
+/** Reinitialise. */
+  public void ReInit(java.io.Reader dstream)
+  {
+    ReInit(dstream, 1, 1, 4096);
+  }
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
+  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  {
+    this(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream, int startline,
+  int startcolumn, int buffersize)
+  {
+    this(new java.io.InputStreamReader(dstream), startline, startcolumn, 4096);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream, String encoding, int startline,
+                        int startcolumn) throws java.io.UnsupportedEncodingException
+  {
+    this(dstream, encoding, startline, startcolumn, 4096);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream, int startline,
+                        int startcolumn)
+  {
+    this(dstream, startline, startcolumn, 4096);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  {
+    this(dstream, encoding, 1, 1, 4096);
+  }
+
+/** Constructor. */
+  public JavaCharStream(java.io.InputStream dstream)
+  {
+    this(dstream, 1, 1, 4096);
+  }
+
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+  int startcolumn, int buffersize) throws java.io.UnsupportedEncodingException
+  {
+    ReInit(encoding == null ? new java.io.InputStreamReader(dstream) : new java.io.InputStreamReader(dstream, encoding), startline, startcolumn, buffersize);
+  }
+
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream, int startline,
+  int startcolumn, int buffersize)
+  {
+    ReInit(new java.io.InputStreamReader(dstream), startline, startcolumn, buffersize);
+  }
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream, String encoding, int startline,
+                     int startcolumn) throws java.io.UnsupportedEncodingException
+  {
+    ReInit(dstream, encoding, startline, startcolumn, 4096);
+  }
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream, int startline,
+                     int startcolumn)
+  {
+    ReInit(dstream, startline, startcolumn, 4096);
+  }
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream, String encoding) throws java.io.UnsupportedEncodingException
+  {
+    ReInit(dstream, encoding, 1, 1, 4096);
+  }
+
+/** Reinitialise. */
+  public void ReInit(java.io.InputStream dstream)
+  {
+    ReInit(dstream, 1, 1, 4096);
+  }
+
+  /** @return token image as String */
+  public String GetImage()
+  {
+    if (bufpos >= tokenBegin)
+      return new String(buffer, tokenBegin, bufpos - tokenBegin + 1);
+    else
+      return new String(buffer, tokenBegin, bufsize - tokenBegin) +
+                              new String(buffer, 0, bufpos + 1);
+  }
+
+  /** @return suffix */
+  public char[] GetSuffix(int len)
+  {
+    char[] ret = new char[len];
+
+    if ((bufpos + 1) >= len)
+      System.arraycopy(buffer, bufpos - len + 1, ret, 0, len);
+    else
+    {
+      System.arraycopy(buffer, bufsize - (len - bufpos - 1), ret, 0,
+                                                        len - bufpos - 1);
+      System.arraycopy(buffer, 0, ret, len - bufpos - 1, bufpos + 1);
+    }
+
+    return ret;
+  }
+
+  /** Set buffers back to null when finished. */
+  public void Done()
+  {
+    nextCharBuf = null;
+    buffer = null;
+    bufline = null;
+    bufcolumn = null;
+  }
+
+  /**
+   * Method to adjust line and column numbers for the start of a token.
+   */
+  public void adjustBeginLineColumn(int newLine, int newCol)
+  {
+    int start = tokenBegin;
+    int len;
+
+    if (bufpos >= tokenBegin)
+    {
+      len = bufpos - tokenBegin + inBuf + 1;
+    }
+    else
+    {
+      len = bufsize - tokenBegin + bufpos + 1 + inBuf;
+    }
+
+    int i = 0, j = 0, k = 0;
+    int nextColDiff = 0, columnDiff = 0;
+
+    while (i < len && bufline[j = start % bufsize] == bufline[k = ++start % bufsize])
+    {
+      bufline[j] = newLine;
+      nextColDiff = columnDiff + bufcolumn[k] - bufcolumn[j];
+      bufcolumn[j] = newCol + columnDiff;
+      columnDiff = nextColDiff;
+      i++;
+    }
+
+    if (i < len)
+    {
+      bufline[j] = newLine++;
+      bufcolumn[j] = newCol + columnDiff;
+
+      while (i++ < len)
+      {
+        if (bufline[j = start % bufsize] != bufline[++start % bufsize])
+          bufline[j] = newLine++;
+        else
+          bufline[j] = newLine;
+      }
+    }
+
+    line = bufline[j];
+    column = bufcolumn[j];
+  }
+
+}
+/* JavaCC - OriginalChecksum=96a5b0b0fa09286690f250998f047719 (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ParseException.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ParseException.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ParseException.java
new file mode 100644
index 0000000..fc21aa1
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/ParseException.java
@@ -0,0 +1,204 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. ParseException.java Version 5.0 */
+/* JavaCCOptions:KEEP_LINE_COL=null */
+package org.apache.metron.parsers.ise;
+
+/**
+ * This exception is thrown when parse errors are encountered.
+ * You can explicitly create objects of this exception type by
+ * calling the method generateParseException in the generated
+ * parser.
+ *
+ * You can modify this class to customize your error reporting
+ * mechanisms so long as you retain the public fields.
+ */
+public class ParseException extends Exception {
+
+  /**
+   * The version identifier for this Serializable class.
+   * Increment only if the <i>serialized</i> form of the
+   * class changes.
+   */
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * This constructor is used by the method "generateParseException"
+   * in the generated parser.  Calling this constructor generates
+   * a new object of this type with the fields "currentToken",
+   * "expectedTokenSequences", and "tokenImage" set.
+   */
+  public ParseException(Token currentTokenVal,
+                        int[][] expectedTokenSequencesVal,
+                        String[] tokenImageVal
+                       )
+  {
+    super(initialise(currentTokenVal, expectedTokenSequencesVal, tokenImageVal));
+    currentToken = currentTokenVal;
+    expectedTokenSequences = expectedTokenSequencesVal;
+    tokenImage = tokenImageVal;
+  }
+
+  /**
+   * The following constructors are for use by you for whatever
+   * purpose you can think of.  Constructing the exception in this
+   * manner makes the exception behave in the normal way - i.e., as
+   * documented in the class "Throwable".  The fields "errorToken",
+   * "expectedTokenSequences", and "tokenImage" do not contain
+   * relevant information.  The JavaCC generated code does not use
+   * these constructors.
+   */
+
+  public ParseException() {
+    super();
+  }
+
+  /** Constructor with message. */
+  public ParseException(String message) {
+    super(message);
+  }
+
+
+  /**
+   * This is the last token that has been consumed successfully.  If
+   * this object has been created due to a parse error, the token
+   * followng this token will (therefore) be the first error token.
+   */
+  public Token currentToken;
+
+  /**
+   * Each entry in this array is an array of integers.  Each array
+   * of integers represents a sequence of tokens (by their ordinal
+   * values) that is expected at this point of the parse.
+   */
+  public int[][] expectedTokenSequences;
+
+  /**
+   * This is a reference to the "tokenImage" array of the generated
+   * parser within which the parse error occurred.  This array is
+   * defined in the generated ...Constants interface.
+   */
+  public String[] tokenImage;
+
+  /**
+   * It uses "currentToken" and "expectedTokenSequences" to generate a parse
+   * error message and returns it.  If this object has been created
+   * due to a parse error, and you do not catch it (it gets thrown
+   * from the parser) the correct error message
+   * gets displayed.
+   */
+  private static String initialise(Token currentToken,
+                           int[][] expectedTokenSequences,
+                           String[] tokenImage) {
+    String eol = System.getProperty("line.separator", "\n");
+    StringBuffer expected = new StringBuffer();
+    int maxSize = 0;
+    for (int i = 0; i < expectedTokenSequences.length; i++) {
+      if (maxSize < expectedTokenSequences[i].length) {
+        maxSize = expectedTokenSequences[i].length;
+      }
+      for (int j = 0; j < expectedTokenSequences[i].length; j++) {
+        expected.append(tokenImage[expectedTokenSequences[i][j]]).append(' ');
+      }
+      if (expectedTokenSequences[i][expectedTokenSequences[i].length - 1] != 0) {
+        expected.append("...");
+      }
+      expected.append(eol).append("    ");
+    }
+    String retval = "Encountered \"";
+    Token tok = currentToken.next;
+    for (int i = 0; i < maxSize; i++) {
+      if (i != 0) retval += " ";
+      if (tok.kind == 0) {
+        retval += tokenImage[0];
+        break;
+      }
+      retval += " " + tokenImage[tok.kind];
+      retval += " \"";
+      retval += add_escapes(tok.image);
+      retval += " \"";
+      tok = tok.next;
+    }
+    retval += "\" at line " + currentToken.next.beginLine + ", column " + currentToken.next.beginColumn;
+    retval += "." + eol;
+    if (expectedTokenSequences.length == 1) {
+      retval += "Was expecting:" + eol + "    ";
+    } else {
+      retval += "Was expecting one of:" + eol + "    ";
+    }
+    retval += expected.toString();
+    return retval;
+  }
+
+  /**
+   * The end of line string for this machine.
+   */
+  protected String eol = System.getProperty("line.separator", "\n");
+
+  /**
+   * Used to convert raw characters to their escaped version
+   * when these raw version cannot be used as part of an ASCII
+   * string literal.
+   */
+  static String add_escapes(String str) {
+      StringBuffer retval = new StringBuffer();
+      char ch;
+      for (int i = 0; i < str.length(); i++) {
+        switch (str.charAt(i))
+        {
+           case 0 :
+              continue;
+           case '\b':
+              retval.append("\\b");
+              continue;
+           case '\t':
+              retval.append("\\t");
+              continue;
+           case '\n':
+              retval.append("\\n");
+              continue;
+           case '\f':
+              retval.append("\\f");
+              continue;
+           case '\r':
+              retval.append("\\r");
+              continue;
+           case '\"':
+              retval.append("\\\"");
+              continue;
+           case '\'':
+              retval.append("\\\'");
+              continue;
+           case '\\':
+              retval.append("\\\\");
+              continue;
+           default:
+              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+                 String s = "0000" + Integer.toString(ch, 16);
+                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+              } else {
+                 retval.append(ch);
+              }
+              continue;
+        }
+      }
+      return retval.toString();
+   }
+
+}
+/* JavaCC - OriginalChecksum=f9f7217056f99de5708d01ebd497dede (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/Token.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/Token.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/Token.java
new file mode 100644
index 0000000..3545ec4
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/Token.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. Token.java Version 5.0 */
+/* JavaCCOptions:TOKEN_EXTENDS=,KEEP_LINE_COL=null,SUPPORT_CLASS_VISIBILITY_PUBLIC=false */
+package org.apache.metron.parsers.ise;
+
+/**
+ * Describes the input token stream.
+ */
+
+class Token implements java.io.Serializable {
+
+  /**
+   * The version identifier for this Serializable class.
+   * Increment only if the <i>serialized</i> form of the
+   * class changes.
+   */
+  private static final long serialVersionUID = 1L;
+
+  /**
+   * An integer that describes the kind of this token.  This numbering
+   * system is determined by JavaCCParser, and a table of these numbers is
+   * stored in the file ...Constants.java.
+   */
+  public int kind;
+
+  /** The line number of the first character of this Token. */
+  public int beginLine;
+  /** The column number of the first character of this Token. */
+  public int beginColumn;
+  /** The line number of the last character of this Token. */
+  public int endLine;
+  /** The column number of the last character of this Token. */
+  public int endColumn;
+
+  /**
+   * The string image of the token.
+   */
+  public String image;
+
+  /**
+   * A reference to the next regular (non-special) token from the input
+   * stream.  If this is the last token from the input stream, or if the
+   * token manager has not read tokens beyond this one, this field is
+   * set to null.  This is true only if this token is also a regular
+   * token.  Otherwise, see below for a description of the contents of
+   * this field.
+   */
+  public Token next;
+
+  /**
+   * This field is used to access special tokens that occur prior to this
+   * token, but after the immediately preceding regular (non-special) token.
+   * If there are no such special tokens, this field is set to null.
+   * When there are more than one such special token, this field refers
+   * to the last of these special tokens, which in turn refers to the next
+   * previous special token through its specialToken field, and so on
+   * until the first special token (whose specialToken field is null).
+   * The next fields of special tokens refer to other special tokens that
+   * immediately follow it (without an intervening regular token).  If there
+   * is no such token, this field is null.
+   */
+  public Token specialToken;
+
+  /**
+   * An optional attribute value of the Token.
+   * Tokens which are not used as syntactic sugar will often contain
+   * meaningful values that will be used later on by the compiler or
+   * interpreter. This attribute value is often different from the image.
+   * Any subclass of Token that actually wants to return a non-null value can
+   * override this method as appropriate.
+   */
+  public Object getValue() {
+    return null;
+  }
+
+  /**
+   * No-argument constructor
+   */
+  public Token() {}
+
+  /**
+   * Constructs a new token of the specified kind, with a null image.
+   */
+  public Token(int kind)
+  {
+    this(kind, null);
+  }
+
+  /**
+   * Constructs a new token for the specified Image and Kind.
+   */
+  public Token(int kind, String image)
+  {
+    this.kind = kind;
+    this.image = image;
+  }
+
+  /**
+   * Returns the image.
+   */
+  public String toString()
+  {
+    return image;
+  }
+
+  /**
+   * Returns a new Token object, by default. However, if you want, you
+   * can create and return subclass objects based on the value of ofKind.
+   * Simply add the cases to the switch for all those special cases.
+   * For example, if you have a subclass of Token called IDToken that
+   * you want to create if ofKind is ID, simply add something like :
+   *
+   *    case MyParserConstants.ID : return new IDToken(ofKind, image);
+   *
+   * to the following switch statement. Then you can cast matchedToken
+   * variable to the appropriate type and use it in your lexical actions.
+   */
+  public static Token newToken(int ofKind, String image)
+  {
+    switch(ofKind)
+    {
+      default : return new Token(ofKind, image);
+    }
+  }
+
+  public static Token newToken(int ofKind)
+  {
+    return newToken(ofKind, null);
+  }
+
+}
+/* JavaCC - OriginalChecksum=99daf0baa94b6c270eea5be0575db6aa (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/TokenMgrError.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/TokenMgrError.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/TokenMgrError.java
new file mode 100644
index 0000000..2ccc23a
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/ise/TokenMgrError.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 5.0 */
+/* JavaCCOptions: */
+package org.apache.metron.parsers.ise;
+
+/** Token Manager Error. */
+class TokenMgrError extends Error
+{
+
+  /**
+   * The version identifier for this Serializable class.
+   * Increment only if the <i>serialized</i> form of the
+   * class changes.
+   */
+  private static final long serialVersionUID = 1L;
+
+  /*
+   * Ordinals for various reasons why an Error of this type can be thrown.
+   */
+
+  /**
+   * Lexical error occurred.
+   */
+  static final int LEXICAL_ERROR = 0;
+
+  /**
+   * An attempt was made to create a second instance of a static token manager.
+   */
+  static final int STATIC_LEXER_ERROR = 1;
+
+  /**
+   * Tried to change to an invalid lexical state.
+   */
+  static final int INVALID_LEXICAL_STATE = 2;
+
+  /**
+   * Detected (and bailed out of) an infinite loop in the token manager.
+   */
+  static final int LOOP_DETECTED = 3;
+
+  /**
+   * Indicates the reason why the exception is thrown. It will have
+   * one of the above 4 values.
+   */
+  int errorCode;
+
+  /**
+   * Replaces unprintable characters by their escaped (or unicode escaped)
+   * equivalents in the given string
+   */
+  protected static final String addEscapes(String str) {
+    StringBuffer retval = new StringBuffer();
+    char ch;
+    for (int i = 0; i < str.length(); i++) {
+      switch (str.charAt(i))
+      {
+        case 0 :
+          continue;
+        case '\b':
+          retval.append("\\b");
+          continue;
+        case '\t':
+          retval.append("\\t");
+          continue;
+        case '\n':
+          retval.append("\\n");
+          continue;
+        case '\f':
+          retval.append("\\f");
+          continue;
+        case '\r':
+          retval.append("\\r");
+          continue;
+        case '\"':
+          retval.append("\\\"");
+          continue;
+        case '\'':
+          retval.append("\\\'");
+          continue;
+        case '\\':
+          retval.append("\\\\");
+          continue;
+        default:
+          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+            String s = "0000" + Integer.toString(ch, 16);
+            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+          } else {
+            retval.append(ch);
+          }
+          continue;
+      }
+    }
+    return retval.toString();
+  }
+
+  /**
+   * Returns a detailed message for the Error when it is thrown by the
+   * token manager to indicate a lexical error.
+   * Parameters :
+   *    EOFSeen     : indicates if EOF caused the lexical error
+   *    curLexState : lexical state in which this error occurred
+   *    errorLine   : line number when the error occurred
+   *    errorColumn : column number when the error occurred
+   *    errorAfter  : prefix that was seen before this error occurred
+   *    curChar     : the offending character
+   * Note: You can customize the lexical error message by modifying this method.
+   */
+  protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
+    return("Lexical error at line " +
+          errorLine + ", column " +
+          errorColumn + ".  Encountered: " +
+          (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
+          "after : \"" + addEscapes(errorAfter) + "\"");
+  }
+
+  /**
+   * You can also modify the body of this method to customize your error messages.
+   * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
+   * of the end-user's concern, so you can return something like :
+   *
+   *     "Internal Error : Please file a bug report .... "
+   *
+   * from this method for such cases in the release version of your parser.
+   */
+  public String getMessage() {
+    return super.getMessage();
+  }
+
+  /*
+   * Constructors of various flavors follow.
+   */
+
+  /** No arg constructor. */
+  public TokenMgrError() {
+  }
+
+  /** Constructor with message and reason. */
+  public TokenMgrError(String message, int reason) {
+    super(message);
+    errorCode = reason;
+  }
+
+  /** Full Constructor. */
+  public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
+    this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
+  }
+}
+/* JavaCC - OriginalChecksum=5fbf6813c9d6a1d713f1d4a002af1322 (do not edit this line) */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/lancope/BasicLancopeParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/lancope/BasicLancopeParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/lancope/BasicLancopeParser.java
new file mode 100644
index 0000000..6c25d67
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/lancope/BasicLancopeParser.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.parsers.lancope;
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+
+@SuppressWarnings("serial")
+public class BasicLancopeParser extends BasicParser {
+	// Sample Lancope Message
+	// {"message":"<131>Jul 17 15:59:01 smc-01 StealthWatch[12365]: 2014-07-17T15:58:30Z 10.40.10.254 0.0.0.0 Minor High Concern Index The host's concern index has either exceeded the CI threshold or rapidly increased. Observed 36.55M points. Policy maximum allows up to 20M points.","@version":"1","@timestamp":"2014-07-17T15:56:05.992Z","type":"syslog","host":"10.122.196.201"}
+
+	private static final Logger _LOG = LoggerFactory.getLogger(BasicLancopeParser
+					.class);
+
+	@Override
+	public void init() {
+		// No initialization state is required.
+	}
+
+	/**
+	 * Parses a Lancope/StealthWatch syslog event (JSON-wrapped, see the sample
+	 * above) into Metron's normalized form: extracts source/destination
+	 * addresses and the event timestamp from the "message" field.
+	 *
+	 * @param msg raw UTF-8 JSON bytes of the event
+	 * @return a singleton list containing the normalized message, or null if
+	 *         the message could not be parsed
+	 */
+	@Override
+	public List<JSONObject> parse(byte[] msg) {
+
+		String raw_message = null;
+		JSONObject payload = null;
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			raw_message = new String(msg, "UTF-8");
+
+			payload = (JSONObject) JSONValue.parse(raw_message);
+
+			// The syslog text is space-delimited; fields 6 and 7 hold the
+			// source and destination addresses (see the sample message above).
+			String message = payload.get("message").toString();
+			String[] parts = message.split(" ");
+			payload.put("ip_src_addr", parts[6]);
+			payload.put("ip_dst_addr", parts[7]);
+
+			// Field 5 is an ISO-8601 UTC timestamp, e.g. 2014-07-17T15:58:30Z.
+			String fixed_date = parts[5].replace('T', ' ');
+			fixed_date = fixed_date.replace('Z', ' ').trim();
+
+			SimpleDateFormat formatter = new SimpleDateFormat(
+					"yyyy-MM-dd HH:mm:ss");
+			// BUG FIX: the trailing 'Z' marks the time as UTC, but parsing
+			// previously used the JVM default time zone, skewing the epoch value.
+			formatter.setTimeZone(java.util.TimeZone.getTimeZone("UTC"));
+
+			Date date = formatter.parse(fixed_date);
+			long timestamp = date.getTime();
+			payload.put("timestamp", timestamp);
+
+			payload.remove("@timestamp");
+			payload.remove("message");
+			payload.put("original_string", message);
+
+			messages.add(payload);
+			return messages;
+		} catch (Exception e) {
+			// BUG FIX: payload may still be null here (e.g. JSONValue.parse
+			// returned null for malformed input), which previously threw an
+			// NPE inside the error handler; fall back to the raw input and
+			// log the exception itself for diagnosis.
+			_LOG.error("Unable to parse message: "
+					+ (payload != null ? payload.toJSONString() : raw_message), e);
+			return null;
+		}
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/logstash/BasicLogstashParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/logstash/BasicLogstashParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/logstash/BasicLogstashParser.java
new file mode 100644
index 0000000..39177aa
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/logstash/BasicLogstashParser.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.logstash;
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+
+public class BasicLogstashParser extends BasicParser {
+
+	// Fully qualified to avoid touching the import block of this file.
+	private static final org.slf4j.Logger _LOG =
+			org.slf4j.LoggerFactory.getLogger(BasicLogstashParser.class);
+
+	@Override
+	public void init() {
+		// No initialization state is required.
+	}
+
+	/**
+	 * Parses a logstash-emitted JSON event into Metron's normalized form:
+	 * strips logstash meta fields, renames common fields to the Metron
+	 * naming convention, and converts "@timestamp" to epoch milliseconds.
+	 *
+	 * @param raw_message raw UTF-8 JSON bytes of the event
+	 * @return a singleton list containing the normalized message, or null if
+	 *         the message could not be parsed
+	 */
+	@Override
+	public List<JSONObject> parse(byte[] raw_message) {
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			/*
+			 * We need to create a new JSONParser each time because it is
+			 * not serializable and the parser is created on the storm nimbus
+			 * node, then transferred to the workers.
+			 */
+			JSONParser jsonParser = new JSONParser();
+			String rawString = new String(raw_message, "UTF-8");
+			JSONObject rawJson = (JSONObject) jsonParser.parse(rawString);
+
+			// remove logstash meta fields
+			rawJson.remove("@version");
+			rawJson.remove("type");
+			rawJson.remove("host");
+			rawJson.remove("tags");
+
+			// rename other keys (a duplicated "src_ip" mutate was removed)
+			rawJson = mutate(rawJson, "message", "original_string");
+			rawJson = mutate(rawJson, "src_ip", "ip_src_addr");
+			rawJson = mutate(rawJson, "dst_ip", "ip_dst_addr");
+			rawJson = mutate(rawJson, "src_port", "ip_src_port");
+			rawJson = mutate(rawJson, "dst_port", "ip_dst_port");
+
+			// convert timestamp to milli since epoch
+			long timestamp = logstashToEpoch((String) rawJson.remove("@timestamp"));
+			rawJson.put("timestamp", timestamp);
+			messages.add(rawJson);
+			return messages;
+		} catch (Exception e) {
+			// BUG FIX: report failures through the logger instead of
+			// printStackTrace so they reach the worker logs.
+			_LOG.error("Unable to parse logstash message", e);
+			return null;
+		}
+	}
+
+	/** Renames oldKey to newKey if present; the value is left untouched. */
+	private JSONObject mutate(JSONObject json, String oldKey, String newKey) {
+		if (json.containsKey(oldKey)) {
+			json.put(newKey, json.remove(oldKey));
+		}
+		return json;
+	}
+
+	/**
+	 * Converts a logstash "@timestamp" (e.g. 2014-07-17T15:56:05.992Z) to
+	 * epoch milliseconds.
+	 */
+	private long logstashToEpoch(String timestamp) throws java.text.ParseException {
+		SimpleDateFormat logstashDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
+		// BUG FIX: the pattern treats the trailing 'Z' as a literal, so the
+		// zone must be set explicitly to UTC; otherwise the JVM default zone
+		// is used and the resulting epoch value is skewed.
+		logstashDateFormat.setTimeZone(java.util.TimeZone.getTimeZone("UTC"));
+		return logstashDateFormat.parse(timestamp).getTime();
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParser.java
new file mode 100644
index 0000000..c67e2b5
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/paloalto/BasicPaloAltoFirewallParser.java
@@ -0,0 +1,209 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.paloalto;
+
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+public class BasicPaloAltoFirewallParser extends BasicParser {
+
+  private static final Logger _LOG = LoggerFactory.getLogger
+          (BasicPaloAltoFirewallParser.class);
+
+  private static final long serialVersionUID = 3147090149725343999L;
+
+  // Common fields (positions 0-30 of the comma-delimited PAN-OS log line).
+  public static final String PaloAltoDomain = "palo_alto_domain";
+  public static final String ReceiveTime = "receive_time";
+  public static final String SerialNum = "serial_num";
+  public static final String Type = "type";
+  public static final String ThreatContentType = "threat_content_type";
+  public static final String ConfigVersion = "config_version";
+  public static final String GenerateTime = "generate_time";
+  public static final String SourceAddress = "source_address";
+  public static final String DestinationAddress = "destination_address";
+  public static final String NATSourceIP = "nat_source_ip";
+  public static final String NATDestinationIP = "nat_destination_ip";
+  public static final String Rule = "rule";
+  public static final String SourceUser = "source_user";
+  public static final String DestinationUser = "destination_user";
+  public static final String Application = "application";
+  public static final String VirtualSystem = "virtual_system";
+  public static final String SourceZone = "source_zone";
+  public static final String DestinationZone = "destination_zone";
+  public static final String InboundInterface = "inbound_interface";
+  public static final String OutboundInterface = "outbound_interface";
+  public static final String LogAction = "log_action";
+  public static final String TimeLogged = "time_logged";
+  public static final String SessionID = "session_id";
+  public static final String RepeatCount = "repeat_count";
+  public static final String SourcePort = "source_port";
+  public static final String DestinationPort = "destination_port";
+  // NOTE(review): "nats_" below looks like a typo for "nat_", but downstream
+  // consumers may already rely on these key names, so they are left as-is.
+  public static final String NATSourcePort = "nats_source_port";
+  public static final String NATDestinationPort = "nats_destination_port";
+  public static final String Flags = "flags";
+  public static final String IPProtocol = "ip_protocol";
+  public static final String Action = "action";
+
+  //Threat
+  public static final String URL = "url";
+  public static final String HOST = "host";
+  public static final String ThreatContentName = "threat_content_name";
+  public static final String Category = "category";
+  public static final String Direction = "direction";
+  public static final String Seqno = "seqno";
+  public static final String ActionFlags = "action_flags";
+  public static final String SourceCountry = "source_country";
+  public static final String DestinationCountry = "destination_country";
+  public static final String Cpadding = "cpadding";
+  public static final String ContentType = "content_type";
+
+  //Traffic
+  // BUG FIX: these seven constants were all copy-pasted as "content_type",
+  // which made every TRAFFIC field overwrite the previous one under a
+  // single key in the output message.
+  public static final String Bytes = "bytes";
+  public static final String BytesSent = "bytes_sent";
+  public static final String BytesReceived = "bytes_received";
+  public static final String Packets = "packets";
+  public static final String StartTime = "start_time";
+  public static final String ElapsedTimeInSec = "elapsed_time_in_sec";
+  public static final String Padding = "padding";
+  public static final String PktsSent = "pkts_sent";
+  public static final String PktsReceived = "pkts_received";
+
+
+  @Override
+  public void init() {
+    // No initialization state is required.
+  }
+
+  /**
+   * Parses a comma-delimited PAN-OS syslog line into Metron's normalized form.
+   *
+   * @param msg raw UTF-8 bytes of the log line
+   * @return a singleton list containing the parsed message, or null if the
+   *         line could not be parsed
+   */
+  @Override
+  @SuppressWarnings("unchecked")
+  public List<JSONObject> parse(byte[] msg) {
+
+    JSONObject outputMessage = new JSONObject();
+    String toParse = "";
+    List<JSONObject> messages = new ArrayList<>();
+    try {
+
+      toParse = new String(msg, "UTF-8");
+      _LOG.debug("Received message: " + toParse);
+
+      parseMessage(toParse, outputMessage);
+      outputMessage.put("timestamp", System.currentTimeMillis());
+      // Rename to the Metron field naming convention.
+      outputMessage.put("ip_src_addr", outputMessage.remove("source_address"));
+      outputMessage.put("ip_src_port", outputMessage.remove("source_port"));
+      outputMessage.put("ip_dst_addr", outputMessage.remove("destination_address"));
+      outputMessage.put("ip_dst_port", outputMessage.remove("destination_port"));
+      outputMessage.put("protocol", outputMessage.remove("ip_protocol"));
+
+      outputMessage.put("original_string", toParse);
+      messages.add(outputMessage);
+      return messages;
+    } catch (Exception e) {
+      // BUG FIX: log the exception with its stack trace instead of
+      // printStackTrace so failures reach the worker logs.
+      _LOG.error("Failed to parse: " + toParse, e);
+      return null;
+    }
+  }
+
+  /**
+   * Splits the log line on commas and maps each positional token to its
+   * field name. Positions 0-30 are common; positions 31+ differ between
+   * THREAT and TRAFFIC record types.
+   */
+  @SuppressWarnings("unchecked")
+  private void parseMessage(String message, JSONObject outputMessage) {
+
+    String[] tokens = message.split(",");
+
+    String type = tokens[3].trim();
+
+    //populate common objects
+    outputMessage.put(PaloAltoDomain, tokens[0].trim());
+    outputMessage.put(ReceiveTime, tokens[1].trim());
+    outputMessage.put(SerialNum, tokens[2].trim());
+    outputMessage.put(Type, type);
+    outputMessage.put(ThreatContentType, tokens[4].trim());
+    outputMessage.put(ConfigVersion, tokens[5].trim());
+    outputMessage.put(GenerateTime, tokens[6].trim());
+    outputMessage.put(SourceAddress, tokens[7].trim());
+    outputMessage.put(DestinationAddress, tokens[8].trim());
+    outputMessage.put(NATSourceIP, tokens[9].trim());
+    outputMessage.put(NATDestinationIP, tokens[10].trim());
+    outputMessage.put(Rule, tokens[11].trim());
+    outputMessage.put(SourceUser, tokens[12].trim());
+    outputMessage.put(DestinationUser, tokens[13].trim());
+    outputMessage.put(Application, tokens[14].trim());
+    outputMessage.put(VirtualSystem, tokens[15].trim());
+    outputMessage.put(SourceZone, tokens[16].trim());
+    outputMessage.put(DestinationZone, tokens[17].trim());
+    outputMessage.put(InboundInterface, tokens[18].trim());
+    outputMessage.put(OutboundInterface, tokens[19].trim());
+    outputMessage.put(LogAction, tokens[20].trim());
+    outputMessage.put(TimeLogged, tokens[21].trim());
+    outputMessage.put(SessionID, tokens[22].trim());
+    outputMessage.put(RepeatCount, tokens[23].trim());
+    outputMessage.put(SourcePort, tokens[24].trim());
+    outputMessage.put(DestinationPort, tokens[25].trim());
+    outputMessage.put(NATSourcePort, tokens[26].trim());
+    outputMessage.put(NATDestinationPort, tokens[27].trim());
+    outputMessage.put(Flags, tokens[28].trim());
+    outputMessage.put(IPProtocol, tokens[29].trim());
+    outputMessage.put(Action, tokens[30].trim());
+
+
+    if ("THREAT".equalsIgnoreCase(type)) {
+      outputMessage.put(URL, tokens[31].trim());
+      try {
+        URL url = new URL(tokens[31].trim());
+        outputMessage.put(HOST, url.getHost());
+      } catch (MalformedURLException e) {
+        // Best effort: not every THREAT record carries a well-formed URL;
+        // the "host" field is simply omitted in that case.
+      }
+      outputMessage.put(ThreatContentName, tokens[32].trim());
+      outputMessage.put(Category, tokens[33].trim());
+      outputMessage.put(Direction, tokens[34].trim());
+      outputMessage.put(Seqno, tokens[35].trim());
+      outputMessage.put(ActionFlags, tokens[36].trim());
+      outputMessage.put(SourceCountry, tokens[37].trim());
+      outputMessage.put(DestinationCountry, tokens[38].trim());
+      outputMessage.put(Cpadding, tokens[39].trim());
+      outputMessage.put(ContentType, tokens[40].trim());
+
+    } else {
+      outputMessage.put(Bytes, tokens[31].trim());
+      outputMessage.put(BytesSent, tokens[32].trim());
+      outputMessage.put(BytesReceived, tokens[33].trim());
+      outputMessage.put(Packets, tokens[34].trim());
+      outputMessage.put(StartTime, tokens[35].trim());
+      outputMessage.put(ElapsedTimeInSec, tokens[36].trim());
+      outputMessage.put(Category, tokens[37].trim());
+      outputMessage.put(Padding, tokens[38].trim());
+      outputMessage.put(Seqno, tokens[39].trim());
+      outputMessage.put(ActionFlags, tokens[40].trim());
+      outputMessage.put(SourceCountry, tokens[41].trim());
+      outputMessage.put(DestinationCountry, tokens[42].trim());
+      outputMessage.put(Cpadding, tokens[43].trim());
+      outputMessage.put(PktsSent, tokens[44].trim());
+      outputMessage.put(PktsReceived, tokens[45].trim());
+    }
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/pcap/PcapParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/pcap/PcapParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/pcap/PcapParser.java
new file mode 100644
index 0000000..899dbd8
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/pcap/PcapParser.java
@@ -0,0 +1,229 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.pcap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.apache.metron.parsers.interfaces.MessageParser;
+import org.apache.metron.pcap.Constants;
+import org.apache.metron.pcap.MetronEthernetDecoder;
+import org.apache.metron.pcap.PacketInfo;
+import org.apache.metron.pcap.PcapByteInputStream;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.krakenapps.pcap.decoder.ethernet.EthernetDecoder;
+import org.krakenapps.pcap.decoder.ethernet.EthernetType;
+import org.krakenapps.pcap.decoder.ip.IpDecoder;
+import org.krakenapps.pcap.decoder.ip.Ipv4Packet;
+import org.krakenapps.pcap.decoder.tcp.TcpPacket;
+import org.krakenapps.pcap.decoder.udp.UdpPacket;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class PcapParser implements MessageParser<JSONObject>, Serializable {
+
+  private static final Logger LOG = Logger.getLogger(PcapParser.class);
+
+  private EthernetDecoder ethernetDecoder;
+  private long timePrecisionDivisor = 1L;
+
+  public PcapParser withTsPrecision(String tsPrecision) {
+    if (tsPrecision.equalsIgnoreCase("MILLI")) {
+      //Convert nanos to millis
+      LOG.info("Configured for MILLI, setting timePrecisionDivisor to 1000000L" );
+      timePrecisionDivisor = 1000000L;
+    } else if (tsPrecision.equalsIgnoreCase("MICRO")) {
+      //Convert nanos to micro
+      LOG.info("Configured for MICRO, setting timePrecisionDivisor to 1000L" );
+      timePrecisionDivisor = 1000L;
+    } else if (tsPrecision.equalsIgnoreCase("NANO")) {
+      //Keep nano as is.
+      LOG.info("Configured for NANO, setting timePrecisionDivisor to 1L" );
+      timePrecisionDivisor = 1L;
+    } else {
+      LOG.info("bolt.parser.ts.precision not set. Default to NANO");
+      timePrecisionDivisor = 1L;
+    }
+    return this;
+  }
+
+  @Override
+  public void init() {
+    ethernetDecoder = new MetronEthernetDecoder();
+    IpDecoder ipDecoder = new IpDecoder();
+    ethernetDecoder.register(EthernetType.IPV4, ipDecoder);
+  }
+
+  @Override
+  public List<JSONObject> parse(byte[] pcap) {
+    List<JSONObject> messages = new ArrayList<>();
+    List<PacketInfo> packetInfoList = new ArrayList<>();
+    try {
+      packetInfoList = getPacketInfo(pcap);
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
+    for (PacketInfo packetInfo : packetInfoList) {
+      JSONObject message = (JSONObject) JSONValue.parse(packetInfo.getJsonIndexDoc());
+      messages.add(message);
+    }
+    return messages;
+  }
+
+  @Override
+  public boolean validate(JSONObject message) {
+    List<String> requiredFields = Arrays.asList("ip_src_addr",
+            "ip_dst_addr",
+            "ip_protocol",
+            "ip_src_port",
+            "ip_dst_port");
+    return message.keySet().containsAll(requiredFields);
+
+  }
+
+  /**
+   * Parses the.
+   * 
+   * @param pcap
+   *          the pcap
+   * @return the list * @throws IOException Signals that an I/O exception has
+   *         occurred. * @throws IOException * @throws IOException * @throws
+   *         IOException
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public List<PacketInfo> getPacketInfo(byte[] pcap) throws IOException {
+    List<PacketInfo> packetInfoList = new ArrayList<PacketInfo>();
+
+    PcapByteInputStream pcapByteInputStream = new PcapByteInputStream(pcap);
+
+    GlobalHeader globalHeader = pcapByteInputStream.getGlobalHeader();
+    while (true) {
+      try
+
+      {
+        PcapPacket packet = pcapByteInputStream.getPacket();
+        // int packetCounter = 0;
+        // PacketHeader packetHeader = null;
+        // Ipv4Packet ipv4Packet = null;
+        TcpPacket tcpPacket = null;
+        UdpPacket udpPacket = null;
+        // Buffer packetDataBuffer = null;
+        int sourcePort = 0;
+        int destinationPort = 0;
+
+        // LOG.trace("Got packet # " + ++packetCounter);
+
+        // LOG.trace(packet.getPacketData());
+        ethernetDecoder.decode(packet);
+
+        PacketHeader packetHeader = packet.getPacketHeader();
+        Ipv4Packet ipv4Packet = Ipv4Packet.parse(packet.getPacketData());
+
+        if (ipv4Packet.getProtocol() == Constants.PROTOCOL_TCP) {
+          tcpPacket = TcpPacket.parse(ipv4Packet);
+
+        }
+
+        if (ipv4Packet.getProtocol() == Constants.PROTOCOL_UDP) {
+
+          Buffer packetDataBuffer = ipv4Packet.getData();
+          sourcePort = packetDataBuffer.getUnsignedShort();
+          destinationPort = packetDataBuffer.getUnsignedShort();
+
+          udpPacket = new UdpPacket(ipv4Packet, sourcePort, destinationPort);
+
+          udpPacket.setLength(packetDataBuffer.getUnsignedShort());
+          udpPacket.setChecksum(packetDataBuffer.getUnsignedShort());
+          packetDataBuffer.discardReadBytes();
+          udpPacket.setData(packetDataBuffer);
+        }
+
+        packetInfoList.add(new PacketInfo(globalHeader, packetHeader, packet,
+            ipv4Packet, tcpPacket, udpPacket));
+      } catch (NegativeArraySizeException ignored) {
+        LOG.debug("Ignorable exception while parsing packet.", ignored);
+      } catch (EOFException eof) { // $codepro.audit.disable logExceptions
+        // Ignore exception and break
+        break;
+      }
+    }
+    return packetInfoList;
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   * @throws InterruptedException
+   *           the interrupted exception
+   */
+  public static void main(String[] args) throws IOException,
+      InterruptedException {
+
+    double totalIterations = 1000000;
+    double parallelism = 64;
+    double targetEvents = 1000000;
+    PcapParser pcapParser = new PcapParser();
+    File fin = new File("/Users/sheetal/Downloads/bad_packets/bad_packet_1405988125427.pcap");
+    File fout = new File(fin.getAbsolutePath() + ".parsed");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    long startTime = System.currentTimeMillis();
+    for (int i = 0; i < totalIterations; i++) {
+      List<PacketInfo> list = pcapParser.getPacketInfo(pcapBytes);
+
+      for (PacketInfo packetInfo : list) {
+        System.out.println(packetInfo.getJsonIndexDoc());
+      }
+    }
+    long endTime = System.currentTimeMillis();
+
+    System.out.println("Time taken to process " + totalIterations + " events :"
+        + (endTime - startTime) + " milliseconds");
+
+    System.out
+        .println("With parallelism of "
+            + parallelism
+            + " estimated time to process "
+            + targetEvents
+            + " events: "
+            + (((((endTime - startTime) / totalIterations) * targetEvents) / parallelism) / 1000)
+            + " seconds");
+    System.out.println("With parallelism of " + parallelism
+        + " estimated # of events per second: "
+        + ((parallelism * 1000 * totalIterations) / (endTime - startTime))
+        + " events");
+    System.out.println("Expected Parallelism to process " + targetEvents
+        + " events in a second: "
+        + (targetEvents / ((1000 * totalIterations) / (endTime - startTime))));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/snort/BasicSnortParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/snort/BasicSnortParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/snort/BasicSnortParser.java
new file mode 100644
index 0000000..4455e57
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/snort/BasicSnortParser.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.snort;
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+@SuppressWarnings("serial")
+public class BasicSnortParser extends BasicParser {
+
+	private static final Logger _LOG = LoggerFactory
+					.getLogger(BasicSnortParser.class);
+
+	/**
+	 * The default field names for Snort Alerts.
+	 */
+	private String[] fieldNames = new String[] {
+			"timestamp",
+			"sig_generator",
+			"sig_id",
+			"sig_rev",
+			"msg",
+			"protocol",
+			"ip_src_addr",
+			"ip_src_port",
+			"ip_dst_addr",
+			"ip_dst_port",
+			"ethsrc",
+			"ethdst",
+			"ethlen",
+			"tcpflags",
+			"tcpseq",
+			"tcpack",
+			"tcplen",
+			"tcpwindow",
+			"ttl",
+			"tos",
+			"id",
+			"dgmlen",
+			"iplen",
+			"icmptype",
+			"icmpcode",
+			"icmpid",
+			"icmpseq"
+	};
+
+
+	/**
+	 * Snort alerts are received as CSV records
+	 */
+	private String recordDelimiter = ",";
+
+	@Override
+	public void init() {
+
+	}
+
+	@Override
+	public List<JSONObject> parse(byte[] rawMessage) {
+
+		JSONObject jsonMessage = new JSONObject();
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+			// snort alerts expected as csv records
+			String csvMessage = new String(rawMessage, "UTF-8");
+			String[] records = csvMessage.split(recordDelimiter, -1);
+
+			// validate the number of fields
+			if (records.length != fieldNames.length) {
+				throw new IllegalArgumentException("Unexpected number of fields, expected: " + fieldNames.length + " got: " + records.length);
+			}
+			long timestamp = 0L;
+			// build the json record from each field
+			for (int i=0; i<records.length; i++) {
+			
+				String field = fieldNames[i];
+				String record = records[i];
+				
+				if("timestamp".equals(field)) {
+
+					// convert the timestamp to epoch
+					timestamp = toEpoch(record);
+					jsonMessage.put("timestamp", timestamp);
+					
+				} else {
+					jsonMessage.put(field, record);
+				}
+			}
+
+			// add original msg; required by 'checkForSchemaCorrectness'
+			jsonMessage.put("original_string", csvMessage);
+			jsonMessage.put("is_alert", "true");
+			messages.add(jsonMessage);
+		} catch (Exception e) {
+
+            _LOG.error("unable to parse message: " + rawMessage);
+            e.printStackTrace();
+            return null;
+        }
+
+		return messages;
+	}
+
+	/**
+	 * Parses Snort's default date-time representation and
+	 * converts to epoch.
+	 * @param snortDatetime Snort's default date-time as String '01/27-16:01:04.877970'
+	 * @return epoch time
+	 * @throws java.text.ParseException 
+	 */
+	private long toEpoch(String snortDatetime) throws ParseException {
+		
+		/*
+		 * TODO how does Snort not embed the year in their default timestamp?! need to change this in 
+		 * Snort configuration.  for now, just assume current year.
+		 */
+		int year = Calendar.getInstance().get(Calendar.YEAR);
+		String withYear = Integer.toString(year) + " " + snortDatetime;
+		
+		// convert to epoch time
+		SimpleDateFormat df = new SimpleDateFormat("yyyy MM/dd-HH:mm:ss.S");
+		Date date = df.parse(withYear);
+		return date.getTime();
+	}
+
+	public String getRecordDelimiter() {
+		return this.recordDelimiter;
+	}
+
+	public void setRecordDelimiter(String recordDelimiter) {
+		this.recordDelimiter = recordDelimiter;
+	}
+
+	public String[] getFieldNames() {
+		return this.fieldNames;
+	}
+
+	public void setFieldNames(String[] fieldNames) {
+		this.fieldNames = fieldNames;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParser.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParser.java
new file mode 100644
index 0000000..40badcd
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/sourcefire/BasicSourcefireParser.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.parsers.sourcefire;
+
+import org.apache.metron.parsers.BasicParser;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+@SuppressWarnings("serial")
+public class BasicSourcefireParser extends BasicParser {
+
+	private static final Logger _LOG = LoggerFactory
+					.getLogger(BasicSourcefireParser.class);
+
+	public static final String hostkey = "host";
+	String domain_name_regex = "([^\\.]+)\\.([a-z]{2}|[a-z]{3}|([a-z]{2}\\.[a-z]{2}))$";
+	String sidRegex = "(.*)(\\[[0-9]+:[0-9]+:[0-9]\\])(.*)$";
+	//String sidRegex = "(\\[[0-9]+:[0-9]+:[0-9]\\])(.*)$";
+	Pattern sidPattern = Pattern.compile(sidRegex);	
+	Pattern pattern = Pattern.compile(domain_name_regex);
+
+	@Override
+	public void init() {
+
+	}
+
+	@SuppressWarnings({ "unchecked", "unused" })
+	public List<JSONObject> parse(byte[] msg) {
+
+		JSONObject payload = new JSONObject();
+		String toParse = "";
+		List<JSONObject> messages = new ArrayList<>();
+		try {
+
+			toParse = new String(msg, "UTF-8");
+			_LOG.debug("Received message: " + toParse);
+
+			String tmp = toParse.substring(toParse.lastIndexOf("{"));
+			payload.put("key", tmp);
+
+			String protocol = tmp.substring(tmp.indexOf("{") + 1,
+					tmp.indexOf("}")).toLowerCase();
+			String source = tmp.substring(tmp.indexOf("}") + 1,
+					tmp.indexOf("->")).trim();
+			String dest = tmp.substring(tmp.indexOf("->") + 2, tmp.length())
+					.trim();
+
+			payload.put("protocol", protocol);
+
+			String source_ip = "";
+			String dest_ip = "";
+
+			if (source.contains(":")) {
+				String parts[] = source.split(":");
+				payload.put("ip_src_addr", parts[0]);
+				payload.put("ip_src_port", parts[1]);
+				source_ip = parts[0];
+			} else {
+				payload.put("ip_src_addr", source);
+				source_ip = source;
+
+			}
+
+			if (dest.contains(":")) {
+				String parts[] = dest.split(":");
+				payload.put("ip_dst_addr", parts[0]);
+				payload.put("ip_dst_port", parts[1]);
+				dest_ip = parts[0];
+			} else {
+				payload.put("ip_dst_addr", dest);
+				dest_ip = dest;
+			}
+			long timestamp = System.currentTimeMillis();
+			payload.put("timestamp", timestamp);
+			
+			Matcher sidMatcher = sidPattern.matcher(toParse);
+			String originalString = null;
+			String signatureId = "";
+			if (sidMatcher.find()) {
+				signatureId = sidMatcher.group(2);
+				originalString = sidMatcher.group(1) +" "+ sidMatcher.group(2) + " " + sidMatcher.group(3);
+			} else {
+				_LOG.warn("Unable to find SID in message: " + toParse);
+				originalString = toParse;
+			}
+			payload.put("original_string", originalString);
+			payload.put("signature_id", signatureId);
+			messages.add(payload);
+			return messages;
+		} catch (Exception e) {
+			e.printStackTrace();
+			_LOG.error("Failed to parse: " + toParse);
+			return null;
+		}
+	}
+
+	
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/GrokUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/GrokUtils.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/GrokUtils.java
new file mode 100644
index 0000000..e60e5f8
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/GrokUtils.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.utils;
+import java.io.Serializable;
+
+import com.google.code.regexp.Pattern;
+
+public class GrokUtils implements Serializable {
+
+	private static final long serialVersionUID = 7465176887422419286L;
+	/**
+	   * Extract Grok patter like %{FOO} to FOO, Also Grok pattern with semantic.
+	   */
+	  public static final Pattern GROK_PATTERN = Pattern.compile(
+	      "%\\{" +
+	      "(?<name>" +
+	        "(?<pattern>[A-z0-9]+)" +
+	          "(?::(?<subname>[A-z0-9_:;\\/\\s\\.]+))?" +
+	          ")" +
+	          "(?:=(?<definition>" +
+	            "(?:" +
+	            "(?:[^{}]+|\\.+)+" +
+	            ")+" +
+	            ")" +
+	      ")?" +
+	      "\\}");
+
+	}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/ParserUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/ParserUtils.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/ParserUtils.java
new file mode 100644
index 0000000..f98f996
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/utils/ParserUtils.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.utils;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.json.simple.JSONObject;
+
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Locale;
+import java.util.TimeZone;
+
public class ParserUtils {

  public static final String PREFIX = "stream2file";
  public static final String SUFFIX = ".tmp";

  /**
   * Copies an input stream into a temporary file (deleted on JVM exit).
   *
   * @param in the stream to copy; not closed by this method
   * @return the temporary file holding the stream's contents
   * @throws IOException if the file cannot be created or written
   */
  public static File stream2file(InputStream in) throws IOException {
    final File tempFile = File.createTempFile(PREFIX, SUFFIX);
    tempFile.deleteOnExit();
    try (FileOutputStream out = new FileOutputStream(tempFile)) {
      // plain buffered copy; avoids the commons-io dependency
      byte[] buffer = new byte[8192];
      int read;
      while ((read = in.read(buffer)) != -1) {
        out.write(buffer, 0, read);
      }
    }
    return tempFile;
  }

  /**
   * Converts a (month-abbreviation, day, time) triple — as found in syslog
   * style timestamps — to epoch milliseconds, assuming the current year.
   *
   * @param m  English month abbreviation, e.g. "Jan"
   * @param d  day of month, 1 or 2 digits
   * @param ts time of day as "HH:mm:ss"
   * @param adjust_timezone when true, interpret the timestamp as GMT;
   *                        otherwise use the JVM default time zone
   * @return epoch time in milliseconds
   * @throws ParseException if the month or assembled timestamp cannot be parsed
   */
  public static Long convertToEpoch(String m, String d, String ts,
                                    boolean adjust_timezone) throws ParseException {
    d = d.trim();
    if (d.length() < 2) {
      d = "0" + d;
    }
    // Resolve the month abbreviation ("Jan", "Feb", ...) to a calendar month.
    Date date = new SimpleDateFormat("MMM", Locale.ENGLISH).parse(m);
    Calendar cal = Calendar.getInstance();
    cal.setTime(date);
    // Calendar.MONTH is zero-based (January == 0); add 1 so the one-based
    // "MM" pattern below parses the intended month. Without this the result
    // was one month early — January lenient-rolled back to December of the
    // previous year.
    String month = String.valueOf(cal.get(Calendar.MONTH) + 1);
    int year = Calendar.getInstance().get(Calendar.YEAR);
    if (month.length() < 2) {
      month = "0" + month;
    }
    String conglomerated_ts = year + "-" + month + "-" + d + " " + ts;
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    if (adjust_timezone) {
      sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
    }
    date = sdf.parse(conglomerated_ts);
    return date.getTime();
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/writer/KafkaWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/writer/KafkaWriter.java b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/writer/KafkaWriter.java
new file mode 100644
index 0000000..d168e29
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/java/org/apache/metron/parsers/writer/KafkaWriter.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.interfaces.MessageWriter;
+import org.json.simple.JSONObject;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+
+public class KafkaWriter implements MessageWriter<JSONObject>, Serializable {
+
+  private String brokerUrl;
+  private String keySerializer = "org.apache.kafka.common.serialization.StringSerializer";
+  private String valueSerializer = "org.apache.kafka.common.serialization.StringSerializer";
+  private int requiredAcks = 1;
+  private KafkaProducer kafkaProducer;
+
+  public KafkaWriter(String brokerUrl) {
+    this.brokerUrl = brokerUrl;
+  }
+
+  public KafkaWriter withKeySerializer(String keySerializer) {
+    this.keySerializer = keySerializer;
+    return this;
+  }
+
+  public KafkaWriter withValueSerializer(String valueSerializer) {
+    this.valueSerializer = valueSerializer;
+    return this;
+  }
+
+  public KafkaWriter withRequiredAcks(int requiredAcks) {
+    this.requiredAcks = requiredAcks;
+    return this;
+  }
+
+  @Override
+  public void init() {
+    Map<String, Object> producerConfig = new HashMap<>();
+    producerConfig.put("bootstrap.servers", brokerUrl);
+    producerConfig.put("key.serializer", keySerializer);
+    producerConfig.put("value.serializer", valueSerializer);
+    producerConfig.put("request.required.acks", requiredAcks);
+    this.kafkaProducer = new KafkaProducer<>(producerConfig);
+  }
+
+  @SuppressWarnings("unchecked")
+  @Override
+  public void write(String sourceType, Configurations configurations, Tuple tuple, JSONObject message) throws Exception {
+    kafkaProducer.send(new ProducerRecord<String, String>(Constants.ENRICHMENT_TOPIC, message.toJSONString()));
+  }
+
+  @Override
+  public void close() throws Exception {
+    kafkaProducer.close();
+  }
+}


[11/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/pom.xml b/metron-platform/metron-pcap/pom.xml
new file mode 100644
index 0000000..7125382
--- /dev/null
+++ b/metron-platform/metron-pcap/pom.xml
@@ -0,0 +1,103 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+  Foundation (ASF) under one or more contributor license agreements. See the
+  NOTICE file distributed with this work for additional information regarding
+  copyright ownership. The ASF licenses this file to You under the Apache License,
+  Version 2.0 (the "License"); you may not use this file except in compliance
+  with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software distributed
+  under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+  OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-pcap</artifactId>
+    <description>Metron Pcap</description>
+    <properties>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-hbase</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_guava_version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>com.googlecode.json-simple</groupId>
+            <artifactId>json-simple</artifactId>
+            <version>${global_json_simple_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.9.2</artifactId>
+            <version>${global_kafka_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jmx</groupId>
+                    <artifactId>jmxri</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.sun.jdmk</groupId>
+                    <artifactId>jmxtools</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.jms</groupId>
+                    <artifactId>jms</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-kafka</artifactId>
+            <version>${global_storm_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>org.apache.curator</artifactId>
+                    <groupId>curator-client</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/Constants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/Constants.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/Constants.java
new file mode 100644
index 0000000..99945cb
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/Constants.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+
/**
 * Protocol numbers and formatting constants shared by the pcap classes.
 *
 * @author sheetal
 * @version $Revision: 1.0 $
 */
public interface Constants {

  /** IANA protocol number for TCP. */
  int PROTOCOL_TCP = 6;

  /** IANA protocol number for UDP. */
  int PROTOCOL_UDP = 17;

  /** Separator character used when composing document keys. */
  char DOCUMENT_KEY_SEPARATOR = '-';

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
new file mode 100644
index 0000000..27fae51
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/IEEE_802_1Q.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
/**
 * Value holder for the three fields of an IEEE 802.1Q VLAN tag's
 * Tag Control Information (TCI): priority code point, drop eligible
 * indicator, and VLAN identifier.
 */
public class IEEE_802_1Q {

  /** Priority Code Point (PCP) — frame priority, 3 bits on the wire. */
  int priorityCodePoint = 0;

  /** Drop Eligible Indicator (DEI) — 1 bit on the wire. */
  int dropEligibleIndicator = 0;

  /** VLAN Identifier (VID) — 12 bits on the wire. */
  int vLANIdentifier = 0;

  /**
   * Creates a tag from its three TCI components.
   *
   * @param priorityCodePoint the PCP value
   * @param dropEligibleIndicator the DEI value
   * @param vLANIdentifier the VID value
   */
  public IEEE_802_1Q(int priorityCodePoint, int dropEligibleIndicator, int vLANIdentifier) {
    this.priorityCodePoint = priorityCodePoint;
    this.dropEligibleIndicator = dropEligibleIndicator;
    this.vLANIdentifier = vLANIdentifier;
  }

  /** @return the priority code point (PCP) */
  public int getPriorityCodePoint() {
    return priorityCodePoint;
  }

  /** @return the drop eligible indicator (DEI) */
  public int getDropEligibleIndicator() {
    return dropEligibleIndicator;
  }

  /** @return the VLAN identifier (VID) */
  public int getvLANIdentifier() {
    return vLANIdentifier;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
new file mode 100644
index 0000000..1609887
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/MetronEthernetDecoder.java
@@ -0,0 +1,134 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+import java.util.BitSet;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArraySet;
+
+import org.krakenapps.pcap.decoder.ethernet.EthernetDecoder;
+import org.krakenapps.pcap.decoder.ethernet.EthernetFrame;
+import org.krakenapps.pcap.decoder.ethernet.EthernetProcessor;
+import org.krakenapps.pcap.decoder.ethernet.MacAddress;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+
+public class MetronEthernetDecoder extends EthernetDecoder {
+
+  private Set<EthernetProcessor> callbacks;
+  private Map<Integer, Set<EthernetProcessor>> typeCallbacks;
+
+  public MetronEthernetDecoder() {
+    callbacks = new CopyOnWriteArraySet<EthernetProcessor>();
+    typeCallbacks = new ConcurrentHashMap<Integer, Set<EthernetProcessor>>();
+  }
+
+  public void register(EthernetProcessor processor) {
+    this.callbacks.add(processor);
+  }
+
+  public void register(int type, EthernetProcessor processor) {
+    Set<EthernetProcessor> processors = typeCallbacks.get(type);
+    if (processors == null) {
+      processors = new HashSet<EthernetProcessor>();
+      typeCallbacks.put(type, processors);
+    }
+
+    processors.add(processor);
+  }
+
+  public void unregister(EthernetProcessor processor) {
+    this.callbacks.remove(processor);
+  }
+
+  public void unregister(int type, EthernetProcessor processor) {
+    Set<EthernetProcessor> processors = typeCallbacks.get(type);
+    if (processors == null)
+      return;
+
+    processors.remove(processor);
+  }
+
+  public void decode(PcapPacket packet) {
+    // do not reorder following codes (parse sequence)
+    MacAddress destination = getMacAddress(packet.getPacketData());
+    MacAddress source = getMacAddress(packet.getPacketData());
+    int type = getEtherType(packet.getPacketData());
+
+    if (type == 0x8100) {
+      // It is 802.1Q VLAN tag
+      IEEE_802_1Q iee802_1qTag = get802_1qTag(packet.getPacketData());
+      // Now get the type
+      type = getEtherType(packet.getPacketData());
+    }
+
+    Buffer buffer = packet.getPacketData();
+    buffer.discardReadBytes();
+
+    EthernetFrame frame = new EthernetFrame(source, destination, type, buffer);
+    frame.setPcapPacket(packet);
+    dispatch(frame);
+  }
+
+  private MacAddress getMacAddress(Buffer data) {
+    byte[] mac = new byte[6];
+    data.gets(mac, 0, 6);
+    return new MacAddress(mac);
+  }
+
+  private int getEtherType(Buffer data) {
+    return ((int) data.getShort()) & 0x0000FFFF;
+  }
+
+  private IEEE_802_1Q get802_1qTag(Buffer data) {
+
+    // reference http://en.wikipedia.org/wiki/EtherType &
+    // http://en.wikipedia.org/wiki/IEEE_802.1Q
+    byte[] b802_1qTag = new byte[2];
+    data.gets(b802_1qTag, 0, 2);
+    BitSet bits = BitSet.valueOf(b802_1qTag);
+    int pcp = convertBitToInt(bits.get(0, 3));
+    int dei = convertBitToInt(bits.get(3, 4));
+    int vid = convertBitToInt(bits.get(4, 16));
+
+    return new IEEE_802_1Q(pcp, dei, vid);
+  }
+
+  public static int convertBitToInt(BitSet bits) {
+    int value = 0;
+    for (int i = 0; i < bits.length(); ++i) {
+      value += bits.get(i) ? (1 << i) : 0;
+    }
+    return value;
+  }
+
+  private void dispatch(EthernetFrame frame) {
+    for (EthernetProcessor processor : callbacks)
+      processor.process(frame);
+
+    Set<EthernetProcessor> processors = typeCallbacks.get(frame.getType());
+    if (processors == null)
+      return;
+
+    for (EthernetProcessor processor : processors)
+      processor.process(frame.dup());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PacketInfo.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PacketInfo.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PacketInfo.java
new file mode 100644
index 0000000..fcaf1b0
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PacketInfo.java
@@ -0,0 +1,470 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+import java.text.MessageFormat;
+import org.apache.log4j.Logger;
+
+import org.krakenapps.pcap.decoder.ip.Ipv4Packet;
+import org.krakenapps.pcap.decoder.tcp.TcpPacket;
+import org.krakenapps.pcap.decoder.udp.UdpPacket;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+
+import org.apache.metron.pcap.utils.PcapUtils;
+
+/**
+ * The Class PacketInfo.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PacketInfo {
+
+  /** The packetHeader. */
+  private PacketHeader packetHeader = null;
+
+  /** The packet. */
+  private PcapPacket packet = null;
+
+  /** The ipv4 packet. */
+  private Ipv4Packet ipv4Packet = null;
+
+  /** The tcp packet. */
+  private TcpPacket tcpPacket = null;
+
+  /** The udp packet. */
+  private UdpPacket udpPacket = null;
+
+  /** The global header. */
+  private GlobalHeader globalHeader = null;
+
+  /** The Constant globalHeaderJsonTemplateSB. */
+  private static final StringBuffer globalHeaderJsonTemplateSB = new StringBuffer();
+
+  /** The Constant ipv4HeaderJsonTemplateSB. */
+  private static final StringBuffer ipv4HeaderJsonTemplateSB = new StringBuffer();
+
+  /** The Constant tcpHeaderJsonTemplateSB. */
+  private static final StringBuffer tcpHeaderJsonTemplateSB = new StringBuffer();
+
+  /** The Constant udpHeaderJsonTemplateSB. */
+  private static final StringBuffer udpHeaderJsonTemplateSB = new StringBuffer();
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger.getLogger(PacketInfo.class);
+  
+  static {
+    globalHeaderJsonTemplateSB.append("<\"global_header\":<\"pcap_id\":\"").append("{0}").append('"');
+    globalHeaderJsonTemplateSB.append(",\"inc_len\":").append("{1}");
+    globalHeaderJsonTemplateSB.append(",\"orig_len\":").append("{2}");
+    globalHeaderJsonTemplateSB.append(",\"ts_sec\":").append("{3}");
+    globalHeaderJsonTemplateSB.append(",\"ts_usec\":").append("{4}");
+    globalHeaderJsonTemplateSB.append(">,"); // NOPMD by sheetal on 1/29/14 2:37
+    // PM
+
+    // ipv4 header
+
+    ipv4HeaderJsonTemplateSB.append("\"ipv4_header\":");
+
+    ipv4HeaderJsonTemplateSB.append("\"ip_dst\":").append("{0}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_dst_addr\":\"").append("{1}");
+    ipv4HeaderJsonTemplateSB.append("\",\"ip_flags\":").append("{2}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_fragment_offset\":").append("{3}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_header_checksum\":").append("{4}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_id\":").append("{5}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_header_length\":").append("{6}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_protocol\":").append("{7}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_src\":").append("{8}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_src_addr\":\"").append("{9}");
+    ipv4HeaderJsonTemplateSB.append("\",\"ip_tos\":").append("{10}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_total_length\":").append("{11}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_ttl\":").append("{12}");
+    ipv4HeaderJsonTemplateSB.append(",\"ip_version\":").append("{13}");
+    ipv4HeaderJsonTemplateSB.append('>');
+
+    // tcp header
+    tcpHeaderJsonTemplateSB.append(",\"tcp_header\":<\"ack\":").append("{0}");
+    tcpHeaderJsonTemplateSB.append(",\"checksum\":").append("{1}");
+    tcpHeaderJsonTemplateSB.append(",\"data_length\":").append("{2}");
+    tcpHeaderJsonTemplateSB.append(",\"data_offset\":").append("{3}");
+    tcpHeaderJsonTemplateSB.append(",\"dst_addr\":\"").append("{4}");
+    tcpHeaderJsonTemplateSB.append("\",\"dst_port\":").append("{5}");
+    tcpHeaderJsonTemplateSB.append(",\"direction\":").append("{6}");
+    tcpHeaderJsonTemplateSB.append(",\"flags\":").append("{7}");
+    tcpHeaderJsonTemplateSB.append(",\"reassembled_length \":").append("{8}");
+    tcpHeaderJsonTemplateSB.append(",\"relative_ack\":").append("{9}");
+    tcpHeaderJsonTemplateSB.append(",\"relative_seq\":").append("{10}");
+    tcpHeaderJsonTemplateSB.append(",\"seq\":").append("{11}");
+    tcpHeaderJsonTemplateSB.append(",\"session_key\":\"").append("{12}");
+    tcpHeaderJsonTemplateSB.append("\",\"src_addr\":\"").append("{13}");
+    tcpHeaderJsonTemplateSB.append("\",\"src_port\":").append("{14}");
+    tcpHeaderJsonTemplateSB.append(",\"total_length\":").append("{15}");
+    tcpHeaderJsonTemplateSB.append(",\"urgent_pointer\":").append("{16}");
+    tcpHeaderJsonTemplateSB.append(",\"window\":").append("{17}");
+    tcpHeaderJsonTemplateSB.append(">>");
+
+    // udp headers
+    udpHeaderJsonTemplateSB.append(",\"udp_header\":<\"checksum\":").append("{0}");
+    udpHeaderJsonTemplateSB.append(",\"dst_port\":").append("{1}");
+    udpHeaderJsonTemplateSB.append(",\"length\":").append("{2}");
+    udpHeaderJsonTemplateSB.append(",\"src_port\":").append("{3}");
+    udpHeaderJsonTemplateSB.append(",\"dst_addr\":\"").append("{4}");
+    udpHeaderJsonTemplateSB.append("\",\"src_addr\":\"").append("{5}").append('"');
+    tcpHeaderJsonTemplateSB.append(">>");
+
+  }
+
+  /** The Constant globalHeaderJsonTemplateString. */
+  private static final String globalHeaderJsonTemplateString = globalHeaderJsonTemplateSB.toString();
+
+  /** The Constant ipv4HeaderJsonTemplateString. */
+  private static final String ipv4HeaderJsonTemplateString = ipv4HeaderJsonTemplateSB.toString();
+
+  /** The Constant tcpHeaderJsonTemplateString. */
+  private static final String tcpHeaderJsonTemplateString = tcpHeaderJsonTemplateSB.toString();
+
+  /** The Constant udpHeaderJsonTemplateString. */
+  private static final String udpHeaderJsonTemplateString = udpHeaderJsonTemplateSB.toString();
+
+  /**
+   * Instantiates a new packet info.
+   * 
+   * @param globalHeader
+   *          the global header
+   * @param packetHeader
+   *          the packet header
+   * @param packet
+   *          the packet
+   * @param ipv4Packet
+   *          the ipv4 packet
+   * @param tcpPacket
+   *          the tcp packet
+   * @param udpPacket
+   *          the udp packet
+   */
+  public PacketInfo(GlobalHeader globalHeader, PacketHeader packetHeader, PcapPacket packet, Ipv4Packet ipv4Packet, TcpPacket tcpPacket,
+      UdpPacket udpPacket) {
+    this.packetHeader = packetHeader;
+    this.packet = packet;
+    this.ipv4Packet = ipv4Packet;
+    this.tcpPacket = tcpPacket;
+    this.udpPacket = udpPacket;
+    this.globalHeader = globalHeader;
+  }
+
+  /**
+   * Gets the global header.
+   * 
+   * @return the global header
+   */
+  public GlobalHeader getGlobalHeader() {
+    return globalHeader;
+  }
+
+  /**
+   * Gets the packet header.
+   * 
+   * 
+   * @return the packet header
+   */
+  public PacketHeader getPacketHeader() {
+    return packetHeader;
+  }
+
+  /**
+   * Gets the packet.
+   * 
+   * 
+   * @return the packet
+   */
+  public PcapPacket getPacket() {
+    return packet;
+  }
+
+  /**
+   * Gets the ipv4 packet.
+   * 
+   * 
+   * @return the ipv4 packet
+   */
+  public Ipv4Packet getIpv4Packet() {
+    return ipv4Packet;
+  }
+
+  /**
+   * Gets the tcp packet.
+   * 
+   * 
+   * @return the tcp packet
+   */
+  public TcpPacket getTcpPacket() {
+    return tcpPacket;
+  }
+
+  /**
+   * Gets the udp packet.
+   * 
+   * 
+   * @return the udp packet
+   */
+  public UdpPacket getUdpPacket() {
+    return udpPacket;
+  }
+
+  /**
+   * Gets the key.
+   * 
+   * 
+   * @return the key
+   */
+  public String getKey() {
+    int sourcePort = 0;
+    int destinationPort = 0;
+    if (Constants.PROTOCOL_UDP == ipv4Packet.getProtocol()) {
+      sourcePort = udpPacket.getSourcePort();
+
+      destinationPort = udpPacket.getDestinationPort();
+
+    } else if (Constants.PROTOCOL_TCP == ipv4Packet.getProtocol()) {
+      sourcePort = tcpPacket.getSourcePort();
+
+      destinationPort = tcpPacket.getDestinationPort();
+
+    }
+
+    return PcapUtils.getSessionKey(ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet.getDestinationAddress().getHostAddress(),
+        ipv4Packet.getProtocol(), sourcePort, destinationPort, ipv4Packet.getId(), ipv4Packet.getFragmentOffset());
+
+  }
+
+  /**
+   * Gets the short key
+   * 
+   * 
+   * @return the short key
+   */
+  public String getShortKey() {
+	int sourcePort = 0;
+	int destinationPort = 0;
+	if(Constants.PROTOCOL_UDP == ipv4Packet.getProtocol()) {
+		sourcePort = udpPacket.getSourcePort();
+		destinationPort = udpPacket.getDestinationPort();
+	} else if (Constants.PROTOCOL_TCP == ipv4Packet.getProtocol()) {
+		sourcePort = tcpPacket.getSourcePort();
+		destinationPort = tcpPacket.getDestinationPort();
+	}
+	  
+	return PcapUtils.getShortSessionKey(ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet.getDestinationAddress().getHostAddress(),
+	    ipv4Packet.getProtocol(), sourcePort, destinationPort);
+			 
+  }
+  
+  /**
+   * Gets the json doc.
+   * 
+   * 
+   * @return the json doc
+   */
+  public String getJsonDoc() {
+
+    return getJsonDocUsingSBAppend();
+  }
+
+  /**
+   * Gets the json doc.
+   * 
+   * 
+   * @return the json doc
+   */
+  public String getJsonIndexDoc() {
+
+    return getJsonIndexDocUsingSBAppend();
+  }
+
+  /**
+   * Gets the json doc using sb append.
+   * 
+   * @return the json doc using sb append
+   */
+  private String getJsonDocUsingSBAppend() {
+
+	
+    StringBuffer jsonSb = new StringBuffer(1024);
+
+    // global header
+    jsonSb.append("{\"global_header\":{\"pcap_id\":\"").append(getKey());
+    jsonSb.append("\",\"inc_len\":").append(packetHeader.getInclLen());
+    jsonSb.append(",\"orig_len\":").append(packetHeader.getOrigLen());
+    jsonSb.append(",\"ts_sec\":").append(packetHeader.getTsSec());
+    jsonSb.append(",\"ts_usec\":").append(packetHeader.getTsUsec());
+    jsonSb.append("},"); // NOPMD by sheetal on 1/29/14 2:37 PM
+
+    // ipv4 header
+
+    jsonSb.append("\"ipv4_header\":{");
+
+    jsonSb.append("\"ip_dst\":").append(ipv4Packet.getDestination());
+    jsonSb.append(",\"ip_dst_addr\":\"").append(ipv4Packet.getDestinationAddress().getHostAddress());
+    jsonSb.append("\",\"ip_flags\":").append(ipv4Packet.getFlags());
+    jsonSb.append(",\"ip_fragment_offset\":").append(ipv4Packet.getFragmentOffset());
+    jsonSb.append(",\"ip_header_checksum\":").append(ipv4Packet.getHeaderChecksum());
+    jsonSb.append(",\"ip_id\":").append(ipv4Packet.getId());
+    jsonSb.append(",\"ip_header_length\":").append(ipv4Packet.getIhl());
+    jsonSb.append(",\"ip_protocol\":").append(ipv4Packet.getProtocol());
+    jsonSb.append(",\"ip_src\":").append(ipv4Packet.getSource());
+    jsonSb.append(",\"ip_src_addr\":\"").append(ipv4Packet.getSourceAddress().getHostAddress());
+    jsonSb.append("\",\"ip_tos\":").append(ipv4Packet.getTos());
+    jsonSb.append(",\"ip_total_length\":").append(ipv4Packet.getTotalLength());
+    jsonSb.append(",\"ip_ttl\":").append(ipv4Packet.getTtl());
+    jsonSb.append(",\"ip_version\":").append(ipv4Packet.getVersion());
+    jsonSb.append('}');
+
+    // tcp header
+    if (tcpPacket != null) {
+      jsonSb.append(",\"tcp_header\":{\"ack\":").append(tcpPacket.getAck());
+      jsonSb.append(",\"checksum\":").append(tcpPacket.getChecksum());
+      jsonSb.append(",\"data_length\":").append(tcpPacket.getDataLength());
+      jsonSb.append(",\"data_offset\":").append(tcpPacket.getDataOffset());
+      jsonSb.append(",\"dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
+      jsonSb.append("\",\"dst_port\":").append(tcpPacket.getDestinationPort());
+      jsonSb.append(",\"direction\":").append(tcpPacket.getDirection());
+      jsonSb.append(",\"flags\":").append(tcpPacket.getFlags());
+      jsonSb.append(",\"reassembled_length \":").append(tcpPacket.getReassembledLength());
+      jsonSb.append(",\"relative_ack\":").append(tcpPacket.getRelativeAck());
+      jsonSb.append(",\"relative_seq\":").append(tcpPacket.getRelativeSeq());
+      jsonSb.append(",\"seq\":").append(tcpPacket.getSeq());
+      jsonSb.append(",\"session_key\":\"").append(tcpPacket.getSessionKey());
+      jsonSb.append("\",\"src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
+      jsonSb.append("\",\"src_port\":").append(tcpPacket.getSourcePort());
+      jsonSb.append(",\"total_length\":").append(tcpPacket.getTotalLength());
+      jsonSb.append(",\"urgent_pointer\":").append(tcpPacket.getUrgentPointer());
+      jsonSb.append(",\"window\":").append(tcpPacket.getWindow());
+      jsonSb.append('}');
+    }
+
+    // udp headers
+    if (udpPacket != null) {
+      jsonSb.append(",\"udp_header\":{\"checksum\":").append(udpPacket.getChecksum());
+      jsonSb.append(",\"dst_port\":").append(udpPacket.getDestinationPort());
+      jsonSb.append(",\"length\":").append(udpPacket.getLength());
+      jsonSb.append(",\"src_port\":").append(udpPacket.getSourcePort());
+      jsonSb.append(",\"dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
+      jsonSb.append("\",\"src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
+      jsonSb.append("\"}");
+    }
+
+    jsonSb.append('}');
+
+    return jsonSb.toString();
+  }
+
+  /**
+   * Gets the json doc using message format.
+   * 
+   * @return the json doc using message format
+   */
+  private String getJsonDocUsingMessageFormat() {
+
+    StringBuffer jsonSb = new StringBuffer(600);
+
+    jsonSb.append(MessageFormat.format(globalHeaderJsonTemplateString, getKey(), packetHeader.getInclLen(), packetHeader.getOrigLen(),
+        packetHeader.getTsSec(), packetHeader.getTsUsec()));
+
+    jsonSb.append(MessageFormat.format(ipv4HeaderJsonTemplateString, ipv4Packet.getDestination(), ipv4Packet.getDestinationAddress()
+        .getHostAddress(), ipv4Packet.getFlags(), ipv4Packet.getFragmentOffset(), ipv4Packet.getHeaderChecksum(), ipv4Packet.getId(),
+        ipv4Packet.getIhl(), ipv4Packet.getProtocol(), ipv4Packet.getSource(), ipv4Packet.getSourceAddress().getHostAddress(), ipv4Packet
+            .getTos(), ipv4Packet.getTotalLength(), ipv4Packet.getTtl(), ipv4Packet.getVersion()));
+
+    // tcp header
+    if (tcpPacket != null) {
+      jsonSb.append(MessageFormat.format(tcpHeaderJsonTemplateString, tcpPacket.getAck(), tcpPacket.getChecksum(), tcpPacket
+          .getDataLength(), tcpPacket.getDataOffset(), tcpPacket.getDestinationAddress().getHostAddress(), tcpPacket.getDestinationPort(),
+          tcpPacket.getDirection(), tcpPacket.getFlags(), tcpPacket.getReassembledLength(), tcpPacket.getRelativeAck(), tcpPacket
+              .getRelativeSeq(), tcpPacket.getSeq(), tcpPacket.getSessionKey(), tcpPacket.getSourceAddress().getHostAddress(), tcpPacket
+              .getSourcePort(), tcpPacket.getTotalLength(), tcpPacket.getUrgentPointer(), tcpPacket.getWindow()));
+    } else
+    // udp headers
+    if (udpPacket != null) {
+      jsonSb.append(MessageFormat.format(udpHeaderJsonTemplateString, udpPacket.getChecksum(), udpPacket.getDestinationPort(),
+          udpPacket.getLength(), udpPacket.getSourcePort(), udpPacket.getDestination().getAddress().getHostAddress(), udpPacket.getSource()
+              .getAddress().getHostAddress()));
+
+    } else {
+      jsonSb.append('}');
+    }
+    return jsonSb.toString().replace('<', '{').replace('>', '}');
+  }
+
+  /**
+   * Gets the json index doc using sb append.
+   * 
+   * @return the json index doc using sb append
+   */
+  private String getJsonIndexDocUsingSBAppend() {
+
+	Long ts_micro = getPacketTimeInNanos() / 1000L;
+	StringBuffer jsonSb = new StringBuffer(175);
+
+	jsonSb.append("{\"pcap_id\":\"").append(getShortKey());
+    jsonSb.append("\",\"ip_protocol\":").append(ipv4Packet.getProtocol());
+    jsonSb.append(",\"ip_id\":").append(ipv4Packet.getId());
+    jsonSb.append(",\"frag_offset\":").append(ipv4Packet.getFragmentOffset());
+    jsonSb.append(",\"ts_micro\":").append(ts_micro);
+
+
+    // tcp header
+    if (tcpPacket != null) {
+      jsonSb.append(",\"ip_src_addr\":\"").append(tcpPacket.getSourceAddress().getHostAddress());
+      jsonSb.append("\",\"ip_src_port\":").append(tcpPacket.getSourcePort());
+      jsonSb.append(",\"ip_dst_addr\":\"").append(tcpPacket.getDestinationAddress().getHostAddress());
+      jsonSb.append("\",\"ip_dst_port\":").append(tcpPacket.getDestinationPort());
+    }
+
+    // udp headers
+    if (udpPacket != null) {
+      jsonSb.append(",\"ip_src_addr\":\"").append(udpPacket.getSource().getAddress().getHostAddress());
+      jsonSb.append("\",\"ip_src_port\":").append(udpPacket.getSourcePort());
+      jsonSb.append(",\"ip_dst_addr\":\"").append(udpPacket.getDestination().getAddress().getHostAddress());
+      jsonSb.append("\",\"ip_dst_port\":").append(udpPacket.getDestinationPort());
+    }
+
+    jsonSb.append('}');
+
+    return jsonSb.toString();
+  }
+  
+  public long getPacketTimeInNanos()
+  {
+	  if ( getGlobalHeader().getMagicNumber() == 0xa1b2c3d4 || getGlobalHeader().getMagicNumber() == 0xd4c3b2a1 )
+	  {
+		  //Time is in micro assemble as nano
+		  LOG.info("Times are in micro according to the magic number");
+		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ; 
+	  }
+	  else if ( getGlobalHeader().getMagicNumber() == 0xa1b23c4d || getGlobalHeader().getMagicNumber() == 0x4d3cb2a1 ) {
+		//Time is in nano assemble as nano
+		  LOG.info("Times are in nano according to the magic number");
+		  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() ; 
+	  }
+	  //Default assume time is in micro assemble as nano
+	  LOG.warn("Unknown magic number. Defaulting to micro");
+	  return getPacketHeader().getTsSec() * 1000000000L + getPacketHeader().getTsUsec() * 1000L ;  
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
new file mode 100644
index 0000000..e2d56c8
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteInputStream.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.IOException;
+
+import org.krakenapps.pcap.PcapInputStream;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+import org.krakenapps.pcap.util.ByteOrderConverter;
+import org.krakenapps.pcap.util.ChainBuffer;
+
+/**
+ * The Class PcapByteInputStream.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapByteInputStream implements PcapInputStream {
+
+  /** The is. */
+  private DataInputStream is;
+
+  /** The global header. */
+  private GlobalHeader globalHeader;
+
+  /**
+   * Opens pcap file input stream.
+   * 
+   * @param pcap
+   *          the byte array to be read
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public PcapByteInputStream(byte[] pcap) throws IOException {
+    is = new DataInputStream(new ByteArrayInputStream(pcap)); // $codepro.audit.disable
+                                                              // closeWhereCreated
+    readGlobalHeader();
+  }
+
+  /**
+   * Reads a packet from pcap byte array.
+   * 
+   * @return the packet throws IOException the stream has been closed and the
+   *         contained input stream does not support reading after close, or
+   *         another I/O error occurs. * @throws IOException Signals that an I/O
+   *         exception has occurred. * @see
+   *         org.krakenapps.pcap.PcapInputStream#getPacket()
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+
+  public PcapPacket getPacket() throws IOException {
+    return readPacket(globalHeader.getMagicNumber());
+  }
+
+  /**
+   * Gets the global header.
+   * 
+   * 
+   * @return the global header
+   */
+  public GlobalHeader getGlobalHeader() {
+    return globalHeader;
+  }
+
+  /**
+   * Read global header.
+   * 
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void readGlobalHeader() throws IOException {
+    int magic = is.readInt();
+    short major = is.readShort();
+    short minor = is.readShort();
+    int tz = is.readInt();
+    int sigfigs = is.readInt();
+    int snaplen = is.readInt();
+    int network = is.readInt();
+
+    globalHeader = new GlobalHeader(magic, major, minor, tz, sigfigs, snaplen,
+        network);
+
+    if (globalHeader.getMagicNumber() == 0xD4C3B2A1) {
+      globalHeader.swapByteOrder();
+    }
+  }
+
+  /**
+   * Read packet.
+   * 
+   * @param magicNumber
+   *          the magic number
+   * @return the pcap packet * @throws IOException Signals that an I/O exception
+   *         has occurred. * @throws EOFException the EOF exception
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private PcapPacket readPacket(int magicNumber) throws IOException {
+    PacketHeader packetHeader = readPacketHeader(magicNumber);
+    Buffer packetData = readPacketData(packetHeader.getInclLen());
+    return new PcapPacket(packetHeader, packetData);
+  }
+
+  /**
+   * Read packet header.
+   * 
+   * @param magicNumber
+   *          the magic number
+   * @return the packet header * @throws IOException Signals that an I/O
+   *         exception has occurred. * @throws EOFException the EOF exception
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private PacketHeader readPacketHeader(int magicNumber) throws IOException {
+    int tsSec = is.readInt();
+    int tsUsec = is.readInt();
+    int inclLen = is.readInt();
+    int origLen = is.readInt();
+
+    if (magicNumber == 0xD4C3B2A1) {
+      tsSec = ByteOrderConverter.swap(tsSec);
+      tsUsec = ByteOrderConverter.swap(tsUsec);
+      inclLen = ByteOrderConverter.swap(inclLen);
+      origLen = ByteOrderConverter.swap(origLen);
+    }
+
+    return new PacketHeader(tsSec, tsUsec, inclLen, origLen);
+  }
+
+  /**
+   * Read packet data.
+   * 
+   * @param packetLength
+   *          the packet length
+   * @return the buffer * @throws IOException Signals that an I/O exception has
+   *         occurred.
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private Buffer readPacketData(int packetLength) throws IOException {
+    byte[] packets = new byte[packetLength];
+    is.read(packets);
+
+    Buffer payload = new ChainBuffer();
+    payload.addLast(packets);
+    return payload;
+    // return new PacketPayload(packets);
+  }
+
+  /**
+   * Closes pcap stream handle.
+   * 
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred. * @see
+   *           org.krakenapps.pcap.PcapInputStream#close()
+   */
+
+  public void close() throws IOException {
+    is.close(); // $codepro.audit.disable closeInFinally
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
new file mode 100644
index 0000000..06d6af6
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapByteOutputStream.java
@@ -0,0 +1,305 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// $codepro.audit.disable explicitThisUsage, lossOfPrecisionInCast
+package org.apache.metron.pcap;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.BufferUnderflowException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.log4j.Logger;
+import org.krakenapps.pcap.PcapOutputStream;
+import org.krakenapps.pcap.file.GlobalHeader;
+import org.krakenapps.pcap.packet.PacketHeader;
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.util.Buffer;
+
+/**
+ * A {@link PcapOutputStream} that serializes packets to an in-memory
+ * {@link ByteArrayOutputStream} in libpcap file format: a 24-byte global
+ * header followed by one record (16-byte record header + payload) per packet,
+ * with multi-byte fields written little-endian.
+ *
+ * <p>Bytes are accumulated internally and pushed to the backing stream once
+ * {@link #MAX_CACHED_PACKET_NUMBER} packets are cached, or when
+ * {@link #flush()} / {@link #close()} is called.
+ *
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapByteOutputStream implements PcapOutputStream {
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger
+      .getLogger(PcapByteOutputStream.class);
+
+  /** Number of packets to cache before automatically flushing. */
+  private static final int MAX_CACHED_PACKET_NUMBER = 1000;
+
+  /** Packets cached since the last flush. */
+  private int cachedPacketNum = 0; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /** Backing stream that receives the serialized pcap bytes on flush. */
+  private ByteArrayOutputStream baos; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /** Internal byte accumulator for not-yet-flushed data. */
+  private List<Byte> list; // NOPMD by sheetal on 1/29/14 2:34 PM
+
+  /**
+   * Creates a stream that starts with the default little-endian pcap global
+   * header (version 2.4, snaplen 0xffff, ethernet link type).
+   *
+   * @param baos
+   *          the backing output stream
+   */
+  public PcapByteOutputStream(ByteArrayOutputStream baos) {
+    this.baos = baos;
+    list = new ArrayList<Byte>();
+    createGlobalHeader();
+  }
+
+  /**
+   * Creates a stream whose global header is copied from an existing pcap
+   * file's header.
+   *
+   * @param baos
+   *          the backing output stream
+   * @param header
+   *          the global header to copy
+   */
+  public PcapByteOutputStream(ByteArrayOutputStream baos, GlobalHeader header) {
+    this.baos = baos;
+    list = new ArrayList<Byte>();
+    copyGlobalHeader(header);
+  }
+
+  /**
+   * Appends the default pcap global header to the internal buffer.
+   */
+  private void createGlobalHeader() {
+    /* magic number(swapped) */
+    list.add((byte) 0xd4);
+    list.add((byte) 0xc3);
+    list.add((byte) 0xb2);
+    list.add((byte) 0xa1);
+
+    /* major version number */
+    list.add((byte) 0x02);
+    list.add((byte) 0x00);
+
+    /* minor version number */
+    list.add((byte) 0x04);
+    list.add((byte) 0x00);
+
+    /* GMT to local correction */
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* accuracy of timestamps */
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* max length of captured packets, in octets */
+    list.add((byte) 0xff);
+    list.add((byte) 0xff);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+
+    /* data link type(ethernet) */
+    list.add((byte) 0x01);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+    list.add((byte) 0x00);
+  }
+
+  /**
+   * Appends a copy of the given global header to the internal buffer. The
+   * magic number bytes are written as-is (they already encode byte order);
+   * the remaining multi-byte fields are written in reversed (little-endian)
+   * byte order.
+   *
+   * @param header
+   *          the header to copy
+   */
+  private void copyGlobalHeader(GlobalHeader header) {
+    final byte[] magicNumber = intToByteArray(header.getMagicNumber());
+    final byte[] majorVersion = shortToByteArray(header.getMajorVersion());
+    final byte[] minorVersion = shortToByteArray(header.getMinorVersion());
+    final byte[] zone = intToByteArray(header.getThiszone());
+    final byte[] sigFigs = intToByteArray(header.getSigfigs());
+    final byte[] snapLen = intToByteArray(header.getSnaplen());
+    final byte[] network = intToByteArray(header.getNetwork());
+
+    list.add(magicNumber[0]);
+    list.add(magicNumber[1]);
+    list.add(magicNumber[2]);
+    list.add(magicNumber[3]);
+
+    list.add(majorVersion[1]);
+    list.add(majorVersion[0]);
+
+    list.add(minorVersion[1]);
+    list.add(minorVersion[0]);
+
+    list.add(zone[3]);
+    list.add(zone[2]);
+    list.add(zone[1]);
+    list.add(zone[0]);
+
+    list.add(sigFigs[3]);
+    list.add(sigFigs[2]);
+    list.add(sigFigs[1]);
+    list.add(sigFigs[0]);
+
+    list.add(snapLen[3]);
+    list.add(snapLen[2]);
+    list.add(snapLen[1]);
+    list.add(snapLen[0]);
+
+    list.add(network[3]);
+    list.add(network[2]);
+    list.add(network[1]);
+    list.add(network[0]);
+  }
+
+  /**
+   * Serializes one packet (record header followed by payload) into the
+   * internal buffer, flushing to the backing stream when the cache threshold
+   * is reached. The payload's read position is restored afterwards, so the
+   * packet object is not consumed.
+   *
+   * @param packet
+   *          the packet to write
+   * @throws IOException
+   *           if an automatic flush to the backing stream fails
+   * @see org.krakenapps.pcap.PcapOutputStream#write(PcapPacket)
+   */
+  public void write(PcapPacket packet) throws IOException {
+    PacketHeader packetHeader = packet.getPacketHeader();
+
+    int tsSec = packetHeader.getTsSec();
+    int tsUsec = packetHeader.getTsUsec();
+    int inclLen = packetHeader.getInclLen();
+    int origLen = packetHeader.getOrigLen();
+
+    addInt(tsSec);
+    addInt(tsUsec);
+    addInt(inclLen);
+    addInt(origLen);
+
+    Buffer payload = packet.getPacketData();
+
+    try {
+      payload.mark();
+      // Drain the payload byte-by-byte; the buffer signals exhaustion with
+      // BufferUnderflowException rather than a sentinel value.
+      while (true) {
+        list.add(payload.get());
+      }
+    } catch (BufferUnderflowException e) {
+      //LOG.debug("Ignorable exception while writing packet", e);
+      payload.reset();
+    }
+
+    cachedPacketNum++;
+    // ">=" rather than "==" so a flush is never skipped even if the counter
+    // somehow overshoots the threshold.
+    if (cachedPacketNum >= MAX_CACHED_PACKET_NUMBER) {
+      flush();
+    }
+  }
+
+  /**
+   * Appends a 32-bit value to the internal buffer in little-endian byte
+   * order.
+   *
+   * @param number
+   *          the value to append
+   */
+  private void addInt(int number) {
+    list.add((byte) (number & 0xff));
+    list.add((byte) ((number & 0xff00) >> 8));
+    list.add((byte) ((number & 0xff0000) >> 16));
+    list.add((byte) ((number & 0xff000000) >> 24));
+  }
+
+  /**
+   * Converts an int to a 4-byte array in big-endian order.
+   *
+   * @param number
+   *          the number
+   * @return the byte[]
+   */
+  private byte[] intToByteArray(int number) {
+    return new byte[] { (byte) (number >>> 24), (byte) (number >>> 16),
+        (byte) (number >>> 8), (byte) number };
+  }
+
+  /**
+   * Converts a short to a 2-byte array in big-endian order.
+   *
+   * @param number
+   *          the number
+   * @return the byte[]
+   */
+  private byte[] shortToByteArray(short number) {
+    return new byte[] { (byte) (number >>> 8), (byte) number };
+  }
+
+  /**
+   * Writes all cached bytes to the backing stream and clears the cache.
+   *
+   * @throws IOException
+   *           if writing to the backing stream fails
+   * @see org.krakenapps.pcap.PcapOutputStream#flush()
+   */
+  public void flush() throws IOException {
+    byte[] fileBinary = new byte[list.size()];
+    for (int i = 0; i < fileBinary.length; i++) {
+      fileBinary[i] = list.get(i);
+    }
+
+    list.clear();
+    baos.write(fileBinary);
+    cachedPacketNum = 0;
+  }
+
+  /**
+   * Flushes any cached bytes and closes the backing stream.
+   *
+   * @throws IOException
+   *           if flushing or closing fails
+   * @see org.krakenapps.pcap.PcapOutputStream#close()
+   */
+  public void close() throws IOException {
+    flush();
+    baos.close(); // $codepro.audit.disable closeInFinally
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapMerger.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapMerger.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapMerger.java
new file mode 100644
index 0000000..48d25c7
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapMerger.java
@@ -0,0 +1,262 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+import java.io.ByteArrayOutputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+
+import org.krakenapps.pcap.packet.PcapPacket;
+import org.krakenapps.pcap.file.GlobalHeader;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapMerger.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public final class PcapMerger {
+
+  /** The Constant LOG. */
+  private static final Logger LOG = Logger.getLogger(PcapMerger.class);
+  
+  /** The comparator for PcapPackets */
+  private static PcapPacketComparator PCAP_PACKET_COMPARATOR = new PcapPacketComparator();
+
+  /**
+   * Instantiates a new pcap merger.
+   */
+  private PcapMerger() { // $codepro.audit.disable emptyMethod
+  }
+
+  /**
+   * Merge two pcap byte arrays.
+   * 
+   * @param baos
+   *          the baos
+   * @param pcaps
+   *          the pcaps
+   * 
+   * @throws IOException
+   *           if there is no byte array, no access permission, or other io
+   *           related problems.
+   */
+  // public static void merge(byte[] to, byte[] from) throws IOException {
+  // PcapByteInputStream is = null;
+  // PcapByteOutputStream os = null;
+  // ByteArrayOutputStream baos = null;
+  // try {
+  // is = new PcapByteInputStream(from);
+  // baos = new ByteArrayOutputStream();
+  // os = new PcapByteOutputStream(baos, is.getGlobalHeader());
+  //
+  // writePacket(is, os);
+  // } finally {
+  // closeInput(is);
+  // if (baos != null) {
+  // baos.close();
+  // }
+  // closeOutput(os);
+  // }
+  // }
+
+  public static void merge(ByteArrayOutputStream baos, List<byte[]> pcaps)
+      throws IOException {
+    PcapByteInputStream is = null;
+    PcapByteOutputStream os = null;
+    ByteArrayOutputStream unsortedBaos = new ByteArrayOutputStream();
+    
+    try {
+      int i = 1;
+      for (byte[] pcap : pcaps) {
+        is = new PcapByteInputStream(pcap);
+        if (i == 1) {
+          os = new PcapByteOutputStream(unsortedBaos, is.getGlobalHeader());
+        }
+
+        writePacket(is, os);
+        i++;
+        closeInput(is);
+      }
+    } finally {
+      if (unsortedBaos != null) {
+        unsortedBaos.close();
+      }
+      closeOutput(os);
+      sort(baos, unsortedBaos.toByteArray());
+    }
+  }
+
+  /**
+   * Merge byte array1 with byte array2, and write to output byte array. It
+   * doesn't hurt original pcap dump byte arrays.
+   * 
+   * @param baos
+   *          the baos
+   * @param pcaps
+   *          the pcaps
+   * 
+   * @throws IOException
+   *           if there are no source byte arrays, have no read and/or write
+   *           permissions, or anything else.
+   */
+  public static void merge(ByteArrayOutputStream baos, byte[]... pcaps) // $codepro.audit.disable
+                                                                        // overloadedMethods
+      throws IOException {
+    merge(baos, Arrays.asList(pcaps));
+
+  }
+  
+  /**
+   * Sort the potentially unsorted byte array according to the timestamp
+   * in the packet header
+   * 
+   * @param unsortedBytes
+   * 	a byte array of a pcap file
+   * 
+   * @return byte array of a pcap file with packets in cronological order
+   * 
+   * @throws IOException
+   * 	if there are no source byte arrays, have no read and or write 
+   * 	permission, or anything else.
+   */
+  private static void sort(ByteArrayOutputStream baos, byte[] unsortedBytes) throws IOException {
+	  PcapByteInputStream pcapIs = new PcapByteInputStream(unsortedBytes);
+	  PcapByteOutputStream pcapOs = new PcapByteOutputStream(baos, pcapIs.getGlobalHeader());
+	  PcapPacket packet;
+	  ArrayList<PcapPacket> packetList = new ArrayList<PcapPacket>();
+	  
+	  try {
+		  while (true) {
+			  packet = pcapIs.getPacket();
+			  if (packet == null)
+				  break;
+			  packetList.add(packet);
+			  LOG.debug("Presort packet: " + packet.getPacketHeader().toString());
+		  }
+	  } catch (EOFException e) {
+		  //LOG.debug("Ignoreable exception in sort", e);
+	  }
+	  
+	  Collections.sort(packetList, PCAP_PACKET_COMPARATOR);
+	  for (PcapPacket p : packetList) {
+		  pcapOs.write(p);
+		  LOG.debug("Postsort packet: " + p.getPacketHeader().toString());
+	  }
+	  pcapOs.close();  
+  }
+  
+  /**
+   * Write packet.
+   * 
+   * @param is
+   *          the is
+   * @param os
+   *          the os
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private static void writePacket(PcapByteInputStream is,
+      PcapByteOutputStream os) throws IOException {
+    PcapPacket packet = null;
+    try {
+      while (true) {
+        packet = is.getPacket();
+        if (packet == null) {
+          break;
+        }
+        os.write(packet);
+      }
+    } catch (EOFException e) {
+      //LOG.debug("Ignorable exception in writePacket", e);
+    }
+
+  }
+
+  /**
+   * Close input.
+   * 
+   * @param is
+   *          the is
+   */
+  private static void closeInput(PcapByteInputStream is) {
+    if (is == null) {
+      return;
+    }
+    try {
+      is.close(); // $codepro.audit.disable closeInFinally
+    } catch (IOException e) {
+      LOG.error("Failed to close input stream", e);
+    }
+  }
+
+  /**
+   * Close output.
+   * 
+   * @param os
+   *          the os
+   */
+  private static void closeOutput(PcapByteOutputStream os) {
+    if (os == null) {
+      return;
+    }
+    try {
+      os.close();
+    } catch (IOException e) {
+      LOG.error("Failed to close output stream", e);
+
+    }
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static void main(String[] args) throws IOException {
+    byte[] b1 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.1.pcap"));
+    byte[] b2 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.2.pcap"));
+    byte[] b3 = FileUtils.readFileToByteArray(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.3.pcap"));
+
+    ByteArrayOutputStream boas = new ByteArrayOutputStream(); // $codepro.audit.disable
+                                                              // closeWhereCreated
+    PcapMerger.merge(boas, b1, b2, b3);
+
+    FileUtils.writeByteArrayToFile(new File(
+        "/Users/sheetal/Downloads/constructedTcpDump.automerged.1.2.pcap"),
+        boas.toByteArray(), false);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
new file mode 100644
index 0000000..96f64a0
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/PcapPacketComparator.java
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap;
+
+import java.util.Comparator;
+
+import org.apache.log4j.Logger;
+
+import org.krakenapps.pcap.packet.PcapPacket;
+
+public class PcapPacketComparator implements Comparator<PcapPacket> {
+
+	/** The Constant LOG. */
+	private static final Logger LOG = Logger.getLogger(PcapMerger.class);
+	
+	public int compare(PcapPacket p1, PcapPacket p2) {
+
+		Long p1time = new Long(p1.getPacketHeader().getTsSec()) * 1000000L + new Long(p1.getPacketHeader().getTsUsec());
+		Long p2time = new Long(p2.getPacketHeader().getTsSec()) * 1000000L + new Long(p2.getPacketHeader().getTsUsec());
+		Long delta = p1time - p2time;
+		LOG.debug("p1time: " + p1time.toString() + " p2time: " + p2time.toString() + " delta: " + delta.toString());
+		return delta.intValue();
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterCallback.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterCallback.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterCallback.java
new file mode 100644
index 0000000..fcfcafd
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterCallback.java
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap.spout;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.log4j.Logger;
+import storm.kafka.Callback;
+import storm.kafka.EmitContext;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+public class HDFSWriterCallback implements Callback {
+  static final long serialVersionUID = 0xDEADBEEFL;
+  private static final Logger LOG = Logger.getLogger(HDFSWriterCallback.class);
+  public static final byte[] PCAP_GLOBAL_HEADER = new byte[] {
+          (byte) 0xd4, (byte) 0xc3, (byte) 0xb2, (byte) 0xa1, 0x02, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00
+          ,0x00, 0x00, 0x00, 0x00, (byte) 0xff, (byte) 0xff, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00
+  };
+
+  private static final List<Object> RET_TUPLE = ImmutableList.of((Object)Byte.valueOf((byte) 0x00), Byte.valueOf((byte)0x00));
+  private FileSystem fs;
+  private SequenceFile.Writer writer;
+  private HDFSWriterConfig config;
+  private long batchStartTime;
+  private long numWritten;
+  private EmitContext context;
+
+  public HDFSWriterCallback() {
+    //this.config = config;
+  }
+
+  public HDFSWriterCallback withConfig(HDFSWriterConfig config) {
+    LOG.info("Configured: " + config);
+    this.config = config;
+    return this;
+  }
+
+  @Override
+  public List<Object> apply(List<Object> tuple, EmitContext context) {
+
+    LongWritable ts = (LongWritable) tuple.get(0);
+    BytesWritable rawPacket = (BytesWritable)tuple.get(1);
+    try {
+      turnoverIfNecessary(ts.get());
+      writer.append(ts, headerize(rawPacket.getBytes()));
+      writer.hflush();
+    } catch (IOException e) {
+      LOG.error(e.getMessage(), e);
+      //drop?  not sure..
+    }
+    return RET_TUPLE;
+  }
+
+  private static BytesWritable headerize(byte[] packet) {
+    byte[] ret = new byte[packet.length + PCAP_GLOBAL_HEADER.length];
+    int offset = 0;
+    System.arraycopy(PCAP_GLOBAL_HEADER, 0, ret, offset, PCAP_GLOBAL_HEADER.length);
+    offset += PCAP_GLOBAL_HEADER.length;
+    System.arraycopy(packet, 0, ret, offset, packet.length);
+    return new BytesWritable(ret);
+  }
+
+
+  private synchronized void turnoverIfNecessary(long ts) throws IOException {
+    long duration = ts - batchStartTime;
+    if(batchStartTime == 0L || duration > config.getMaxTimeMS() || numWritten > config.getNumPackets()) {
+      //turnover
+      Path path = getPath(ts);
+      if(writer != null) {
+        writer.close();
+      }
+      writer = SequenceFile.createWriter(new Configuration()
+              , SequenceFile.Writer.file(path)
+              , SequenceFile.Writer.keyClass(LongWritable.class)
+              , SequenceFile.Writer.valueClass(BytesWritable.class)
+      );
+      //reset state
+      LOG.info("Turning over and writing to " + path);
+      batchStartTime = ts;
+      numWritten = 0;
+    }
+  }
+
+  private Path getPath(long ts) {
+    String fileName = Joiner.on("_").join("pcap"
+            , "" + ts
+            , context.get(EmitContext.Type.UUID)
+    );
+    return new Path(config.getOutputPath(), fileName);
+  }
+
+  @Override
+  public void initialize(EmitContext context) {
+    this.context = context;
+    try {
+      fs = FileSystem.get(new Configuration());
+    } catch (IOException e) {
+      throw new IllegalStateException("Unable to create filesystem", e);
+    }
+  }
+
+  /**
+   * Closes this resource, relinquishing any underlying resources.
+   * This method is invoked automatically on objects managed by the
+   * {@code try}-with-resources statement.
+   * <p/>
+   * <p>While this interface method is declared to throw {@code
+   * Exception}, implementers are <em>strongly</em> encouraged to
+   * declare concrete implementations of the {@code close} method to
+   * throw more specific exceptions, or to throw no exception at all
+   * if the close operation cannot fail.
+   * <p/>
+   * <p><em>Implementers of this interface are also strongly advised
+   * to not have the {@code close} method throw {@link
+   * InterruptedException}.</em>
+   * <p/>
+   * This exception interacts with a thread's interrupted status,
+   * and runtime misbehavior is likely to occur if an {@code
+   * InterruptedException} is {@linkplain Throwable#addSuppressed
+   * suppressed}.
+   * <p/>
+   * More generally, if it would cause problems for an
+   * exception to be suppressed, the {@code AutoCloseable.close}
+   * method should not throw it.
+   * <p/>
+   * <p>Note that unlike the {@link Closeable#close close}
+   * method of {@link Closeable}, this {@code close} method
+   * is <em>not</em> required to be idempotent.  In other words,
+   * calling this {@code close} method more than once may have some
+   * visible side effect, unlike {@code Closeable.close} which is
+   * required to have no effect if called more than once.
+   * <p/>
+   * However, implementers of this interface are strongly encouraged
+   * to make their {@code close} methods idempotent.
+   *
+   * @throws Exception if this resource cannot be closed
+   */
+  @Override
+  public void close() throws Exception {
+    if(writer != null) {
+      writer.close();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterConfig.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterConfig.java
new file mode 100644
index 0000000..60a23c2
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/spout/HDFSWriterConfig.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap.spout;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Serializable configuration for the HDFS pcap writer: output directory,
+ * rollover thresholds (packet count and elapsed time), and the zookeeper
+ * quorum ("host:port,host:port,...") used to locate kafka.
+ */
+public class HDFSWriterConfig implements Serializable {
+  static final long serialVersionUID = 0xDEADBEEFL;
+  private long numPackets;
+  private long maxTimeMS;
+  private String outputPath;
+  private String zookeeperQuorum;
+
+  /** Fluent setter for the HDFS output directory. */
+  public HDFSWriterConfig withOutputPath(String path) {
+    outputPath = path;
+    return this;
+  }
+
+  /** Fluent setter for the packet-count rollover threshold. */
+  public HDFSWriterConfig withNumPackets(long n) {
+    numPackets = n;
+    return this;
+  }
+
+  /** Fluent setter for the elapsed-time rollover threshold, in ms. */
+  public HDFSWriterConfig withMaxTimeMS(long t) {
+    maxTimeMS = t;
+    return this;
+  }
+
+  /** Fluent setter for the zookeeper quorum ("host:port,host:port,..."). */
+  public HDFSWriterConfig withZookeeperQuorum(String zookeeperQuorum) {
+    this.zookeeperQuorum = zookeeperQuorum;
+    return this;
+  }
+
+  /**
+   * Extracts the host portion of every "host:port" entry in the quorum.
+   *
+   * @return the hosts, or an empty list when no quorum is configured
+   */
+  public List<String> getZookeeperServers() {
+    List<String> hosts = new ArrayList<>();
+    if (zookeeperQuorum == null) {
+      return hosts;
+    }
+    for (String hostPort : Splitter.on(',').split(zookeeperQuorum)) {
+      String host = Iterables.getFirst(Splitter.on(':').split(hostPort), null);
+      if (host != null) {
+        hosts.add(host);
+      }
+    }
+    return hosts;
+  }
+
+  /**
+   * Extracts the port from the first "host:port" entry in the quorum.
+   *
+   * @return the port, or null when no quorum is configured
+   */
+  public Integer getZookeeperPort() {
+    if (zookeeperQuorum == null) {
+      return null;
+    }
+    String firstEntry = Iterables.getFirst(Splitter.on(',').split(zookeeperQuorum), null);
+    String portStr = Iterables.getLast(Splitter.on(':').split(firstEntry));
+    return Integer.parseInt(portStr);
+  }
+
+  /** @return the HDFS output directory */
+  public String getOutputPath() {
+    return outputPath;
+  }
+
+  /** @return the packet-count rollover threshold */
+  public long getNumPackets() {
+    return numPackets;
+  }
+
+  /** @return the elapsed-time rollover threshold, in ms */
+  public long getMaxTimeMS() {
+    return maxTimeMS;
+  }
+
+  @Override
+  public String toString() {
+    return "HDFSWriterConfig{" +
+            "numPackets=" + numPackets +
+            ", maxTimeMS=" + maxTimeMS +
+            ", outputPath='" + outputPath + '\'' +
+            '}';
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/utils/PcapUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/utils/PcapUtils.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/utils/PcapUtils.java
new file mode 100644
index 0000000..48e99d2
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/utils/PcapUtils.java
@@ -0,0 +1,475 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap.utils;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import com.google.common.base.Joiner;
+import org.apache.commons.lang.StringUtils;
+
+import com.google.common.collect.BiMap;
+import com.google.common.collect.HashBiMap;
+import org.json.simple.JSONObject;
+
+/**
+ * The Class PcapUtils.
+ */
+public class PcapUtils {
+
+  /** The Constant SESSION_KEY_SEPERATOR. */
+  private static final char SESSION_KEY_SEPERATOR = '-';
+
+  /** The Constant protocolIdToNameMap. */
+  private static final BiMap<Integer, String> protocolIdToNameMap = HashBiMap
+      .create();
+
+  // private static final Map<Integer, String> protocolIdToNameMap = new
+  // HashMap();
+
+  static {
+
+    protocolIdToNameMap.put(0, "HOPOPT");
+    protocolIdToNameMap.put(1, "ICMP");
+    protocolIdToNameMap.put(2, "IGMP");
+    protocolIdToNameMap.put(3, "GGP");
+    protocolIdToNameMap.put(4, "IPV4");
+    protocolIdToNameMap.put(5, "ST");
+    protocolIdToNameMap.put(6, "TCP");
+    protocolIdToNameMap.put(7, "CBT");
+    protocolIdToNameMap.put(8, "EGP");
+    protocolIdToNameMap.put(9, "IGP");
+    protocolIdToNameMap.put(10, "BBN-RCC-MON");
+    protocolIdToNameMap.put(11, "NVP-II");
+    protocolIdToNameMap.put(12, "PUP");
+    protocolIdToNameMap.put(13, "ARGUS");
+    protocolIdToNameMap.put(14, "EMCON");
+    protocolIdToNameMap.put(15, "XNET");
+    protocolIdToNameMap.put(16, "CHAOS");
+    protocolIdToNameMap.put(17, "UDP");
+    protocolIdToNameMap.put(18, "MUX");
+    protocolIdToNameMap.put(19, "DCN-MEAS");
+    protocolIdToNameMap.put(20, "HMP");
+    protocolIdToNameMap.put(21, "PRM");
+    protocolIdToNameMap.put(22, "XNS-IDP");
+    protocolIdToNameMap.put(23, "TRUNK-1");
+    protocolIdToNameMap.put(24, "TRUNK-2");
+    protocolIdToNameMap.put(25, "LEAF-1");
+    protocolIdToNameMap.put(26, "LEAF-2");
+    protocolIdToNameMap.put(27, "RDP");
+    protocolIdToNameMap.put(28, "IRTP");
+    protocolIdToNameMap.put(29, "ISO-TP4");
+    protocolIdToNameMap.put(30, "NETBLT");
+    protocolIdToNameMap.put(31, "MFE-NSP");
+    protocolIdToNameMap.put(32, "MERIT-INP");
+    protocolIdToNameMap.put(33, "DCCP");
+    protocolIdToNameMap.put(34, "3PC");
+    protocolIdToNameMap.put(35, "IDPR");
+    protocolIdToNameMap.put(36, "XTP");
+    protocolIdToNameMap.put(37, "DDP");
+    protocolIdToNameMap.put(38, "IDPR-CMTP");
+    protocolIdToNameMap.put(39, "TP++");
+    protocolIdToNameMap.put(40, "IL");
+    protocolIdToNameMap.put(41, "IPV6");
+    protocolIdToNameMap.put(42, "SDRP");
+    protocolIdToNameMap.put(43, "IPV6-ROUTE");
+    protocolIdToNameMap.put(44, "IPV6-FRAG");
+    protocolIdToNameMap.put(45, "IDRP");
+    protocolIdToNameMap.put(46, "RSVP");
+    protocolIdToNameMap.put(47, "GRE");
+    protocolIdToNameMap.put(48, "DSR");
+    protocolIdToNameMap.put(49, "BNA");
+    protocolIdToNameMap.put(50, "ESP");
+    protocolIdToNameMap.put(51, "AH");
+    protocolIdToNameMap.put(52, "I-NLSP");
+    protocolIdToNameMap.put(53, "SWIPE");
+    protocolIdToNameMap.put(54, "NARP");
+    protocolIdToNameMap.put(55, "MOBILE");
+    protocolIdToNameMap.put(56, "TLSP");
+    protocolIdToNameMap.put(57, "SKIP");
+    protocolIdToNameMap.put(58, "IPV6-ICMP");
+    protocolIdToNameMap.put(59, "IPV6-NONXT");
+    protocolIdToNameMap.put(60, "IPV6-OPTS");
+    protocolIdToNameMap.put(62, "CFTP");
+    protocolIdToNameMap.put(64, "SAT-EXPAK");
+    protocolIdToNameMap.put(65, "KRYPTOLAN");
+    protocolIdToNameMap.put(66, "RVD");
+    protocolIdToNameMap.put(67, "IPPC");
+    protocolIdToNameMap.put(69, "SAT-MON");
+    protocolIdToNameMap.put(70, "VISA");
+    protocolIdToNameMap.put(71, "IPCV");
+    protocolIdToNameMap.put(72, "CPNX");
+    protocolIdToNameMap.put(73, "CPHB");
+    protocolIdToNameMap.put(74, "WSN");
+    protocolIdToNameMap.put(75, "PVP");
+    protocolIdToNameMap.put(76, "BR-SAT-MON");
+    protocolIdToNameMap.put(77, "SUN-ND");
+    protocolIdToNameMap.put(78, "WB-MON");
+    protocolIdToNameMap.put(79, "WB-EXPAK");
+    protocolIdToNameMap.put(80, "ISO-IP");
+    protocolIdToNameMap.put(81, "VMTP");
+    protocolIdToNameMap.put(82, "SECURE-VMTP");
+    protocolIdToNameMap.put(83, "VINES");
+    protocolIdToNameMap.put(84, "TTP");
+    protocolIdToNameMap.put(85, "NSFNET-IGP");
+    protocolIdToNameMap.put(86, "DGP");
+    protocolIdToNameMap.put(87, "TCF");
+    protocolIdToNameMap.put(88, "EIGRP");
+    protocolIdToNameMap.put(89, "OSPFIGP");
+    protocolIdToNameMap.put(90, "SPRITE-RPC");
+    protocolIdToNameMap.put(91, "LARP");
+    protocolIdToNameMap.put(92, "MTP");
+    protocolIdToNameMap.put(93, "AX.25");
+    protocolIdToNameMap.put(94, "IPIP");
+    protocolIdToNameMap.put(95, "MICP");
+    protocolIdToNameMap.put(96, "SCC-SP");
+    protocolIdToNameMap.put(97, "ETHERIP");
+    protocolIdToNameMap.put(98, "ENCAP");
+    protocolIdToNameMap.put(100, "GMTP");
+    protocolIdToNameMap.put(101, "IFMP");
+    protocolIdToNameMap.put(102, "PNNI");
+    protocolIdToNameMap.put(103, "PIM");
+    protocolIdToNameMap.put(104, "ARIS");
+    protocolIdToNameMap.put(105, "SCPS");
+    protocolIdToNameMap.put(106, "QNX");
+    protocolIdToNameMap.put(107, "A/N");
+    protocolIdToNameMap.put(108, "IPCOMP");
+    protocolIdToNameMap.put(109, "SNP");
+    protocolIdToNameMap.put(110, "COMPAQ-PEER");
+    protocolIdToNameMap.put(111, "IPX-IN-IP");
+    protocolIdToNameMap.put(112, "VRRP");
+    protocolIdToNameMap.put(113, "PGM");
+    protocolIdToNameMap.put(115, "L2TP");
+    protocolIdToNameMap.put(116, "DDX");
+    protocolIdToNameMap.put(117, "IATP");
+    protocolIdToNameMap.put(118, "STP");
+    protocolIdToNameMap.put(119, "SRP");
+    protocolIdToNameMap.put(120, "UTI");
+    protocolIdToNameMap.put(121, "SMP");
+    protocolIdToNameMap.put(122, "SM");
+    protocolIdToNameMap.put(123, "PTP");
+    protocolIdToNameMap.put(124, "ISIS OVER IPV4");
+    protocolIdToNameMap.put(125, "FIRE");
+    protocolIdToNameMap.put(126, "CRTP");
+    protocolIdToNameMap.put(127, "CRUDP");
+    protocolIdToNameMap.put(128, "SSCOPMCE");
+    protocolIdToNameMap.put(129, "IPLT");
+    protocolIdToNameMap.put(130, "SPS");
+    protocolIdToNameMap.put(131, "PIPE");
+    protocolIdToNameMap.put(132, "SCTP");
+    protocolIdToNameMap.put(133, "FC");
+    protocolIdToNameMap.put(134, "RSVP-E2E-IGNORE");
+    protocolIdToNameMap.put(135, "MOBILITY HEADER");
+    protocolIdToNameMap.put(136, "UDPLITE");
+    protocolIdToNameMap.put(137, "MPLS-IN-IP");
+    protocolIdToNameMap.put(138, "MANET");
+    protocolIdToNameMap.put(139, "HIP");
+    protocolIdToNameMap.put(140, "SHIM6");
+    protocolIdToNameMap.put(141, "WESP");
+    protocolIdToNameMap.put(142, "ROHC");
+  }
+
+  /** The Constant protocolNameToIdMap. */
+  private static final BiMap<String, Integer> protocolNameToIdMap = protocolIdToNameMap
+      .inverse();
+
+  // private static final Map<String, Integer> protocolNameToIdMap =
+  // invertMap(protocolIdToNameMap);
+
+  /**
+   * Convert ipv4 ip to hex.
+   * 
+   * @param ipAddress
+   *          the ip address
+   * @return the string
+   */
+  public static String convertIpv4IpToHex(String ipAddress) {
+    StringBuffer hexIp = new StringBuffer(64);
+    String[] ipSegments = ipAddress.split("\\.");
+
+    for (String ipSegment : ipSegments) {
+      hexIp.append(convertIpSegmentToHex(ipSegment));
+    }
+
+    return hexIp.toString();
+
+  }
+
+  public static String convertHexToIpv4Ip(String hex) {
+    List<Integer> ipSegments = new ArrayList<>();
+    for(int i = 0; i < hex.length(); i += 2) {
+      String segment = hex.substring(i, i + 2);
+      ipSegments.add(Integer.parseInt(segment, 16));
+    }
+    return Joiner.on(".").join(ipSegments);
+  }
+
+  /**
+   * Gets the session key.
+   * 
+   * @param srcIp
+   *          the src ip
+   * @param dstIp
+   *          the dst ip
+   * @param protocol
+   *          the protocol
+   * @param srcPort
+   *          the src port
+   * @param dstPort
+   *          the dst port
+   * @return the session key
+   */
+  public static String getSessionKey(String srcIp, String dstIp,
+      String protocol, String srcPort, String dstPort) {
+    return getSessionKey(srcIp, dstIp, protocol, srcPort, dstPort, null, null);
+  }
+
+  /**
+   * Gets the session key.
+   * 
+   * @param srcIp
+   *          the src ip
+   * @param dstIp
+   *          the dst ip
+   * @param protocol
+   *          the protocol
+   * @param srcPort
+   *          the src port
+   * @param dstPort
+   *          the dst port
+   * @param ipId
+   *          the ip id
+   * @param fragmentOffset
+   *          the fragment offset
+   * @return the session key
+   */
+  public static String getSessionKey(String srcIp, String dstIp,
+      String protocol, String srcPort, String dstPort, String ipId,
+      String fragmentOffset) {
+
+    StringBuffer sb = new StringBuffer(40);
+    sb.append(convertIpv4IpToHex(srcIp)).append(SESSION_KEY_SEPERATOR)
+        .append(convertIpv4IpToHex(dstIp)).append(SESSION_KEY_SEPERATOR)
+        .append(protocol == null ? "0" : protocol)
+        .append(SESSION_KEY_SEPERATOR).append(srcPort == null ? "0" : srcPort)
+        .append(SESSION_KEY_SEPERATOR).append(dstPort == null ? "0" : dstPort)
+        .append(SESSION_KEY_SEPERATOR).append(ipId == null ? "0" : ipId)
+        .append(SESSION_KEY_SEPERATOR)
+        .append(fragmentOffset == null ? "0" : fragmentOffset);
+
+    return sb.toString();
+  }
+
+  public static String getSessionKey(JSONObject message) {
+    String srcIp = (String) message.get("ip_src_addr");
+    String dstIp = (String) message.get("ip_dst_addr");
+    Long protocol = (Long) message.get("ip_protocol");
+    Long srcPort = (Long) message.get("ip_src_port");
+    Long dstPort = (Long) message.get("ip_dst_port");
+    Long ipId = (Long) message.get("ip_id");
+    String ipIdString = ipId == null ? null : ipId.toString();
+    Long fragmentOffset = (Long) message.get("frag_offset");
+    String fragmentOffsetString = fragmentOffset == null ? null : fragmentOffset.toString();
+    return PcapUtils.getSessionKey(srcIp, dstIp, protocol.toString(), srcPort.toString(), dstPort.toString(), ipIdString, fragmentOffsetString);
+  }
+
+  public static String getPartialSessionKey(String srcIp, String dstIp,
+                                            String protocol, String srcPort, String dstPort) {
+    StringBuffer sb = new StringBuffer(40);
+    sb.append(convertIpv4IpToHex(srcIp)).append(SESSION_KEY_SEPERATOR)
+            .append(convertIpv4IpToHex(dstIp)).append(SESSION_KEY_SEPERATOR)
+            .append(protocol == null ? "0" : protocol)
+            .append(SESSION_KEY_SEPERATOR).append(srcPort == null ? "0" : srcPort)
+            .append(SESSION_KEY_SEPERATOR).append(dstPort == null ? "0" : dstPort);
+    return sb.toString();
+  }
+
+  /**
+   * Gets the session key.
+   * 
+   * @param srcIp
+   *          the src ip
+   * @param dstIp
+   *          the dst ip
+   * @param protocol
+   *          the protocol
+   * @param srcPort
+   *          the src port
+   * @param dstPort
+   *          the dst port
+   * @param ipId
+   *          the ip id
+   * @param fragmentOffset
+   *          the fragment offset
+   * @return the session key
+   */
+  public static String getSessionKey(String srcIp, String dstIp, int protocol,
+      int srcPort, int dstPort, int ipId, int fragmentOffset) {
+    String keySeperator = "-";
+    StringBuffer sb = new StringBuffer(40);
+    sb.append(convertIpv4IpToHex(srcIp)).append(keySeperator)
+        .append(convertIpv4IpToHex(dstIp)).append(keySeperator)
+        .append(protocol).append(keySeperator).append(srcPort)
+        .append(keySeperator).append(dstPort).append(keySeperator).append(ipId)
+        .append(keySeperator).append(fragmentOffset);
+
+    return sb.toString();
+  }
+
+  /**
+   * Gets the short session key. (5-tuple only)
+   * 
+   * @param srcIp
+   *          the src ip
+   * @param dstIp
+   *          the dst ip
+   * @param protocol
+   *          the protocol
+   * @param srcPort
+   *          the src port
+   * @param dstPort
+   *          the dst port
+   * @return the session key
+   */
+  public static String getShortSessionKey(String srcIp, String dstIp, int protocol,
+      int srcPort, int dstPort) {
+    String keySeperator = "-";
+    StringBuffer sb = new StringBuffer(40);
+    sb.append(convertIpv4IpToHex(srcIp)).append(keySeperator)
+        .append(convertIpv4IpToHex(dstIp)).append(keySeperator)
+        .append(protocol).append(keySeperator).append(srcPort)
+        .append(keySeperator).append(dstPort);
+
+    return sb.toString();
+  }
+  
+  // public static String convertPortToHex(String portNumber) {
+  // return convertPortToHex(Integer.valueOf(portNumber));
+  //
+  // }
+  //
+  // public static String convertPortToHex(int portNumber) {
+  // return convertToHex(portNumber, 4);
+  //
+  // }
+  //
+  // public static String convertProtocolToHex(String protocol) {
+  // return convertProtocolToHex(Integer.valueOf(protocol));
+  //
+  // }
+  //
+  // public static String convertProtocolToHex(int protocol) {
+  // return convertToHex(protocol, 2);
+  // }
+
+  /**
+   * Convert ip segment to hex.
+   * 
+   * @param ipSegment
+   *          the ip segment
+   * @return the string
+   */
+  public static String convertIpSegmentToHex(String ipSegment) {
+    return convertIpSegmentToHex(Integer.valueOf(ipSegment));
+
+  }
+
+  /**
+   * Convert ip segment to hex.
+   * 
+   * @param ipSegment
+   *          the ip segment
+   * @return the string
+   */
+  public static String convertIpSegmentToHex(int ipSegment) {
+    return convertToHex(ipSegment, 2);
+
+  }
+
+  /**
+   * Convert to hex.
+   * 
+   * @param number
+   *          the number
+   * @param length
+   *          the length
+   * @return the string
+   */
+  public static String convertToHex(int number, int length) {
+    return StringUtils.leftPad(Integer.toHexString(number), length, '0');
+
+  }
+
+  /**
+   * Gets the protocol name.
+   * 
+   * @param protocolNumber
+   *          the protocol number
+   * 
+   * @return the protocol name
+   */
+  public static String getProtocolNameFromId(int protocolNumber) {
+    String protocolName = protocolIdToNameMap.get(protocolNumber);
+
+    if (protocolName == null) {
+      protocolName = String.valueOf(protocolNumber);
+    }
+    return protocolName;
+  }
+
+  /**
+   * Gets the protocol id from name.
+   * 
+   * @param protocolName
+   *          the protocol name
+   * @return the protocol id from name
+   */
+  public static int getProtocolIdFromName(String protocolName) {
+    Integer protocolNumber = protocolNameToIdMap
+        .get(protocolName.toUpperCase());
+
+    if (protocolNumber == null) {
+      protocolNumber = -1;
+    }
+    return protocolNumber;
+  }
+
+  /**
+   * Invert map.
+   * 
+   * @param <V>
+   *          the value type
+   * @param <K>
+   *          the key type
+   * @param map
+   *          the map
+   * @return the map
+   */
+  private static <V, K> Map<V, K> invertMap(Map<K, V> map) {
+
+    Map<V, K> inv = new HashMap<V, K>();
+
+    for (Entry<K, V> entry : map.entrySet())
+      inv.put(entry.getValue(), entry.getKey());
+
+    return inv;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/writer/PcapWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/writer/PcapWriter.java b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/writer/PcapWriter.java
new file mode 100644
index 0000000..c07a217
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/org/apache/metron/pcap/writer/PcapWriter.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.hbase.writer.HBaseWriter;
+import org.apache.metron.pcap.utils.PcapUtils;
+import org.json.simple.JSONObject;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class PcapWriter extends HBaseWriter {
+
+  private String column;
+
+  public PcapWriter(String tableName, String column) {
+    super(tableName);
+    this.column = column;
+  }
+
+  @Override
+  public byte[] getKey(Tuple tuple, JSONObject message) {
+    String key = PcapUtils.getSessionKey(message);
+    return key.getBytes();
+  }
+
+  @Override
+  public long getTimestamp(Tuple tuple, JSONObject message) {
+    return (long) message.get("ts_micro");
+  }
+
+  @Override
+  public Map<String, byte[]> getValues(Tuple tuple, JSONObject message) {
+    Map<String, byte[]> values = new HashMap<>();
+    values.put(column, tuple.getBinary(0));
+    return values;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/storm/kafka/Callback.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/storm/kafka/Callback.java b/metron-platform/metron-pcap/src/main/java/storm/kafka/Callback.java
new file mode 100644
index 0000000..ff05c29
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/storm/kafka/Callback.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package storm.kafka;
+
+import java.io.Serializable;
+import java.util.List;
+
+public interface Callback extends AutoCloseable, Serializable {
+  List<Object> apply(List<Object> tuple, EmitContext context);
+  void initialize(EmitContext context);
+}



[25/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/resources/effective_tld_names.dat
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/resources/effective_tld_names.dat b/metron-platform/metron-enrichment/src/main/resources/effective_tld_names.dat
new file mode 100644
index 0000000..36e5d4c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/resources/effective_tld_names.dat
@@ -0,0 +1,9719 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+// ===BEGIN ICANN DOMAINS===
+
+// ac : http://en.wikipedia.org/wiki/.ac
+ac
+com.ac
+edu.ac
+gov.ac
+net.ac
+mil.ac
+org.ac
+
+// ad : http://en.wikipedia.org/wiki/.ad
+ad
+nom.ad
+
+// ae : http://en.wikipedia.org/wiki/.ae
+// see also: "Domain Name Eligibility Policy" at http://www.aeda.ae/eng/aepolicy.php
+ae
+co.ae
+net.ae
+org.ae
+sch.ae
+ac.ae
+gov.ae
+mil.ae
+
+// aero : see http://www.information.aero/index.php?id=66
+aero
+accident-investigation.aero
+accident-prevention.aero
+aerobatic.aero
+aeroclub.aero
+aerodrome.aero
+agents.aero
+aircraft.aero
+airline.aero
+airport.aero
+air-surveillance.aero
+airtraffic.aero
+air-traffic-control.aero
+ambulance.aero
+amusement.aero
+association.aero
+author.aero
+ballooning.aero
+broker.aero
+caa.aero
+cargo.aero
+catering.aero
+certification.aero
+championship.aero
+charter.aero
+civilaviation.aero
+club.aero
+conference.aero
+consultant.aero
+consulting.aero
+control.aero
+council.aero
+crew.aero
+design.aero
+dgca.aero
+educator.aero
+emergency.aero
+engine.aero
+engineer.aero
+entertainment.aero
+equipment.aero
+exchange.aero
+express.aero
+federation.aero
+flight.aero
+freight.aero
+fuel.aero
+gliding.aero
+government.aero
+groundhandling.aero
+group.aero
+hanggliding.aero
+homebuilt.aero
+insurance.aero
+journal.aero
+journalist.aero
+leasing.aero
+logistics.aero
+magazine.aero
+maintenance.aero
+marketplace.aero
+media.aero
+microlight.aero
+modelling.aero
+navigation.aero
+parachuting.aero
+paragliding.aero
+passenger-association.aero
+pilot.aero
+press.aero
+production.aero
+recreation.aero
+repbody.aero
+res.aero
+research.aero
+rotorcraft.aero
+safety.aero
+scientist.aero
+services.aero
+show.aero
+skydiving.aero
+software.aero
+student.aero
+taxi.aero
+trader.aero
+trading.aero
+trainer.aero
+union.aero
+workinggroup.aero
+works.aero
+
+// af : http://www.nic.af/help.jsp
+af
+gov.af
+com.af
+org.af
+net.af
+edu.af
+
+// ag : http://www.nic.ag/prices.htm
+ag
+com.ag
+org.ag
+net.ag
+co.ag
+nom.ag
+
+// ai : http://nic.com.ai/
+ai
+off.ai
+com.ai
+net.ai
+org.ai
+
+// al : http://www.ert.gov.al/ert_alb/faq_det.html?Id=31
+al
+com.al
+edu.al
+gov.al
+mil.al
+net.al
+org.al
+
+// am : http://en.wikipedia.org/wiki/.am
+am
+
+// an : http://www.una.an/an_domreg/default.asp
+an
+com.an
+net.an
+org.an
+edu.an
+
+// ao : http://en.wikipedia.org/wiki/.ao
+// http://www.dns.ao/REGISTR.DOC
+ao
+ed.ao
+gv.ao
+og.ao
+co.ao
+pb.ao
+it.ao
+
+// aq : http://en.wikipedia.org/wiki/.aq
+aq
+
+// ar : https://nic.ar/normativa-vigente.xhtml
+ar
+com.ar
+edu.ar
+gob.ar
+gov.ar
+int.ar
+mil.ar
+net.ar
+org.ar
+tur.ar
+
+// arpa : http://en.wikipedia.org/wiki/.arpa
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+arpa
+e164.arpa
+in-addr.arpa
+ip6.arpa
+iris.arpa
+uri.arpa
+urn.arpa
+
+// as : http://en.wikipedia.org/wiki/.as
+as
+gov.as
+
+// asia : http://en.wikipedia.org/wiki/.asia
+asia
+
+// at : http://en.wikipedia.org/wiki/.at
+// Confirmed by registry <it...@nic.at> 2008-06-17
+at
+ac.at
+co.at
+gv.at
+or.at
+
+// au : http://en.wikipedia.org/wiki/.au
+// http://www.auda.org.au/
+au
+// 2LDs
+com.au
+net.au
+org.au
+edu.au
+gov.au
+asn.au
+id.au
+// Historic 2LDs (closed to new registration, but sites still exist)
+info.au
+conf.au
+oz.au
+// CGDNs - http://www.cgdn.org.au/
+act.au
+nsw.au
+nt.au
+qld.au
+sa.au
+tas.au
+vic.au
+wa.au
+// 3LDs
+act.edu.au
+nsw.edu.au
+nt.edu.au
+qld.edu.au
+sa.edu.au
+tas.edu.au
+vic.edu.au
+wa.edu.au
+// act.gov.au  Bug 984824 - Removed at request of Greg Tankard
+// nsw.gov.au  Bug 547985 - Removed at request of <Sh...@services.nsw.gov.au>
+// nt.gov.au  Bug 940478 - Removed at request of Greg Connors <Gr...@nt.gov.au>
+qld.gov.au
+sa.gov.au
+tas.gov.au
+vic.gov.au
+wa.gov.au
+
+// aw : http://en.wikipedia.org/wiki/.aw
+aw
+com.aw
+
+// ax : http://en.wikipedia.org/wiki/.ax
+ax
+
+// az : http://en.wikipedia.org/wiki/.az
+az
+com.az
+net.az
+int.az
+gov.az
+org.az
+edu.az
+info.az
+pp.az
+mil.az
+name.az
+pro.az
+biz.az
+
+// ba : http://en.wikipedia.org/wiki/.ba
+ba
+org.ba
+net.ba
+edu.ba
+gov.ba
+mil.ba
+unsa.ba
+unbi.ba
+co.ba
+com.ba
+rs.ba
+
+// bb : http://en.wikipedia.org/wiki/.bb
+bb
+biz.bb
+co.bb
+com.bb
+edu.bb
+gov.bb
+info.bb
+net.bb
+org.bb
+store.bb
+tv.bb
+
+// bd : http://en.wikipedia.org/wiki/.bd
+*.bd
+
+// be : http://en.wikipedia.org/wiki/.be
+// Confirmed by registry <te...@dns.be> 2008-06-08
+be
+ac.be
+
+// bf : http://en.wikipedia.org/wiki/.bf
+bf
+gov.bf
+
+// bg : http://en.wikipedia.org/wiki/.bg
+// https://www.register.bg/user/static/rules/en/index.html
+bg
+a.bg
+b.bg
+c.bg
+d.bg
+e.bg
+f.bg
+g.bg
+h.bg
+i.bg
+j.bg
+k.bg
+l.bg
+m.bg
+n.bg
+o.bg
+p.bg
+q.bg
+r.bg
+s.bg
+t.bg
+u.bg
+v.bg
+w.bg
+x.bg
+y.bg
+z.bg
+0.bg
+1.bg
+2.bg
+3.bg
+4.bg
+5.bg
+6.bg
+7.bg
+8.bg
+9.bg
+
+// bh : http://en.wikipedia.org/wiki/.bh
+bh
+com.bh
+edu.bh
+net.bh
+org.bh
+gov.bh
+
+// bi : http://en.wikipedia.org/wiki/.bi
+// http://whois.nic.bi/
+bi
+co.bi
+com.bi
+edu.bi
+or.bi
+org.bi
+
+// biz : http://en.wikipedia.org/wiki/.biz
+biz
+
+// bj : http://en.wikipedia.org/wiki/.bj
+bj
+asso.bj
+barreau.bj
+gouv.bj
+
+// bm : http://www.bermudanic.bm/dnr-text.txt
+bm
+com.bm
+edu.bm
+gov.bm
+net.bm
+org.bm
+
+// bn : http://en.wikipedia.org/wiki/.bn
+*.bn
+
+// bo : http://www.nic.bo/
+bo
+com.bo
+edu.bo
+gov.bo
+gob.bo
+int.bo
+org.bo
+net.bo
+mil.bo
+tv.bo
+
+// br : http://registro.br/dominio/categoria.html
+// Submitted by registry <fn...@registro.br> 2014-08-11
+br
+adm.br
+adv.br
+agr.br
+am.br
+arq.br
+art.br
+ato.br
+b.br
+bio.br
+blog.br
+bmd.br
+cim.br
+cng.br
+cnt.br
+com.br
+coop.br
+ecn.br
+eco.br
+edu.br
+emp.br
+eng.br
+esp.br
+etc.br
+eti.br
+far.br
+flog.br
+fm.br
+fnd.br
+fot.br
+fst.br
+g12.br
+ggf.br
+gov.br
+imb.br
+ind.br
+inf.br
+jor.br
+jus.br
+leg.br
+lel.br
+mat.br
+med.br
+mil.br
+mp.br
+mus.br
+net.br
+*.nom.br
+not.br
+ntr.br
+odo.br
+org.br
+ppg.br
+pro.br
+psc.br
+psi.br
+qsl.br
+radio.br
+rec.br
+slg.br
+srv.br
+taxi.br
+teo.br
+tmp.br
+trd.br
+tur.br
+tv.br
+vet.br
+vlog.br
+wiki.br
+zlg.br
+
+// bs : http://www.nic.bs/rules.html
+bs
+com.bs
+net.bs
+org.bs
+edu.bs
+gov.bs
+
+// bt : http://en.wikipedia.org/wiki/.bt
+bt
+com.bt
+edu.bt
+gov.bt
+net.bt
+org.bt
+
+// bv : No registrations at this time.
+// Submitted by registry <ja...@uninett.no> 2006-06-16
+bv
+
+// bw : http://en.wikipedia.org/wiki/.bw
+// http://www.gobin.info/domainname/bw.doc
+// list of other 2nd level tlds ?
+bw
+co.bw
+org.bw
+
+// by : http://en.wikipedia.org/wiki/.by
+// http://tld.by/rules_2006_en.html
+// list of other 2nd level tlds ?
+by
+gov.by
+mil.by
+// Official information does not indicate that com.by is a reserved
+// second-level domain, but it's being used as one (see www.google.com.by and
+// www.yahoo.com.by, for example), so we list it here for safety's sake.
+com.by
+
+// http://hoster.by/
+of.by
+
+// bz : http://en.wikipedia.org/wiki/.bz
+// http://www.belizenic.bz/
+bz
+com.bz
+net.bz
+org.bz
+edu.bz
+gov.bz
+
+// ca : http://en.wikipedia.org/wiki/.ca
+ca
+// ca geographical names
+ab.ca
+bc.ca
+mb.ca
+nb.ca
+nf.ca
+nl.ca
+ns.ca
+nt.ca
+nu.ca
+on.ca
+pe.ca
+qc.ca
+sk.ca
+yk.ca
+// gc.ca: http://en.wikipedia.org/wiki/.gc.ca
+// see also: http://registry.gc.ca/en/SubdomainFAQ
+gc.ca
+
+// cat : http://en.wikipedia.org/wiki/.cat
+cat
+
+// cc : http://en.wikipedia.org/wiki/.cc
+cc
+
+// cd : http://en.wikipedia.org/wiki/.cd
+// see also: https://www.nic.cd/domain/insertDomain_2.jsp?act=1
+cd
+gov.cd
+
+// cf : http://en.wikipedia.org/wiki/.cf
+cf
+
+// cg : http://en.wikipedia.org/wiki/.cg
+cg
+
+// ch : http://en.wikipedia.org/wiki/.ch
+ch
+
+// ci : http://en.wikipedia.org/wiki/.ci
+// http://www.nic.ci/index.php?page=charte
+ci
+org.ci
+or.ci
+com.ci
+co.ci
+edu.ci
+ed.ci
+ac.ci
+net.ci
+go.ci
+asso.ci
+aéroport.ci
+int.ci
+presse.ci
+md.ci
+gouv.ci
+
+// ck : http://en.wikipedia.org/wiki/.ck
+*.ck
+!www.ck
+
+// cl : http://en.wikipedia.org/wiki/.cl
+cl
+gov.cl
+gob.cl
+co.cl
+mil.cl
+
+// cm : http://en.wikipedia.org/wiki/.cm plus bug 981927
+cm
+co.cm
+com.cm
+gov.cm
+net.cm
+
+// cn : http://en.wikipedia.org/wiki/.cn
+// Submitted by registry <ta...@cnnic.cn> 2008-06-11
+cn
+ac.cn
+com.cn
+edu.cn
+gov.cn
+net.cn
+org.cn
+mil.cn
+公司.cn
+网络.cn
+網絡.cn
+// cn geographic names
+ah.cn
+bj.cn
+cq.cn
+fj.cn
+gd.cn
+gs.cn
+gz.cn
+gx.cn
+ha.cn
+hb.cn
+he.cn
+hi.cn
+hl.cn
+hn.cn
+jl.cn
+js.cn
+jx.cn
+ln.cn
+nm.cn
+nx.cn
+qh.cn
+sc.cn
+sd.cn
+sh.cn
+sn.cn
+sx.cn
+tj.cn
+xj.cn
+xz.cn
+yn.cn
+zj.cn
+hk.cn
+mo.cn
+tw.cn
+
+// co : http://en.wikipedia.org/wiki/.co
+// Submitted by registry <te...@uniandes.edu.co> 2008-06-11
+co
+arts.co
+com.co
+edu.co
+firm.co
+gov.co
+info.co
+int.co
+mil.co
+net.co
+nom.co
+org.co
+rec.co
+web.co
+
+// com : http://en.wikipedia.org/wiki/.com
+com
+
+// coop : http://en.wikipedia.org/wiki/.coop
+coop
+
+// cr : http://www.nic.cr/niccr_publico/showRegistroDominiosScreen.do
+cr
+ac.cr
+co.cr
+ed.cr
+fi.cr
+go.cr
+or.cr
+sa.cr
+
+// cu : http://en.wikipedia.org/wiki/.cu
+cu
+com.cu
+edu.cu
+org.cu
+net.cu
+gov.cu
+inf.cu
+
+// cv : http://en.wikipedia.org/wiki/.cv
+cv
+
+// cw : http://www.una.cw/cw_registry/
+// Confirmed by registry <re...@una.net> 2013-03-26
+cw
+com.cw
+edu.cw
+net.cw
+org.cw
+
+// cx : http://en.wikipedia.org/wiki/.cx
+// list of other 2nd level tlds ?
+cx
+gov.cx
+
+// cy : http://en.wikipedia.org/wiki/.cy
+*.cy
+
+// cz : http://en.wikipedia.org/wiki/.cz
+cz
+
+// de : http://en.wikipedia.org/wiki/.de
+// Confirmed by registry <op...@denic.de> (with technical
+// reservations) 2008-07-01
+de
+
+// dj : http://en.wikipedia.org/wiki/.dj
+dj
+
+// dk : http://en.wikipedia.org/wiki/.dk
+// Confirmed by registry <ro...@dk-hostmaster.dk> 2008-06-17
+dk
+
+// dm : http://en.wikipedia.org/wiki/.dm
+dm
+com.dm
+net.dm
+org.dm
+edu.dm
+gov.dm
+
+// do : http://en.wikipedia.org/wiki/.do
+do
+art.do
+com.do
+edu.do
+gob.do
+gov.do
+mil.do
+net.do
+org.do
+sld.do
+web.do
+
+// dz : http://en.wikipedia.org/wiki/.dz
+dz
+com.dz
+org.dz
+net.dz
+gov.dz
+edu.dz
+asso.dz
+pol.dz
+art.dz
+
+// ec : http://www.nic.ec/reg/paso1.asp
+// Submitted by registry <va...@nic.ec> 2008-07-04
+ec
+com.ec
+info.ec
+net.ec
+fin.ec
+k12.ec
+med.ec
+pro.ec
+org.ec
+edu.ec
+gov.ec
+gob.ec
+mil.ec
+
+// edu : http://en.wikipedia.org/wiki/.edu
+edu
+
+// ee : http://www.eenet.ee/EENet/dom_reeglid.html#lisa_B
+ee
+edu.ee
+gov.ee
+riik.ee
+lib.ee
+med.ee
+com.ee
+pri.ee
+aip.ee
+org.ee
+fie.ee
+
+// eg : http://en.wikipedia.org/wiki/.eg
+eg
+com.eg
+edu.eg
+eun.eg
+gov.eg
+mil.eg
+name.eg
+net.eg
+org.eg
+sci.eg
+
+// er : http://en.wikipedia.org/wiki/.er
+*.er
+
+// es : https://www.nic.es/site_ingles/ingles/dominios/index.html
+es
+com.es
+nom.es
+org.es
+gob.es
+edu.es
+
+// et : http://en.wikipedia.org/wiki/.et
+et
+com.et
+gov.et
+org.et
+edu.et
+biz.et
+name.et
+info.et
+
+// eu : http://en.wikipedia.org/wiki/.eu
+eu
+
+// fi : http://en.wikipedia.org/wiki/.fi
+fi
+// aland.fi : http://en.wikipedia.org/wiki/.ax
+// This domain is being phased out in favor of .ax. As there are still many
+// domains under aland.fi, we still keep it on the list until aland.fi is
+// completely removed.
+// TODO: Check for updates (expected to be phased out around Q1/2009)
+aland.fi
+
+// fj : http://en.wikipedia.org/wiki/.fj
+*.fj
+
+// fk : http://en.wikipedia.org/wiki/.fk
+*.fk
+
+// fm : http://en.wikipedia.org/wiki/.fm
+fm
+
+// fo : http://en.wikipedia.org/wiki/.fo
+fo
+
+// fr : http://www.afnic.fr/
+// domaines descriptifs : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-descriptifs
+fr
+com.fr
+asso.fr
+nom.fr
+prd.fr
+presse.fr
+tm.fr
+// domaines sectoriels : http://www.afnic.fr/obtenir/chartes/nommage-fr/annexe-sectoriels
+aeroport.fr
+assedic.fr
+avocat.fr
+avoues.fr
+cci.fr
+chambagri.fr
+chirurgiens-dentistes.fr
+experts-comptables.fr
+geometre-expert.fr
+gouv.fr
+greta.fr
+huissier-justice.fr
+medecin.fr
+notaires.fr
+pharmacien.fr
+port.fr
+veterinaire.fr
+
+// ga : http://en.wikipedia.org/wiki/.ga
+ga
+
+// gb : This registry is effectively dormant
+// Submitted by registry <Da...@ja.net> 2008-06-12
+gb
+
+// gd : http://en.wikipedia.org/wiki/.gd
+gd
+
+// ge : http://www.nic.net.ge/policy_en.pdf
+ge
+com.ge
+edu.ge
+gov.ge
+org.ge
+mil.ge
+net.ge
+pvt.ge
+
+// gf : http://en.wikipedia.org/wiki/.gf
+gf
+
+// gg : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+gg
+co.gg
+net.gg
+org.gg
+
+// gh : http://en.wikipedia.org/wiki/.gh
+// see also: http://www.nic.gh/reg_now.php
+// Although domains directly at second level are not possible at the moment,
+// they have been possible for some time and may come back.
+gh
+com.gh
+edu.gh
+gov.gh
+org.gh
+mil.gh
+
+// gi : http://www.nic.gi/rules.html
+gi
+com.gi
+ltd.gi
+gov.gi
+mod.gi
+edu.gi
+org.gi
+
+// gl : http://en.wikipedia.org/wiki/.gl
+// http://nic.gl
+gl
+
+// gm : http://www.nic.gm/htmlpages%5Cgm-policy.htm
+gm
+
+// gn : http://psg.com/dns/gn/gn.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+gn
+ac.gn
+com.gn
+edu.gn
+gov.gn
+org.gn
+net.gn
+
+// gov : http://en.wikipedia.org/wiki/.gov
+gov
+
+// gp : http://www.nic.gp/index.php?lang=en
+gp
+com.gp
+net.gp
+mobi.gp
+edu.gp
+org.gp
+asso.gp
+
+// gq : http://en.wikipedia.org/wiki/.gq
+gq
+
+// gr : https://grweb.ics.forth.gr/english/1617-B-2005.html
+// Submitted by registry <se...@ics.forth.gr> 2008-06-09
+gr
+com.gr
+edu.gr
+net.gr
+org.gr
+gov.gr
+
+// gs : http://en.wikipedia.org/wiki/.gs
+gs
+
+// gt : http://www.gt/politicas_de_registro.html
+gt
+com.gt
+edu.gt
+gob.gt
+ind.gt
+mil.gt
+net.gt
+org.gt
+
+// gu : http://gadao.gov.gu/registration.txt
+*.gu
+
+// gw : http://en.wikipedia.org/wiki/.gw
+gw
+
+// gy : http://en.wikipedia.org/wiki/.gy
+// http://registry.gy/
+gy
+co.gy
+com.gy
+net.gy
+
+// hk : https://www.hkdnr.hk
+// Submitted by registry <hk...@hkirc.hk> 2008-06-11
+hk
+com.hk
+edu.hk
+gov.hk
+idv.hk
+net.hk
+org.hk
+公司.hk
+教育.hk
+敎育.hk
+政府.hk
+個人.hk
+个人.hk
+箇人.hk
+網络.hk
+网络.hk
+组織.hk
+網絡.hk
+网絡.hk
+组织.hk
+組織.hk
+組织.hk
+
+// hm : http://en.wikipedia.org/wiki/.hm
+hm
+
+// hn : http://www.nic.hn/politicas/ps02,,05.html
+hn
+com.hn
+edu.hn
+org.hn
+net.hn
+mil.hn
+gob.hn
+
+// hr : http://www.dns.hr/documents/pdf/HRTLD-regulations.pdf
+hr
+iz.hr
+from.hr
+name.hr
+com.hr
+
+// ht : http://www.nic.ht/info/charte.cfm
+ht
+com.ht
+shop.ht
+firm.ht
+info.ht
+adult.ht
+net.ht
+pro.ht
+org.ht
+med.ht
+art.ht
+coop.ht
+pol.ht
+asso.ht
+edu.ht
+rel.ht
+gouv.ht
+perso.ht
+
+// hu : http://www.domain.hu/domain/English/sld.html
+// Confirmed by registry <pa...@iszt.hu> 2008-06-12
+hu
+co.hu
+info.hu
+org.hu
+priv.hu
+sport.hu
+tm.hu
+2000.hu
+agrar.hu
+bolt.hu
+casino.hu
+city.hu
+erotica.hu
+erotika.hu
+film.hu
+forum.hu
+games.hu
+hotel.hu
+ingatlan.hu
+jogasz.hu
+konyvelo.hu
+lakas.hu
+media.hu
+news.hu
+reklam.hu
+sex.hu
+shop.hu
+suli.hu
+szex.hu
+tozsde.hu
+utazas.hu
+video.hu
+
+// id : https://register.pandi.or.id/
+id
+ac.id
+biz.id
+co.id
+desa.id
+go.id
+mil.id
+my.id
+net.id
+or.id
+sch.id
+web.id
+
+// ie : http://en.wikipedia.org/wiki/.ie
+ie
+gov.ie
+
+// il : http://en.wikipedia.org/wiki/.il
+*.il
+
+// im : https://www.nic.im/
+// Submitted by registry <in...@nic.im> 2013-11-15
+im
+ac.im
+co.im
+com.im
+ltd.co.im
+net.im
+org.im
+plc.co.im
+tt.im
+tv.im
+
+// in : http://en.wikipedia.org/wiki/.in
+// see also: https://registry.in/Policies
+// Please note, that nic.in is not an offical eTLD, but used by most
+// government institutions.
+in
+co.in
+firm.in
+net.in
+org.in
+gen.in
+ind.in
+nic.in
+ac.in
+edu.in
+res.in
+gov.in
+mil.in
+
+// info : http://en.wikipedia.org/wiki/.info
+info
+
+// int : http://en.wikipedia.org/wiki/.int
+// Confirmed by registry <ia...@icann.org> 2008-06-18
+int
+eu.int
+
+// io : http://www.nic.io/rules.html
+// list of other 2nd level tlds ?
+io
+com.io
+
+// iq : http://www.cmc.iq/english/iq/iqregister1.htm
+iq
+gov.iq
+edu.iq
+mil.iq
+com.iq
+org.iq
+net.iq
+
+// ir : http://www.nic.ir/Terms_and_Conditions_ir,_Appendix_1_Domain_Rules
+// Also see http://www.nic.ir/Internationalized_Domain_Names
+// Two <iran>.ir entries added at request of <te...@nic.ir>, 2010-04-16
+ir
+ac.ir
+co.ir
+gov.ir
+id.ir
+net.ir
+org.ir
+sch.ir
+// xn--mgba3a4f16a.ir (<iran>.ir, Persian YEH)
+ایران.ir
+// xn--mgba3a4fra.ir (<iran>.ir, Arabic YEH)
+ايران.ir
+
+// is : http://www.isnic.is/domain/rules.php
+// Confirmed by registry <ma...@isgate.is> 2008-12-06
+is
+net.is
+com.is
+edu.is
+gov.is
+org.is
+int.is
+
+// it : http://en.wikipedia.org/wiki/.it
+it
+gov.it
+edu.it
+// Reserved geo-names:
+// http://www.nic.it/documenti/regolamenti-e-linee-guida/regolamento-assegnazione-versione-6.0.pdf
+// There is also a list of reserved geo-names corresponding to Italian municipalities
+// http://www.nic.it/documenti/appendice-c.pdf, but it is not included here.
+// Regions
+abr.it
+abruzzo.it
+aosta-valley.it
+aostavalley.it
+bas.it
+basilicata.it
+cal.it
+calabria.it
+cam.it
+campania.it
+emilia-romagna.it
+emiliaromagna.it
+emr.it
+friuli-v-giulia.it
+friuli-ve-giulia.it
+friuli-vegiulia.it
+friuli-venezia-giulia.it
+friuli-veneziagiulia.it
+friuli-vgiulia.it
+friuliv-giulia.it
+friulive-giulia.it
+friulivegiulia.it
+friulivenezia-giulia.it
+friuliveneziagiulia.it
+friulivgiulia.it
+fvg.it
+laz.it
+lazio.it
+lig.it
+liguria.it
+lom.it
+lombardia.it
+lombardy.it
+lucania.it
+mar.it
+marche.it
+mol.it
+molise.it
+piedmont.it
+piemonte.it
+pmn.it
+pug.it
+puglia.it
+sar.it
+sardegna.it
+sardinia.it
+sic.it
+sicilia.it
+sicily.it
+taa.it
+tos.it
+toscana.it
+trentino-a-adige.it
+trentino-aadige.it
+trentino-alto-adige.it
+trentino-altoadige.it
+trentino-s-tirol.it
+trentino-stirol.it
+trentino-sud-tirol.it
+trentino-sudtirol.it
+trentino-sued-tirol.it
+trentino-suedtirol.it
+trentinoa-adige.it
+trentinoaadige.it
+trentinoalto-adige.it
+trentinoaltoadige.it
+trentinos-tirol.it
+trentinostirol.it
+trentinosud-tirol.it
+trentinosudtirol.it
+trentinosued-tirol.it
+trentinosuedtirol.it
+tuscany.it
+umb.it
+umbria.it
+val-d-aosta.it
+val-daosta.it
+vald-aosta.it
+valdaosta.it
+valle-aosta.it
+valle-d-aosta.it
+valle-daosta.it
+valleaosta.it
+valled-aosta.it
+valledaosta.it
+vallee-aoste.it
+valleeaoste.it
+vao.it
+vda.it
+ven.it
+veneto.it
+// Provinces
+ag.it
+agrigento.it
+al.it
+alessandria.it
+alto-adige.it
+altoadige.it
+an.it
+ancona.it
+andria-barletta-trani.it
+andria-trani-barletta.it
+andriabarlettatrani.it
+andriatranibarletta.it
+ao.it
+aosta.it
+aoste.it
+ap.it
+aq.it
+aquila.it
+ar.it
+arezzo.it
+ascoli-piceno.it
+ascolipiceno.it
+asti.it
+at.it
+av.it
+avellino.it
+ba.it
+balsan.it
+bari.it
+barletta-trani-andria.it
+barlettatraniandria.it
+belluno.it
+benevento.it
+bergamo.it
+bg.it
+bi.it
+biella.it
+bl.it
+bn.it
+bo.it
+bologna.it
+bolzano.it
+bozen.it
+br.it
+brescia.it
+brindisi.it
+bs.it
+bt.it
+bz.it
+ca.it
+cagliari.it
+caltanissetta.it
+campidano-medio.it
+campidanomedio.it
+campobasso.it
+carbonia-iglesias.it
+carboniaiglesias.it
+carrara-massa.it
+carraramassa.it
+caserta.it
+catania.it
+catanzaro.it
+cb.it
+ce.it
+cesena-forli.it
+cesenaforli.it
+ch.it
+chieti.it
+ci.it
+cl.it
+cn.it
+co.it
+como.it
+cosenza.it
+cr.it
+cremona.it
+crotone.it
+cs.it
+ct.it
+cuneo.it
+cz.it
+dell-ogliastra.it
+dellogliastra.it
+en.it
+enna.it
+fc.it
+fe.it
+fermo.it
+ferrara.it
+fg.it
+fi.it
+firenze.it
+florence.it
+fm.it
+foggia.it
+forli-cesena.it
+forlicesena.it
+fr.it
+frosinone.it
+ge.it
+genoa.it
+genova.it
+go.it
+gorizia.it
+gr.it
+grosseto.it
+iglesias-carbonia.it
+iglesiascarbonia.it
+im.it
+imperia.it
+is.it
+isernia.it
+kr.it
+la-spezia.it
+laquila.it
+laspezia.it
+latina.it
+lc.it
+le.it
+lecce.it
+lecco.it
+li.it
+livorno.it
+lo.it
+lodi.it
+lt.it
+lu.it
+lucca.it
+macerata.it
+mantova.it
+massa-carrara.it
+massacarrara.it
+matera.it
+mb.it
+mc.it
+me.it
+medio-campidano.it
+mediocampidano.it
+messina.it
+mi.it
+milan.it
+milano.it
+mn.it
+mo.it
+modena.it
+monza-brianza.it
+monza-e-della-brianza.it
+monza.it
+monzabrianza.it
+monzaebrianza.it
+monzaedellabrianza.it
+ms.it
+mt.it
+na.it
+naples.it
+napoli.it
+no.it
+novara.it
+nu.it
+nuoro.it
+og.it
+ogliastra.it
+olbia-tempio.it
+olbiatempio.it
+or.it
+oristano.it
+ot.it
+pa.it
+padova.it
+padua.it
+palermo.it
+parma.it
+pavia.it
+pc.it
+pd.it
+pe.it
+perugia.it
+pesaro-urbino.it
+pesarourbino.it
+pescara.it
+pg.it
+pi.it
+piacenza.it
+pisa.it
+pistoia.it
+pn.it
+po.it
+pordenone.it
+potenza.it
+pr.it
+prato.it
+pt.it
+pu.it
+pv.it
+pz.it
+ra.it
+ragusa.it
+ravenna.it
+rc.it
+re.it
+reggio-calabria.it
+reggio-emilia.it
+reggiocalabria.it
+reggioemilia.it
+rg.it
+ri.it
+rieti.it
+rimini.it
+rm.it
+rn.it
+ro.it
+roma.it
+rome.it
+rovigo.it
+sa.it
+salerno.it
+sassari.it
+savona.it
+si.it
+siena.it
+siracusa.it
+so.it
+sondrio.it
+sp.it
+sr.it
+ss.it
+suedtirol.it
+sv.it
+ta.it
+taranto.it
+te.it
+tempio-olbia.it
+tempioolbia.it
+teramo.it
+terni.it
+tn.it
+to.it
+torino.it
+tp.it
+tr.it
+trani-andria-barletta.it
+trani-barletta-andria.it
+traniandriabarletta.it
+tranibarlettaandria.it
+trapani.it
+trentino.it
+trento.it
+treviso.it
+trieste.it
+ts.it
+turin.it
+tv.it
+ud.it
+udine.it
+urbino-pesaro.it
+urbinopesaro.it
+va.it
+varese.it
+vb.it
+vc.it
+ve.it
+venezia.it
+venice.it
+verbania.it
+vercelli.it
+verona.it
+vi.it
+vibo-valentia.it
+vibovalentia.it
+vicenza.it
+viterbo.it
+vr.it
+vs.it
+vt.it
+vv.it
+
+// je : http://www.channelisles.net/register-domains/
+// Confirmed by registry <ni...@channelisles.net> 2013-11-28
+je
+co.je
+net.je
+org.je
+
+// jm : http://www.com.jm/register.html
+*.jm
+
+// jo : http://www.dns.jo/Registration_policy.aspx
+jo
+com.jo
+org.jo
+net.jo
+edu.jo
+sch.jo
+gov.jo
+mil.jo
+name.jo
+
+// jobs : http://en.wikipedia.org/wiki/.jobs
+jobs
+
+// jp : http://en.wikipedia.org/wiki/.jp
+// http://jprs.co.jp/en/jpdomain.html
+// Submitted by registry <in...@jprs.jp> 2014-10-30
+jp
+// jp organizational type names
+ac.jp
+ad.jp
+co.jp
+ed.jp
+go.jp
+gr.jp
+lg.jp
+ne.jp
+or.jp
+// jp prefecture type names
+aichi.jp
+akita.jp
+aomori.jp
+chiba.jp
+ehime.jp
+fukui.jp
+fukuoka.jp
+fukushima.jp
+gifu.jp
+gunma.jp
+hiroshima.jp
+hokkaido.jp
+hyogo.jp
+ibaraki.jp
+ishikawa.jp
+iwate.jp
+kagawa.jp
+kagoshima.jp
+kanagawa.jp
+kochi.jp
+kumamoto.jp
+kyoto.jp
+mie.jp
+miyagi.jp
+miyazaki.jp
+nagano.jp
+nagasaki.jp
+nara.jp
+niigata.jp
+oita.jp
+okayama.jp
+okinawa.jp
+osaka.jp
+saga.jp
+saitama.jp
+shiga.jp
+shimane.jp
+shizuoka.jp
+tochigi.jp
+tokushima.jp
+tokyo.jp
+tottori.jp
+toyama.jp
+wakayama.jp
+yamagata.jp
+yamaguchi.jp
+yamanashi.jp
+栃木.jp
+愛知.jp
+愛媛.jp
+兵庫.jp
+熊本.jp
+茨城.jp
+北海道.jp
+千葉.jp
+和歌山.jp
+長崎.jp
+長野.jp
+新潟.jp
+青森.jp
+静岡.jp
+東京.jp
+石川.jp
+埼玉.jp
+三重.jp
+京都.jp
+佐賀.jp
+大分.jp
+大阪.jp
+奈良.jp
+宮城.jp
+宮崎.jp
+富山.jp
+山口.jp
+山形.jp
+山梨.jp
+岩手.jp
+岐阜.jp
+岡山.jp
+島根.jp
+広島.jp
+徳島.jp
+沖縄.jp
+滋賀.jp
+神奈川.jp
+福井.jp
+福岡.jp
+福島.jp
+秋田.jp
+群馬.jp
+香川.jp
+高知.jp
+鳥取.jp
+鹿児島.jp
+// jp geographic type names
+// http://jprs.jp/doc/rule/saisoku-1.html
+*.kawasaki.jp
+*.kitakyushu.jp
+*.kobe.jp
+*.nagoya.jp
+*.sapporo.jp
+*.sendai.jp
+*.yokohama.jp
+!city.kawasaki.jp
+!city.kitakyushu.jp
+!city.kobe.jp
+!city.nagoya.jp
+!city.sapporo.jp
+!city.sendai.jp
+!city.yokohama.jp
+// 4th level registration
+aisai.aichi.jp
+ama.aichi.jp
+anjo.aichi.jp
+asuke.aichi.jp
+chiryu.aichi.jp
+chita.aichi.jp
+fuso.aichi.jp
+gamagori.aichi.jp
+handa.aichi.jp
+hazu.aichi.jp
+hekinan.aichi.jp
+higashiura.aichi.jp
+ichinomiya.aichi.jp
+inazawa.aichi.jp
+inuyama.aichi.jp
+isshiki.aichi.jp
+iwakura.aichi.jp
+kanie.aichi.jp
+kariya.aichi.jp
+kasugai.aichi.jp
+kira.aichi.jp
+kiyosu.aichi.jp
+komaki.aichi.jp
+konan.aichi.jp
+kota.aichi.jp
+mihama.aichi.jp
+miyoshi.aichi.jp
+nishio.aichi.jp
+nisshin.aichi.jp
+obu.aichi.jp
+oguchi.aichi.jp
+oharu.aichi.jp
+okazaki.aichi.jp
+owariasahi.aichi.jp
+seto.aichi.jp
+shikatsu.aichi.jp
+shinshiro.aichi.jp
+shitara.aichi.jp
+tahara.aichi.jp
+takahama.aichi.jp
+tobishima.aichi.jp
+toei.aichi.jp
+togo.aichi.jp
+tokai.aichi.jp
+tokoname.aichi.jp
+toyoake.aichi.jp
+toyohashi.aichi.jp
+toyokawa.aichi.jp
+toyone.aichi.jp
+toyota.aichi.jp
+tsushima.aichi.jp
+yatomi.aichi.jp
+akita.akita.jp
+daisen.akita.jp
+fujisato.akita.jp
+gojome.akita.jp
+hachirogata.akita.jp
+happou.akita.jp
+higashinaruse.akita.jp
+honjo.akita.jp
+honjyo.akita.jp
+ikawa.akita.jp
+kamikoani.akita.jp
+kamioka.akita.jp
+katagami.akita.jp
+kazuno.akita.jp
+kitaakita.akita.jp
+kosaka.akita.jp
+kyowa.akita.jp
+misato.akita.jp
+mitane.akita.jp
+moriyoshi.akita.jp
+nikaho.akita.jp
+noshiro.akita.jp
+odate.akita.jp
+oga.akita.jp
+ogata.akita.jp
+semboku.akita.jp
+yokote.akita.jp
+yurihonjo.akita.jp
+aomori.aomori.jp
+gonohe.aomori.jp
+hachinohe.aomori.jp
+hashikami.aomori.jp
+hiranai.aomori.jp
+hirosaki.aomori.jp
+itayanagi.aomori.jp
+kuroishi.aomori.jp
+misawa.aomori.jp
+mutsu.aomori.jp
+nakadomari.aomori.jp
+noheji.aomori.jp
+oirase.aomori.jp
+owani.aomori.jp
+rokunohe.aomori.jp
+sannohe.aomori.jp
+shichinohe.aomori.jp
+shingo.aomori.jp
+takko.aomori.jp
+towada.aomori.jp
+tsugaru.aomori.jp
+tsuruta.aomori.jp
+abiko.chiba.jp
+asahi.chiba.jp
+chonan.chiba.jp
+chosei.chiba.jp
+choshi.chiba.jp
+chuo.chiba.jp
+funabashi.chiba.jp
+futtsu.chiba.jp
+hanamigawa.chiba.jp
+ichihara.chiba.jp
+ichikawa.chiba.jp
+ichinomiya.chiba.jp
+inzai.chiba.jp
+isumi.chiba.jp
+kamagaya.chiba.jp
+kamogawa.chiba.jp
+kashiwa.chiba.jp
+katori.chiba.jp
+katsuura.chiba.jp
+kimitsu.chiba.jp
+kisarazu.chiba.jp
+kozaki.chiba.jp
+kujukuri.chiba.jp
+kyonan.chiba.jp
+matsudo.chiba.jp
+midori.chiba.jp
+mihama.chiba.jp
+minamiboso.chiba.jp
+mobara.chiba.jp
+mutsuzawa.chiba.jp
+nagara.chiba.jp
+nagareyama.chiba.jp
+narashino.chiba.jp
+narita.chiba.jp
+noda.chiba.jp
+oamishirasato.chiba.jp
+omigawa.chiba.jp
+onjuku.chiba.jp
+otaki.chiba.jp
+sakae.chiba.jp
+sakura.chiba.jp
+shimofusa.chiba.jp
+shirako.chiba.jp
+shiroi.chiba.jp
+shisui.chiba.jp
+sodegaura.chiba.jp
+sosa.chiba.jp
+tako.chiba.jp
+tateyama.chiba.jp
+togane.chiba.jp
+tohnosho.chiba.jp
+tomisato.chiba.jp
+urayasu.chiba.jp
+yachimata.chiba.jp
+yachiyo.chiba.jp
+yokaichiba.chiba.jp
+yokoshibahikari.chiba.jp
+yotsukaido.chiba.jp
+ainan.ehime.jp
+honai.ehime.jp
+ikata.ehime.jp
+imabari.ehime.jp
+iyo.ehime.jp
+kamijima.ehime.jp
+kihoku.ehime.jp
+kumakogen.ehime.jp
+masaki.ehime.jp
+matsuno.ehime.jp
+matsuyama.ehime.jp
+namikata.ehime.jp
+niihama.ehime.jp
+ozu.ehime.jp
+saijo.ehime.jp
+seiyo.ehime.jp
+shikokuchuo.ehime.jp
+tobe.ehime.jp
+toon.ehime.jp
+uchiko.ehime.jp
+uwajima.ehime.jp
+yawatahama.ehime.jp
+echizen.fukui.jp
+eiheiji.fukui.jp
+fukui.fukui.jp
+ikeda.fukui.jp
+katsuyama.fukui.jp
+mihama.fukui.jp
+minamiechizen.fukui.jp
+obama.fukui.jp
+ohi.fukui.jp
+ono.fukui.jp
+sabae.fukui.jp
+sakai.fukui.jp
+takahama.fukui.jp
+tsuruga.fukui.jp
+wakasa.fukui.jp
+ashiya.fukuoka.jp
+buzen.fukuoka.jp
+chikugo.fukuoka.jp
+chikuho.fukuoka.jp
+chikujo.fukuoka.jp
+chikushino.fukuoka.jp
+chikuzen.fukuoka.jp
+chuo.fukuoka.jp
+dazaifu.fukuoka.jp
+fukuchi.fukuoka.jp
+hakata.fukuoka.jp
+higashi.fukuoka.jp
+hirokawa.fukuoka.jp
+hisayama.fukuoka.jp
+iizuka.fukuoka.jp
+inatsuki.fukuoka.jp
+kaho.fukuoka.jp
+kasuga.fukuoka.jp
+kasuya.fukuoka.jp
+kawara.fukuoka.jp
+keisen.fukuoka.jp
+koga.fukuoka.jp
+kurate.fukuoka.jp
+kurogi.fukuoka.jp
+kurume.fukuoka.jp
+minami.fukuoka.jp
+miyako.fukuoka.jp
+miyama.fukuoka.jp
+miyawaka.fukuoka.jp
+mizumaki.fukuoka.jp
+munakata.fukuoka.jp
+nakagawa.fukuoka.jp
+nakama.fukuoka.jp
+nishi.fukuoka.jp
+nogata.fukuoka.jp
+ogori.fukuoka.jp
+okagaki.fukuoka.jp
+okawa.fukuoka.jp
+oki.fukuoka.jp
+omuta.fukuoka.jp
+onga.fukuoka.jp
+onojo.fukuoka.jp
+oto.fukuoka.jp
+saigawa.fukuoka.jp
+sasaguri.fukuoka.jp
+shingu.fukuoka.jp
+shinyoshitomi.fukuoka.jp
+shonai.fukuoka.jp
+soeda.fukuoka.jp
+sue.fukuoka.jp
+tachiarai.fukuoka.jp
+tagawa.fukuoka.jp
+takata.fukuoka.jp
+toho.fukuoka.jp
+toyotsu.fukuoka.jp
+tsuiki.fukuoka.jp
+ukiha.fukuoka.jp
+umi.fukuoka.jp
+usui.fukuoka.jp
+yamada.fukuoka.jp
+yame.fukuoka.jp
+yanagawa.fukuoka.jp
+yukuhashi.fukuoka.jp
+aizubange.fukushima.jp
+aizumisato.fukushima.jp
+aizuwakamatsu.fukushima.jp
+asakawa.fukushima.jp
+bandai.fukushima.jp
+date.fukushima.jp
+fukushima.fukushima.jp
+furudono.fukushima.jp
+futaba.fukushima.jp
+hanawa.fukushima.jp
+higashi.fukushima.jp
+hirata.fukushima.jp
+hirono.fukushima.jp
+iitate.fukushima.jp
+inawashiro.fukushima.jp
+ishikawa.fukushima.jp
+iwaki.fukushima.jp
+izumizaki.fukushima.jp
+kagamiishi.fukushima.jp
+kaneyama.fukushima.jp
+kawamata.fukushima.jp
+kitakata.fukushima.jp
+kitashiobara.fukushima.jp
+koori.fukushima.jp
+koriyama.fukushima.jp
+kunimi.fukushima.jp
+miharu.fukushima.jp
+mishima.fukushima.jp
+namie.fukushima.jp
+nango.fukushima.jp
+nishiaizu.fukushima.jp
+nishigo.fukushima.jp
+okuma.fukushima.jp
+omotego.fukushima.jp
+ono.fukushima.jp
+otama.fukushima.jp
+samegawa.fukushima.jp
+shimogo.fukushima.jp
+shirakawa.fukushima.jp
+showa.fukushima.jp
+soma.fukushima.jp
+sukagawa.fukushima.jp
+taishin.fukushima.jp
+tamakawa.fukushima.jp
+tanagura.fukushima.jp
+tenei.fukushima.jp
+yabuki.fukushima.jp
+yamato.fukushima.jp
+yamatsuri.fukushima.jp
+yanaizu.fukushima.jp
+yugawa.fukushima.jp
+anpachi.gifu.jp
+ena.gifu.jp
+gifu.gifu.jp
+ginan.gifu.jp
+godo.gifu.jp
+gujo.gifu.jp
+hashima.gifu.jp
+hichiso.gifu.jp
+hida.gifu.jp
+higashishirakawa.gifu.jp
+ibigawa.gifu.jp
+ikeda.gifu.jp
+kakamigahara.gifu.jp
+kani.gifu.jp
+kasahara.gifu.jp
+kasamatsu.gifu.jp
+kawaue.gifu.jp
+kitagata.gifu.jp
+mino.gifu.jp
+minokamo.gifu.jp
+mitake.gifu.jp
+mizunami.gifu.jp
+motosu.gifu.jp
+nakatsugawa.gifu.jp
+ogaki.gifu.jp
+sakahogi.gifu.jp
+seki.gifu.jp
+sekigahara.gifu.jp
+shirakawa.gifu.jp
+tajimi.gifu.jp
+takayama.gifu.jp
+tarui.gifu.jp
+toki.gifu.jp
+tomika.gifu.jp
+wanouchi.gifu.jp
+yamagata.gifu.jp
+yaotsu.gifu.jp
+yoro.gifu.jp
+annaka.gunma.jp
+chiyoda.gunma.jp
+fujioka.gunma.jp
+higashiagatsuma.gunma.jp
+isesaki.gunma.jp
+itakura.gunma.jp
+kanna.gunma.jp
+kanra.gunma.jp
+katashina.gunma.jp
+kawaba.gunma.jp
+kiryu.gunma.jp
+kusatsu.gunma.jp
+maebashi.gunma.jp
+meiwa.gunma.jp
+midori.gunma.jp
+minakami.gunma.jp
+naganohara.gunma.jp
+nakanojo.gunma.jp
+nanmoku.gunma.jp
+numata.gunma.jp
+oizumi.gunma.jp
+ora.gunma.jp
+ota.gunma.jp
+shibukawa.gunma.jp
+shimonita.gunma.jp
+shinto.gunma.jp
+showa.gunma.jp
+takasaki.gunma.jp
+takayama.gunma.jp
+tamamura.gunma.jp
+tatebayashi.gunma.jp
+tomioka.gunma.jp
+tsukiyono.gunma.jp
+tsumagoi.gunma.jp
+ueno.gunma.jp
+yoshioka.gunma.jp
+asaminami.hiroshima.jp
+daiwa.hiroshima.jp
+etajima.hiroshima.jp
+fuchu.hiroshima.jp
+fukuyama.hiroshima.jp
+hatsukaichi.hiroshima.jp
+higashihiroshima.hiroshima.jp
+hongo.hiroshima.jp
+jinsekikogen.hiroshima.jp
+kaita.hiroshima.jp
+kui.hiroshima.jp
+kumano.hiroshima.jp
+kure.hiroshima.jp
+mihara.hiroshima.jp
+miyoshi.hiroshima.jp
+naka.hiroshima.jp
+onomichi.hiroshima.jp
+osakikamijima.hiroshima.jp
+otake.hiroshima.jp
+saka.hiroshima.jp
+sera.hiroshima.jp
+seranishi.hiroshima.jp
+shinichi.hiroshima.jp
+shobara.hiroshima.jp
+takehara.hiroshima.jp
+abashiri.hokkaido.jp
+abira.hokkaido.jp
+aibetsu.hokkaido.jp
+akabira.hokkaido.jp
+akkeshi.hokkaido.jp
+asahikawa.hokkaido.jp
+ashibetsu.hokkaido.jp
+ashoro.hokkaido.jp
+assabu.hokkaido.jp
+atsuma.hokkaido.jp
+bibai.hokkaido.jp
+biei.hokkaido.jp
+bifuka.hokkaido.jp
+bihoro.hokkaido.jp
+biratori.hokkaido.jp
+chippubetsu.hokkaido.jp
+chitose.hokkaido.jp
+date.hokkaido.jp
+ebetsu.hokkaido.jp
+embetsu.hokkaido.jp
+eniwa.hokkaido.jp
+erimo.hokkaido.jp
+esan.hokkaido.jp
+esashi.hokkaido.jp
+fukagawa.hokkaido.jp
+fukushima.hokkaido.jp
+furano.hokkaido.jp
+furubira.hokkaido.jp
+haboro.hokkaido.jp
+hakodate.hokkaido.jp
+hamatonbetsu.hokkaido.jp
+hidaka.hokkaido.jp
+higashikagura.hokkaido.jp
+higashikawa.hokkaido.jp
+hiroo.hokkaido.jp
+hokuryu.hokkaido.jp
+hokuto.hokkaido.jp
+honbetsu.hokkaido.jp
+horokanai.hokkaido.jp
+horonobe.hokkaido.jp
+ikeda.hokkaido.jp
+imakane.hokkaido.jp
+ishikari.hokkaido.jp
+iwamizawa.hokkaido.jp
+iwanai.hokkaido.jp
+kamifurano.hokkaido.jp
+kamikawa.hokkaido.jp
+kamishihoro.hokkaido.jp
+kamisunagawa.hokkaido.jp
+kamoenai.hokkaido.jp
+kayabe.hokkaido.jp
+kembuchi.hokkaido.jp
+kikonai.hokkaido.jp
+kimobetsu.hokkaido.jp
+kitahiroshima.hokkaido.jp
+kitami.hokkaido.jp
+kiyosato.hokkaido.jp
+koshimizu.hokkaido.jp
+kunneppu.hokkaido.jp
+kuriyama.hokkaido.jp
+kuromatsunai.hokkaido.jp
+kushiro.hokkaido.jp
+kutchan.hokkaido.jp
+kyowa.hokkaido.jp
+mashike.hokkaido.jp
+matsumae.hokkaido.jp
+mikasa.hokkaido.jp
+minamifurano.hokkaido.jp
+mombetsu.hokkaido.jp
+moseushi.hokkaido.jp
+mukawa.hokkaido.jp
+muroran.hokkaido.jp
+naie.hokkaido.jp
+nakagawa.hokkaido.jp
+nakasatsunai.hokkaido.jp
+nakatombetsu.hokkaido.jp
+nanae.hokkaido.jp
+nanporo.hokkaido.jp
+nayoro.hokkaido.jp
+nemuro.hokkaido.jp
+niikappu.hokkaido.jp
+niki.hokkaido.jp
+nishiokoppe.hokkaido.jp
+noboribetsu.hokkaido.jp
+numata.hokkaido.jp
+obihiro.hokkaido.jp
+obira.hokkaido.jp
+oketo.hokkaido.jp
+okoppe.hokkaido.jp
+otaru.hokkaido.jp
+otobe.hokkaido.jp
+otofuke.hokkaido.jp
+otoineppu.hokkaido.jp
+oumu.hokkaido.jp
+ozora.hokkaido.jp
+pippu.hokkaido.jp
+rankoshi.hokkaido.jp
+rebun.hokkaido.jp
+rikubetsu.hokkaido.jp
+rishiri.hokkaido.jp
+rishirifuji.hokkaido.jp
+saroma.hokkaido.jp
+sarufutsu.hokkaido.jp
+shakotan.hokkaido.jp
+shari.hokkaido.jp
+shibecha.hokkaido.jp
+shibetsu.hokkaido.jp
+shikabe.hokkaido.jp
+shikaoi.hokkaido.jp
+shimamaki.hokkaido.jp
+shimizu.hokkaido.jp
+shimokawa.hokkaido.jp
+shinshinotsu.hokkaido.jp
+shintoku.hokkaido.jp
+shiranuka.hokkaido.jp
+shiraoi.hokkaido.jp
+shiriuchi.hokkaido.jp
+sobetsu.hokkaido.jp
+sunagawa.hokkaido.jp
+taiki.hokkaido.jp
+takasu.hokkaido.jp
+takikawa.hokkaido.jp
+takinoue.hokkaido.jp
+teshikaga.hokkaido.jp
+tobetsu.hokkaido.jp
+tohma.hokkaido.jp
+tomakomai.hokkaido.jp
+tomari.hokkaido.jp
+toya.hokkaido.jp
+toyako.hokkaido.jp
+toyotomi.hokkaido.jp
+toyoura.hokkaido.jp
+tsubetsu.hokkaido.jp
+tsukigata.hokkaido.jp
+urakawa.hokkaido.jp
+urausu.hokkaido.jp
+uryu.hokkaido.jp
+utashinai.hokkaido.jp
+wakkanai.hokkaido.jp
+wassamu.hokkaido.jp
+yakumo.hokkaido.jp
+yoichi.hokkaido.jp
+aioi.hyogo.jp
+akashi.hyogo.jp
+ako.hyogo.jp
+amagasaki.hyogo.jp
+aogaki.hyogo.jp
+asago.hyogo.jp
+ashiya.hyogo.jp
+awaji.hyogo.jp
+fukusaki.hyogo.jp
+goshiki.hyogo.jp
+harima.hyogo.jp
+himeji.hyogo.jp
+ichikawa.hyogo.jp
+inagawa.hyogo.jp
+itami.hyogo.jp
+kakogawa.hyogo.jp
+kamigori.hyogo.jp
+kamikawa.hyogo.jp
+kasai.hyogo.jp
+kasuga.hyogo.jp
+kawanishi.hyogo.jp
+miki.hyogo.jp
+minamiawaji.hyogo.jp
+nishinomiya.hyogo.jp
+nishiwaki.hyogo.jp
+ono.hyogo.jp
+sanda.hyogo.jp
+sannan.hyogo.jp
+sasayama.hyogo.jp
+sayo.hyogo.jp
+shingu.hyogo.jp
+shinonsen.hyogo.jp
+shiso.hyogo.jp
+sumoto.hyogo.jp
+taishi.hyogo.jp
+taka.hyogo.jp
+takarazuka.hyogo.jp
+takasago.hyogo.jp
+takino.hyogo.jp
+tamba.hyogo.jp
+tatsuno.hyogo.jp
+toyooka.hyogo.jp
+yabu.hyogo.jp
+yashiro.hyogo.jp
+yoka.hyogo.jp
+yokawa.hyogo.jp
+ami.ibaraki.jp
+asahi.ibaraki.jp
+bando.ibaraki.jp
+chikusei.ibaraki.jp
+daigo.ibaraki.jp
+fujishiro.ibaraki.jp
+hitachi.ibaraki.jp
+hitachinaka.ibaraki.jp
+hitachiomiya.ibaraki.jp
+hitachiota.ibaraki.jp
+ibaraki.ibaraki.jp
+ina.ibaraki.jp
+inashiki.ibaraki.jp
+itako.ibaraki.jp
+iwama.ibaraki.jp
+joso.ibaraki.jp
+kamisu.ibaraki.jp
+kasama.ibaraki.jp
+kashima.ibaraki.jp
+kasumigaura.ibaraki.jp
+koga.ibaraki.jp
+miho.ibaraki.jp
+mito.ibaraki.jp
+moriya.ibaraki.jp
+naka.ibaraki.jp
+namegata.ibaraki.jp
+oarai.ibaraki.jp
+ogawa.ibaraki.jp
+omitama.ibaraki.jp
+ryugasaki.ibaraki.jp
+sakai.ibaraki.jp
+sakuragawa.ibaraki.jp
+shimodate.ibaraki.jp
+shimotsuma.ibaraki.jp
+shirosato.ibaraki.jp
+sowa.ibaraki.jp
+suifu.ibaraki.jp
+takahagi.ibaraki.jp
+tamatsukuri.ibaraki.jp
+tokai.ibaraki.jp
+tomobe.ibaraki.jp
+tone.ibaraki.jp
+toride.ibaraki.jp
+tsuchiura.ibaraki.jp
+tsukuba.ibaraki.jp
+uchihara.ibaraki.jp
+ushiku.ibaraki.jp
+yachiyo.ibaraki.jp
+yamagata.ibaraki.jp
+yawara.ibaraki.jp
+yuki.ibaraki.jp
+anamizu.ishikawa.jp
+hakui.ishikawa.jp
+hakusan.ishikawa.jp
+kaga.ishikawa.jp
+kahoku.ishikawa.jp
+kanazawa.ishikawa.jp
+kawakita.ishikawa.jp
+komatsu.ishikawa.jp
+nakanoto.ishikawa.jp
+nanao.ishikawa.jp
+nomi.ishikawa.jp
+nonoichi.ishikawa.jp
+noto.ishikawa.jp
+shika.ishikawa.jp
+suzu.ishikawa.jp
+tsubata.ishikawa.jp
+tsurugi.ishikawa.jp
+uchinada.ishikawa.jp
+wajima.ishikawa.jp
+fudai.iwate.jp
+fujisawa.iwate.jp
+hanamaki.iwate.jp
+hiraizumi.iwate.jp
+hirono.iwate.jp
+ichinohe.iwate.jp
+ichinoseki.iwate.jp
+iwaizumi.iwate.jp
+iwate.iwate.jp
+joboji.iwate.jp
+kamaishi.iwate.jp
+kanegasaki.iwate.jp
+karumai.iwate.jp
+kawai.iwate.jp
+kitakami.iwate.jp
+kuji.iwate.jp
+kunohe.iwate.jp
+kuzumaki.iwate.jp
+miyako.iwate.jp
+mizusawa.iwate.jp
+morioka.iwate.jp
+ninohe.iwate.jp
+noda.iwate.jp
+ofunato.iwate.jp
+oshu.iwate.jp
+otsuchi.iwate.jp
+rikuzentakata.iwate.jp
+shiwa.iwate.jp
+shizukuishi.iwate.jp
+sumita.iwate.jp
+tanohata.iwate.jp
+tono.iwate.jp
+yahaba.iwate.jp
+yamada.iwate.jp
+ayagawa.kagawa.jp
+higashikagawa.kagawa.jp
+kanonji.kagawa.jp
+kotohira.kagawa.jp
+manno.kagawa.jp
+marugame.kagawa.jp
+mitoyo.kagawa.jp
+naoshima.kagawa.jp
+sanuki.kagawa.jp
+tadotsu.kagawa.jp
+takamatsu.kagawa.jp
+tonosho.kagawa.jp
+uchinomi.kagawa.jp
+utazu.kagawa.jp
+zentsuji.kagawa.jp
+akune.kagoshima.jp
+amami.kagoshima.jp
+hioki.kagoshima.jp
+isa.kagoshima.jp
+isen.kagoshima.jp
+izumi.kagoshima.jp
+kagoshima.kagoshima.jp
+kanoya.kagoshima.jp
+kawanabe.kagoshima.jp
+kinko.kagoshima.jp
+kouyama.kagoshima.jp
+makurazaki.kagoshima.jp
+matsumoto.kagoshima.jp
+minamitane.kagoshima.jp
+nakatane.kagoshima.jp
+nishinoomote.kagoshima.jp
+satsumasendai.kagoshima.jp
+soo.kagoshima.jp
+tarumizu.kagoshima.jp
+yusui.kagoshima.jp
+aikawa.kanagawa.jp
+atsugi.kanagawa.jp
+ayase.kanagawa.jp
+chigasaki.kanagawa.jp
+ebina.kanagawa.jp
+fujisawa.kanagawa.jp
+hadano.kanagawa.jp
+hakone.kanagawa.jp
+hiratsuka.kanagawa.jp
+isehara.kanagawa.jp
+kaisei.kanagawa.jp
+kamakura.kanagawa.jp
+kiyokawa.kanagawa.jp
+matsuda.kanagawa.jp
+minamiashigara.kanagawa.jp
+miura.kanagawa.jp
+nakai.kanagawa.jp
+ninomiya.kanagawa.jp
+odawara.kanagawa.jp
+oi.kanagawa.jp
+oiso.kanagawa.jp
+sagamihara.kanagawa.jp
+samukawa.kanagawa.jp
+tsukui.kanagawa.jp
+yamakita.kanagawa.jp
+yamato.kanagawa.jp
+yokosuka.kanagawa.jp
+yugawara.kanagawa.jp
+zama.kanagawa.jp
+zushi.kanagawa.jp
+aki.kochi.jp
+geisei.kochi.jp
+hidaka.kochi.jp
+higashitsuno.kochi.jp
+ino.kochi.jp
+kagami.kochi.jp
+kami.kochi.jp
+kitagawa.kochi.jp
+kochi.kochi.jp
+mihara.kochi.jp
+motoyama.kochi.jp
+muroto.kochi.jp
+nahari.kochi.jp
+nakamura.kochi.jp
+nankoku.kochi.jp
+nishitosa.kochi.jp
+niyodogawa.kochi.jp
+ochi.kochi.jp
+okawa.kochi.jp
+otoyo.kochi.jp
+otsuki.kochi.jp
+sakawa.kochi.jp
+sukumo.kochi.jp
+susaki.kochi.jp
+tosa.kochi.jp
+tosashimizu.kochi.jp
+toyo.kochi.jp
+tsuno.kochi.jp
+umaji.kochi.jp
+yasuda.kochi.jp
+yusuhara.kochi.jp
+amakusa.kumamoto.jp
+arao.kumamoto.jp
+aso.kumamoto.jp
+choyo.kumamoto.jp
+gyokuto.kumamoto.jp
+hitoyoshi.kumamoto.jp
+kamiamakusa.kumamoto.jp
+kashima.kumamoto.jp
+kikuchi.kumamoto.jp
+kosa.kumamoto.jp
+kumamoto.kumamoto.jp
+mashiki.kumamoto.jp
+mifune.kumamoto.jp
+minamata.kumamoto.jp
+minamioguni.kumamoto.jp
+nagasu.kumamoto.jp
+nishihara.kumamoto.jp
+oguni.kumamoto.jp
+ozu.kumamoto.jp
+sumoto.kumamoto.jp
+takamori.kumamoto.jp
+uki.kumamoto.jp
+uto.kumamoto.jp
+yamaga.kumamoto.jp
+yamato.kumamoto.jp
+yatsushiro.kumamoto.jp
+ayabe.kyoto.jp
+fukuchiyama.kyoto.jp
+higashiyama.kyoto.jp
+ide.kyoto.jp
+ine.kyoto.jp
+joyo.kyoto.jp
+kameoka.kyoto.jp
+kamo.kyoto.jp
+kita.kyoto.jp
+kizu.kyoto.jp
+kumiyama.kyoto.jp
+kyotamba.kyoto.jp
+kyotanabe.kyoto.jp
+kyotango.kyoto.jp
+maizuru.kyoto.jp
+minami.kyoto.jp
+minamiyamashiro.kyoto.jp
+miyazu.kyoto.jp
+muko.kyoto.jp
+nagaokakyo.kyoto.jp
+nakagyo.kyoto.jp
+nantan.kyoto.jp
+oyamazaki.kyoto.jp
+sakyo.kyoto.jp
+seika.kyoto.jp
+tanabe.kyoto.jp
+uji.kyoto.jp
+ujitawara.kyoto.jp
+wazuka.kyoto.jp
+yamashina.kyoto.jp
+yawata.kyoto.jp
+asahi.mie.jp
+inabe.mie.jp
+ise.mie.jp
+kameyama.mie.jp
+kawagoe.mie.jp
+kiho.mie.jp
+kisosaki.mie.jp
+kiwa.mie.jp
+komono.mie.jp
+kumano.mie.jp
+kuwana.mie.jp
+matsusaka.mie.jp
+meiwa.mie.jp
+mihama.mie.jp
+minamiise.mie.jp
+misugi.mie.jp
+miyama.mie.jp
+nabari.mie.jp
+shima.mie.jp
+suzuka.mie.jp
+tado.mie.jp
+taiki.mie.jp
+taki.mie.jp
+tamaki.mie.jp
+toba.mie.jp
+tsu.mie.jp
+udono.mie.jp
+ureshino.mie.jp
+watarai.mie.jp
+yokkaichi.mie.jp
+furukawa.miyagi.jp
+higashimatsushima.miyagi.jp
+ishinomaki.miyagi.jp
+iwanuma.miyagi.jp
+kakuda.miyagi.jp
+kami.miyagi.jp
+kawasaki.miyagi.jp
+kesennuma.miyagi.jp
+marumori.miyagi.jp
+matsushima.miyagi.jp
+minamisanriku.miyagi.jp
+misato.miyagi.jp
+murata.miyagi.jp
+natori.miyagi.jp
+ogawara.miyagi.jp
+ohira.miyagi.jp
+onagawa.miyagi.jp
+osaki.miyagi.jp
+rifu.miyagi.jp
+semine.miyagi.jp
+shibata.miyagi.jp
+shichikashuku.miyagi.jp
+shikama.miyagi.jp
+shiogama.miyagi.jp
+shiroishi.miyagi.jp
+tagajo.miyagi.jp
+taiwa.miyagi.jp
+tome.miyagi.jp
+tomiya.miyagi.jp
+wakuya.miyagi.jp
+watari.miyagi.jp
+yamamoto.miyagi.jp
+zao.miyagi.jp
+aya.miyazaki.jp
+ebino.miyazaki.jp
+gokase.miyazaki.jp
+hyuga.miyazaki.jp
+kadogawa.miyazaki.jp
+kawaminami.miyazaki.jp
+kijo.miyazaki.jp
+kitagawa.miyazaki.jp
+kitakata.miyazaki.jp
+kitaura.miyazaki.jp
+kobayashi.miyazaki.jp
+kunitomi.miyazaki.jp
+kushima.miyazaki.jp
+mimata.miyazaki.jp
+miyakonojo.miyazaki.jp
+miyazaki.miyazaki.jp
+morotsuka.miyazaki.jp
+nichinan.miyazaki.jp
+nishimera.miyazaki.jp
+nobeoka.miyazaki.jp
+saito.miyazaki.jp
+shiiba.miyazaki.jp
+shintomi.miyazaki.jp
+takaharu.miyazaki.jp
+takanabe.miyazaki.jp
+takazaki.miyazaki.jp
+tsuno.miyazaki.jp
+achi.nagano.jp
+agematsu.nagano.jp
+anan.nagano.jp
+aoki.nagano.jp
+asahi.nagano.jp
+azumino.nagano.jp
+chikuhoku.nagano.jp
+chikuma.nagano.jp
+chino.nagano.jp
+fujimi.nagano.jp
+hakuba.nagano.jp
+hara.nagano.jp
+hiraya.nagano.jp
+iida.nagano.jp
+iijima.nagano.jp
+iiyama.nagano.jp
+iizuna.nagano.jp
+ikeda.nagano.jp
+ikusaka.nagano.jp
+ina.nagano.jp
+karuizawa.nagano.jp
+kawakami.nagano.jp
+kiso.nagano.jp
+kisofukushima.nagano.jp
+kitaaiki.nagano.jp
+komagane.nagano.jp
+komoro.nagano.jp
+matsukawa.nagano.jp
+matsumoto.nagano.jp
+miasa.nagano.jp
+minamiaiki.nagano.jp
+minamimaki.nagano.jp
+minamiminowa.nagano.jp
+minowa.nagano.jp
+miyada.nagano.jp
+miyota.nagano.jp
+mochizuki.nagano.jp
+nagano.nagano.jp
+nagawa.nagano.jp
+nagiso.nagano.jp
+nakagawa.nagano.jp
+nakano.nagano.jp
+nozawaonsen.nagano.jp
+obuse.nagano.jp
+ogawa.nagano.jp
+okaya.nagano.jp
+omachi.nagano.jp
+omi.nagano.jp
+ookuwa.nagano.jp
+ooshika.nagano.jp
+otaki.nagano.jp
+otari.nagano.jp
+sakae.nagano.jp
+sakaki.nagano.jp
+saku.nagano.jp
+sakuho.nagano.jp
+shimosuwa.nagano.jp
+shinanomachi.nagano.jp
+shiojiri.nagano.jp
+suwa.nagano.jp
+suzaka.nagano.jp
+takagi.nagano.jp
+takamori.nagano.jp
+takayama.nagano.jp
+tateshina.nagano.jp
+tatsuno.nagano.jp
+togakushi.nagano.jp
+togura.nagano.jp
+tomi.nagano.jp
+ueda.nagano.jp
+wada.nagano.jp
+yamagata.nagano.jp
+yamanouchi.nagano.jp
+yasaka.nagano.jp
+yasuoka.nagano.jp
+chijiwa.nagasaki.jp
+futsu.nagasaki.jp
+goto.nagasaki.jp
+hasami.nagasaki.jp
+hirado.nagasaki.jp
+iki.nagasaki.jp
+isahaya.nagasaki.jp
+kawatana.nagasaki.jp
+kuchinotsu.nagasaki.jp
+matsuura.nagasaki.jp
+nagasaki.nagasaki.jp
+obama.nagasaki.jp
+omura.nagasaki.jp
+oseto.nagasaki.jp
+saikai.nagasaki.jp
+sasebo.nagasaki.jp
+seihi.nagasaki.jp
+shimabara.nagasaki.jp
+shinkamigoto.nagasaki.jp
+togitsu.nagasaki.jp
+tsushima.nagasaki.jp
+unzen.nagasaki.jp
+ando.nara.jp
+gose.nara.jp
+heguri.nara.jp
+higashiyoshino.nara.jp
+ikaruga.nara.jp
+ikoma.nara.jp
+kamikitayama.nara.jp
+kanmaki.nara.jp
+kashiba.nara.jp
+kashihara.nara.jp
+katsuragi.nara.jp
+kawai.nara.jp
+kawakami.nara.jp
+kawanishi.nara.jp
+koryo.nara.jp
+kurotaki.nara.jp
+mitsue.nara.jp
+miyake.nara.jp
+nara.nara.jp
+nosegawa.nara.jp
+oji.nara.jp
+ouda.nara.jp
+oyodo.nara.jp
+sakurai.nara.jp
+sango.nara.jp
+shimoichi.nara.jp
+shimokitayama.nara.jp
+shinjo.nara.jp
+soni.nara.jp
+takatori.nara.jp
+tawaramoto.nara.jp
+tenkawa.nara.jp
+tenri.nara.jp
+uda.nara.jp
+yamatokoriyama.nara.jp
+yamatotakada.nara.jp
+yamazoe.nara.jp
+yoshino.nara.jp
+aga.niigata.jp
+agano.niigata.jp
+gosen.niigata.jp
+itoigawa.niigata.jp
+izumozaki.niigata.jp
+joetsu.niigata.jp
+kamo.niigata.jp
+kariwa.niigata.jp
+kashiwazaki.niigata.jp
+minamiuonuma.niigata.jp
+mitsuke.niigata.jp
+muika.niigata.jp
+murakami.niigata.jp
+myoko.niigata.jp
+nagaoka.niigata.jp
+niigata.niigata.jp
+ojiya.niigata.jp
+omi.niigata.jp
+sado.niigata.jp
+sanjo.niigata.jp
+seiro.niigata.jp
+seirou.niigata.jp
+sekikawa.niigata.jp
+shibata.niigata.jp
+tagami.niigata.jp
+tainai.niigata.jp
+tochio.niigata.jp
+tokamachi.niigata.jp
+tsubame.niigata.jp
+tsunan.niigata.jp
+uonuma.niigata.jp
+yahiko.niigata.jp
+yoita.niigata.jp
+yuzawa.niigata.jp
+beppu.oita.jp
+bungoono.oita.jp
+bungotakada.oita.jp
+hasama.oita.jp
+hiji.oita.jp
+himeshima.oita.jp
+hita.oita.jp
+kamitsue.oita.jp
+kokonoe.oita.jp
+kuju.oita.jp
+kunisaki.oita.jp
+kusu.oita.jp
+oita.oita.jp
+saiki.oita.jp
+taketa.oita.jp
+tsukumi.oita.jp
+usa.oita.jp
+usuki.oita.jp
+yufu.oita.jp
+akaiwa.okayama.jp
+asakuchi.okayama.jp
+bizen.okayama.jp
+hayashima.okayama.jp
+ibara.okayama.jp
+kagamino.okayama.jp
+kasaoka.okayama.jp
+kibichuo.okayama.jp
+kumenan.okayama.jp
+kurashiki.okayama.jp
+maniwa.okayama.jp
+misaki.okayama.jp
+nagi.okayama.jp
+niimi.okayama.jp
+nishiawakura.okayama.jp
+okayama.okayama.jp
+satosho.okayama.jp
+setouchi.okayama.jp
+shinjo.okayama.jp
+shoo.okayama.jp
+soja.okayama.jp
+takahashi.okayama.jp
+tamano.okayama.jp
+tsuyama.okayama.jp
+wake.okayama.jp
+yakage.okayama.jp
+aguni.okinawa.jp
+ginowan.okinawa.jp
+ginoza.okinawa.jp
+gushikami.okinawa.jp
+haebaru.okinawa.jp
+higashi.okinawa.jp
+hirara.okinawa.jp
+iheya.okinawa.jp
+ishigaki.okinawa.jp
+ishikawa.okinawa.jp
+itoman.okinawa.jp
+izena.okinawa.jp
+kadena.okinawa.jp
+kin.okinawa.jp
+kitadaito.okinawa.jp
+kitanakagusuku.okinawa.jp
+kumejima.okinawa.jp
+kunigami.okinawa.jp
+minamidaito.okinawa.jp
+motobu.okinawa.jp
+nago.okinawa.jp
+naha.okinawa.jp
+nakagusuku.okinawa.jp
+nakijin.okinawa.jp
+nanjo.okinawa.jp
+nishihara.okinawa.jp
+ogimi.okinawa.jp
+okinawa.okinawa.jp
+onna.okinawa.jp
+shimoji.okinawa.jp
+taketomi.okinawa.jp
+tarama.okinawa.jp
+tokashiki.okinawa.jp
+tomigusuku.okinawa.jp
+tonaki.okinawa.jp
+urasoe.okinawa.jp
+uruma.okinawa.jp
+yaese.okinawa.jp
+yomitan.okinawa.jp
+yonabaru.okinawa.jp
+yonaguni.okinawa.jp
+zamami.okinawa.jp
+abeno.osaka.jp
+chihayaakasaka.osaka.jp
+chuo.osaka.jp
+daito.osaka.jp
+fujiidera.osaka.jp
+habikino.osaka.jp
+hannan.osaka.jp
+higashiosaka.osaka.jp
+higashisumiyoshi.osaka.jp
+higashiyodogawa.osaka.jp
+hirakata.osaka.jp
+ibaraki.osaka.jp
+ikeda.osaka.jp
+izumi.osaka.jp
+izumiotsu.osaka.jp
+izumisano.osaka.jp
+kadoma.osaka.jp
+kaizuka.osaka.jp
+kanan.osaka.jp
+kashiwara.osaka.jp
+katano.osaka.jp
+kawachinagano.osaka.jp
+kishiwada.osaka.jp
+kita.osaka.jp
+kumatori.osaka.jp
+matsubara.osaka.jp
+minato.osaka.jp
+minoh.osaka.jp
+misaki.osaka.jp
+moriguchi.osaka.jp
+neyagawa.osaka.jp
+nishi.osaka.jp
+nose.osaka.jp
+osakasayama.osaka.jp
+sakai.osaka.jp
+sayama.osaka.jp
+sennan.osaka.jp
+settsu.osaka.jp
+shijonawate.osaka.jp
+shimamoto.osaka.jp
+suita.osaka.jp
+tadaoka.osaka.jp
+taishi.osaka.jp
+tajiri.osaka.jp
+takaishi.osaka.jp
+takatsuki.osaka.jp
+tondabayashi.osaka.jp
+toyonaka.osaka.jp
+toyono.osaka.jp
+yao.osaka.jp
+ariake.saga.jp
+arita.saga.jp
+fukudomi.saga.jp
+genkai.saga.jp
+hamatama.saga.jp
+hizen.saga.jp
+imari.saga.jp
+kamimine.saga.jp
+kanzaki.saga.jp
+karatsu.saga.jp
+kashima.saga.jp
+kitagata.saga.jp
+kitahata.saga.jp
+kiyama.saga.jp
+kouhoku.saga.jp
+kyuragi.saga.jp
+nishiarita.saga.jp
+ogi.saga.jp
+omachi.saga.jp
+ouchi.saga.jp
+saga.saga.jp
+shiroishi.saga.jp
+taku.saga.jp
+tara.saga.jp
+tosu.saga.jp
+yoshinogari.saga.jp
+arakawa.saitama.jp
+asaka.saitama.jp
+chichibu.saitama.jp
+fujimi.saitama.jp
+fujimino.saitama.jp
+fukaya.saitama.jp
+hanno.saitama.jp
+hanyu.saitama.jp
+hasuda.saitama.jp
+hatogaya.saitama.jp
+hatoyama.saitama.jp
+hidaka.saitama.jp
+higashichichibu.saitama.jp
+higashimatsuyama.saitama.jp
+honjo.saitama.jp
+ina.saitama.jp
+iruma.saitama.jp
+iwatsuki.saitama.jp
+kamiizumi.saitama.jp
+kamikawa.saitama.jp
+kamisato.saitama.jp
+kasukabe.saitama.jp
+kawagoe.saitama.jp
+kawaguchi.saitama.jp
+kawajima.saitama.jp
+kazo.saitama.jp
+kitamoto.saitama.jp
+koshigaya.saitama.jp
+kounosu.saitama.jp
+kuki.saitama.jp
+kumagaya.saitama.jp
+matsubushi.saitama.jp
+minano.saitama.jp
+misato.saitama.jp
+miyashiro.saitama.jp
+miyoshi.saitama.jp
+moroyama.saitama.jp
+nagatoro.saitama.jp
+namegawa.saitama.jp
+niiza.saitama.jp
+ogano.saitama.jp
+ogawa.saitama.jp
+ogose.saitama.jp
+okegawa.saitama.jp
+omiya.saitama.jp
+otaki.saitama.jp
+ranzan.saitama.jp
+ryokami.saitama.jp
+saitama.saitama.jp
+sakado.saitama.jp
+satte.saitama.jp
+sayama.saitama.jp
+shiki.saitama.jp
+shiraoka.saitama.jp
+soka.saitama.jp
+sugito.saitama.jp
+toda.saitama.jp
+tokigawa.saitama.jp
+tokorozawa.saitama.jp
+tsurugashima.saitama.jp
+urawa.saitama.jp
+warabi.saitama.jp
+yashio.saitama.jp
+yokoze.saitama.jp
+yono.saitama.jp
+yorii.saitama.jp
+yoshida.saitama.jp
+yoshikawa.saitama.jp
+yoshimi.saitama.jp
+aisho.shiga.jp
+gamo.shiga.jp
+higashiomi.shiga.jp
+hikone.shiga.jp
+koka.shiga.jp
+konan.shiga.jp
+kosei.shiga.jp
+koto.shiga.jp
+kusatsu.shiga.jp
+maibara.shiga.jp
+moriyama.shiga.jp
+nagahama.shiga.jp
+nishiazai.shiga.jp
+notogawa.shiga.jp
+omihachiman.shiga.jp
+otsu.shiga.jp
+ritto.shiga.jp
+ryuoh.shiga.jp
+takashima.shiga.jp
+takatsuki.shiga.jp
+torahime.shiga.jp
+toyosato.shiga.jp
+yasu.shiga.jp
+akagi.shimane.jp
+ama.shimane.jp
+gotsu.shimane.jp
+hamada.shimane.jp
+higashiizumo.shimane.jp
+hikawa.shimane.jp
+hikimi.shimane.jp
+izumo.shimane.jp
+kakinoki.shimane.jp
+masuda.shimane.jp
+matsue.shimane.jp
+misato.shimane.jp
+nishinoshima.shimane.jp
+ohda.shimane.jp
+okinoshima.shimane.jp
+okuizumo.shimane.jp
+shimane.shimane.jp
+tamayu.shimane.jp
+tsuwano.shimane.jp
+unnan.shimane.jp
+yakumo.shimane.jp
+yasugi.shimane.jp
+yatsuka.shimane.jp
+arai.shizuoka.jp
+atami.shizuoka.jp
+fuji.shizuoka.jp
+fujieda.shizuoka.jp
+fujikawa.shizuoka.jp
+fujinomiya.shizuoka.jp
+fukuroi.shizuoka.jp
+gotemba.shizuoka.jp
+haibara.shizuoka.jp
+hamamatsu.shizuoka.jp
+higashiizu.shizuoka.jp
+ito.shizuoka.jp
+iwata.shizuoka.jp
+izu.shizuoka.jp
+izunokuni.shizuoka.jp
+kakegawa.shizuoka.jp
+kannami.shizuoka.jp
+kawanehon.shizuoka.jp
+kawazu.shizuoka.jp
+kikugawa.shizuoka.jp
+kosai.shizuoka.jp
+makinohara.shizuoka.jp
+matsuzaki.shizuoka.jp
+minamiizu.shizuoka.jp
+mishima.shizuoka.jp
+morimachi.shizuoka.jp
+nishiizu.shizuoka.jp
+numazu.shizuoka.jp
+omaezaki.shizuoka.jp
+shimada.shizuoka.jp
+shimizu.shizuoka.jp
+shimoda.shizuoka.jp
+shizuoka.shizuoka.jp
+susono.shizuoka.jp
+yaizu.shizuoka.jp
+yoshida.shizuoka.jp
+ashikaga.tochigi.jp
+bato.tochigi.jp
+haga.tochigi.jp
+ichikai.tochigi.jp
+iwafune.tochigi.jp
+kaminokawa.tochigi.jp
+kanuma.tochigi.jp
+karasuyama.tochigi.jp
+kuroiso.tochigi.jp
+mashiko.tochigi.jp
+mibu.tochigi.jp
+moka.tochigi.jp
+motegi.tochigi.jp
+nasu.tochigi.jp
+nasushiobara.tochigi.jp
+nikko.tochigi.jp
+nishikata.tochigi.jp
+nogi.tochigi.jp
+ohira.tochigi.jp
+ohtawara.tochigi.jp
+oyama.tochigi.jp
+sakura.tochigi.jp
+sano.tochigi.jp
+shimotsuke.tochigi.jp
+shioya.tochigi.jp
+takanezawa.tochigi.jp
+tochigi.tochigi.jp
+tsuga.tochigi.jp
+ujiie.tochigi.jp
+utsunomiya.tochigi.jp
+yaita.tochigi.jp
+aizumi.tokushima.jp
+anan.tokushima.jp
+ichiba.tokushima.jp
+itano.tokushima.jp
+kainan.tokushima.jp
+komatsushima.tokushima.jp
+matsushige.tokushima.jp
+mima.tokushima.jp
+minami.tokushima.jp
+miyoshi.tokushima.jp
+mugi.tokushima.jp
+nakagawa.tokushima.jp
+naruto.tokushima.jp
+sanagochi.tokushima.jp
+shishikui.tokushima.jp
+tokushima.tokushima.jp
+wajiki.tokushima.jp
+adachi.tokyo.jp
+akiruno.tokyo.jp
+akishima.tokyo.jp
+aogashima.tokyo.jp
+arakawa.tokyo.jp
+bunkyo.tokyo.jp
+chiyoda.tokyo.jp
+chofu.tokyo.jp
+chuo.tokyo.jp
+edogawa.tokyo.jp
+fuchu.tokyo.jp
+fussa.tokyo.jp
+hachijo.tokyo.jp
+hachioji.tokyo.jp
+hamura.tokyo.jp
+higashikurume.tokyo.jp
+higashimurayama.tokyo.jp
+higashiyamato.tokyo.jp
+hino.tokyo.jp
+hinode.tokyo.jp
+hinohara.tokyo.jp
+inagi.tokyo.jp
+itabashi.tokyo.jp
+katsushika.tokyo.jp
+kita.tokyo.jp
+kiyose.tokyo.jp
+kodaira.tokyo.jp
+koganei.tokyo.jp
+kokubunji.tokyo.jp
+komae.tokyo.jp
+koto.tokyo.jp
+kouzushima.tokyo.jp
+kunitachi.tokyo.jp
+machida.tokyo.jp
+meguro.tokyo.jp
+minato.tokyo.jp
+mitaka.tokyo.jp
+mizuho.tokyo.jp
+musashimurayama.tokyo.jp
+musashino.tokyo.jp
+nakano.tokyo.jp
+nerima.tokyo.jp
+ogasawara.tokyo.jp
+okutama.tokyo.jp
+ome.tokyo.jp
+oshima.tokyo.jp
+ota.tokyo.jp
+setagaya.tokyo.jp
+shibuya.tokyo.jp
+shinagawa.tokyo.jp
+shinjuku.tokyo.jp
+suginami.tokyo.jp
+sumida.tokyo.jp
+tachikawa.tokyo.jp
+taito.tokyo.jp
+tama.tokyo.jp
+toshima.tokyo.jp
+chizu.tottori.jp
+hino.tottori.jp
+kawahara.tottori.jp
+koge.tottori.jp
+kotoura.tottori.jp
+misasa.tottori.jp
+nanbu.tottori.jp
+nichinan.tottori.jp
+sakaiminato.tottori.jp
+tottori.tottori.jp
+wakasa.tottori.jp
+yazu.tottori.jp
+yonago.tottori.jp
+asahi.toyama.jp
+fuchu.toyama.jp
+fukumitsu.toyama.jp
+funahashi.toyama.jp
+himi.toyama.jp
+imizu.toyama.jp
+inami.toyama.jp
+johana.toyama.jp
+kamiichi.toyama.jp
+kurobe.toyama.jp
+nakaniikawa.toyama.jp
+namerikawa.toyama.jp
+nanto.toyama.jp
+nyuzen.toyama.jp
+oyabe.toyama.jp
+taira.toyama.jp
+takaoka.toyama.jp
+tateyama.toyama.jp
+toga.toyama.jp
+tonami.toyama.jp
+toyama.toyama.jp
+unazuki.toyama.jp
+uozu.toyama.jp
+yamada.toyama.jp
+arida.wakayama.jp
+aridagawa.wakayama.jp
+gobo.wakayama.jp
+hashimoto.wakayama.jp
+hidaka.wakayama.jp
+hirogawa.wakayama.jp
+inami.wakayama.jp
+iwade.wakayama.jp
+kainan.wakayama.jp
+kamitonda.wakayama.jp
+katsuragi.wakayama.jp
+kimino.wakayama.jp
+kinokawa.wakayama.jp
+kitayama.wakayama.jp
+koya.wakayama.jp
+koza.wakayama.jp
+kozagawa.wakayama.jp
+kudoyama.wakayama.jp
+kushimoto.wakayama.jp
+mihama.wakayama.jp
+misato.wakayama.jp
+nachikatsuura.wakayama.jp
+shingu.wakayama.jp
+shirahama.wakayama.jp
+taiji.wakayama.jp
+tanabe.wakayama.jp
+wakayama.wakayama.jp
+yuasa.wakayama.jp
+yura.wakayama.jp
+asahi.yamagata.jp
+funagata.yamagata.jp
+higashine.yamagata.jp
+iide.yamagata.jp
+kahoku.yamagata.jp
+kaminoyama.yamagata.jp
+kaneyama.yamagata.jp
+kawanishi.yamagata.jp
+mamurogawa.yamagata.jp
+mikawa.yamagata.jp
+murayama.yamagata.jp
+nagai.yamagata.jp
+nakayama.yamagata.jp
+nanyo.yamagata.jp
+nishikawa.yamagata.jp
+obanazawa.yamagata.jp
+oe.yamagata.jp
+oguni.yamagata.jp
+ohkura.yamagata.jp
+oishida.yamagata.jp
+sagae.yamagata.jp
+sakata.yamagata.jp
+sakegawa.yamagata.jp
+shinjo.yamagata.jp
+shirataka.yamagata.jp
+shonai.yamagata.jp
+takahata.yamagata.jp
+tendo.yamagata.jp
+tozawa.yamagata.jp
+tsuruoka.yamagata.jp
+yamagata.yamagata.jp
+yamanobe.yamagata.jp
+yonezawa.yamagata.jp
+yuza.yamagata.jp
+abu.yamaguchi.jp
+hagi.yamaguchi.jp
+hikari.yamaguchi.jp
+hofu.yamaguchi.jp
+iwakuni.yamaguchi.jp
+kudamatsu.yamaguchi.jp
+mitou.yamaguchi.jp
+nagato.yamaguchi.jp
+oshima.yamaguchi.jp
+shimonoseki.yamaguchi.jp
+shunan.yamaguchi.jp
+tabuse.yamaguchi.jp
+tokuyama.yamaguchi.jp
+toyota.yamaguchi.jp
+ube.yamaguchi.jp
+yuu.yamaguchi.jp
+chuo.yamanashi.jp
+doshi.yamanashi.jp
+fuefuki.yamanashi.jp
+fujikawa.yamanashi.jp
+fujikawaguchiko.yamanashi.jp
+fujiyoshida.yamanashi.jp
+hayakawa.yamanashi.jp
+hokuto.yamanashi.jp
+ichikawamisato.yamanashi.jp
+kai.yamanashi.jp
+kofu.yamanashi.jp
+koshu.yamanashi.jp
+kosuge.yamanashi.jp
+minami-alps.yamanashi.jp
+minobu.yamanashi.jp
+nakamichi.yamanashi.jp
+nanbu.yamanashi.jp
+narusawa.yamanashi.jp
+nirasaki.yamanashi.jp
+nishikatsura.yamanashi.jp
+oshino.yamanashi.jp
+otsuki.yamanashi.jp
+showa.yamanashi.jp
+tabayama.yamanashi.jp
+tsuru.yamanashi.jp
+uenohara.yamanashi.jp
+yamanakako.yamanashi.jp
+yamanashi.yamanashi.jp
+
+// ke : http://www.kenic.or.ke/index.php?option=com_content&task=view&id=117&Itemid=145
+*.ke
+
+// kg : http://www.domain.kg/dmn_n.html
+kg
+org.kg
+net.kg
+com.kg
+edu.kg
+gov.kg
+mil.kg
+
+// kh : http://www.mptc.gov.kh/dns_registration.htm
+*.kh
+
+// ki : http://www.ki/dns/index.html
+ki
+edu.ki
+biz.ki
+net.ki
+org.ki
+gov.ki
+info.ki
+com.ki
+
+// km : http://en.wikipedia.org/wiki/.km
+// http://www.domaine.km/documents/charte.doc
+km
+org.km
+nom.km
+gov.km
+prd.km
+tm.km
+edu.km
+mil.km
+ass.km
+com.km
+// These are only mentioned as proposed suggestions at domaine.km, but
+// http://en.wikipedia.org/wiki/.km says they're available for registration:
+coop.km
+asso.km
+presse.km
+medecin.km
+notaires.km
+pharmaciens.km
+veterinaire.km
+gouv.km
+
+// kn : http://en.wikipedia.org/wiki/.kn
+// http://www.dot.kn/domainRules.html
+kn
+net.kn
+org.kn
+edu.kn
+gov.kn
+
+// kp : http://www.kcce.kp/en_index.php
+kp
+com.kp
+edu.kp
+gov.kp
+org.kp
+rep.kp
+tra.kp
+
+// kr : http://en.wikipedia.org/wiki/.kr
+// see also: http://domain.nida.or.kr/eng/registration.jsp
+kr
+ac.kr
+co.kr
+es.kr
+go.kr
+hs.kr
+kg.kr
+mil.kr
+ms.kr
+ne.kr
+or.kr
+pe.kr
+re.kr
+sc.kr
+// kr geographical names
+busan.kr
+chungbuk.kr
+chungnam.kr
+daegu.kr
+daejeon.kr
+gangwon.kr
+gwangju.kr
+gyeongbuk.kr
+gyeonggi.kr
+gyeongnam.kr
+incheon.kr
+jeju.kr
+jeonbuk.kr
+jeonnam.kr
+seoul.kr
+ulsan.kr
+
+// kw : http://en.wikipedia.org/wiki/.kw
+*.kw
+
+// ky : http://www.icta.ky/da_ky_reg_dom.php
+// Confirmed by registry <ky...@perimeterusa.com> 2008-06-17
+ky
+edu.ky
+gov.ky
+com.ky
+org.ky
+net.ky
+
+// kz : http://en.wikipedia.org/wiki/.kz
+// see also: http://www.nic.kz/rules/index.jsp
+kz
+org.kz
+edu.kz
+net.kz
+gov.kz
+mil.kz
+com.kz
+
+// la : http://en.wikipedia.org/wiki/.la
+// Submitted by registry <ga...@nic.la> 2008-06-10
+la
+int.la
+net.la
+info.la
+edu.la
+gov.la
+per.la
+com.la
+org.la
+
+// lb : http://en.wikipedia.org/wiki/.lb
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lb
+com.lb
+edu.lb
+gov.lb
+net.lb
+org.lb
+
+// lc : http://en.wikipedia.org/wiki/.lc
+// see also: http://www.nic.lc/rules.htm
+lc
+com.lc
+net.lc
+co.lc
+org.lc
+edu.lc
+gov.lc
+
+// li : http://en.wikipedia.org/wiki/.li
+li
+
+// lk : http://www.nic.lk/seclevpr.html
+lk
+gov.lk
+sch.lk
+net.lk
+int.lk
+com.lk
+org.lk
+edu.lk
+ngo.lk
+soc.lk
+web.lk
+ltd.lk
+assn.lk
+grp.lk
+hotel.lk
+
+// lr : http://psg.com/dns/lr/lr.txt
+// Submitted by registry <ra...@psg.com> 2008-06-17
+lr
+com.lr
+edu.lr
+gov.lr
+org.lr
+net.lr
+
+// ls : http://en.wikipedia.org/wiki/.ls
+ls
+co.ls
+org.ls
+
+// lt : http://en.wikipedia.org/wiki/.lt
+lt
+// gov.lt : http://www.gov.lt/index_en.php
+gov.lt
+
+// lu : http://www.dns.lu/en/
+lu
+
+// lv : http://www.nic.lv/DNS/En/generic.php
+lv
+com.lv
+edu.lv
+gov.lv
+org.lv
+mil.lv
+id.lv
+net.lv
+asn.lv
+conf.lv
+
+// ly : http://www.nic.ly/regulations.php
+ly
+com.ly
+net.ly
+gov.ly
+plc.ly
+edu.ly
+sch.ly
+med.ly
+org.ly
+id.ly
+
+// ma : http://en.wikipedia.org/wiki/.ma
+// http://www.anrt.ma/fr/admin/download/upload/file_fr782.pdf
+ma
+co.ma
+net.ma
+gov.ma
+org.ma
+ac.ma
+press.ma
+
+// mc : http://www.nic.mc/
+mc
+tm.mc
+asso.mc
+
+// md : http://en.wikipedia.org/wiki/.md
+md
+
+// me : http://en.wikipedia.org/wiki/.me
+me
+co.me
+net.me
+org.me
+edu.me
+ac.me
+gov.me
+its.me
+priv.me
+
+// mg : http://www.nic.mg/tarif.htm
+mg
+org.mg
+nom.mg
+gov.mg
+prd.mg
+tm.mg
+edu.mg
+mil.mg
+com.mg
+
+// mh : http://en.wikipedia.org/wiki/.mh
+mh
+
+// mil : http://en.wikipedia.org/wiki/.mil
+mil
+
+// mk : http://en.wikipedia.org/wiki/.mk
+// see also: http://dns.marnet.net.mk/postapka.php
+mk
+com.mk
+org.mk
+net.mk
+edu.mk
+gov.mk
+inf.mk
+name.mk
+
+// ml : http://www.gobin.info/domainname/ml-template.doc
+// see also: http://en.wikipedia.org/wiki/.ml
+ml
+com.ml
+edu.ml
+gouv.ml
+gov.ml
+net.ml
+org.ml
+presse.ml
+
+// mm : http://en.wikipedia.org/wiki/.mm
+*.mm
+
+// mn : http://en.wikipedia.org/wiki/.mn
+mn
+gov.mn
+edu.mn
+org.mn
+
+// mo : http://www.monic.net.mo/
+mo
+com.mo
+net.mo
+org.mo
+edu.mo
+gov.mo
+
+// mobi : http://en.wikipedia.org/wiki/.mobi
+mobi
+
+// mp : http://www.dot.mp/
+// Confirmed by registry <dc...@saipan.com> 2008-06-17
+mp
+
+// mq : http://en.wikipedia.org/wiki/.mq
+mq
+
+// mr : http://en.wikipedia.org/wiki/.mr
+mr
+gov.mr
+
+// ms : http://www.nic.ms/pdf/MS_Domain_Name_Rules.pdf
+ms
+com.ms
+edu.ms
+gov.ms
+net.ms
+org.ms
+
+// mt : https://www.nic.org.mt/go/policy
+// Submitted by registry <he...@nic.org.mt> 2013-11-19
+mt
+com.mt
+edu.mt
+net.mt
+org.mt
+
+// mu : http://en.wikipedia.org/wiki/.mu
+mu
+com.mu
+net.mu
+org.mu
+gov.mu
+ac.mu
+co.mu
+or.mu
+
+// museum : http://about.museum/naming/
+// http://index.museum/
+museum
+academy.museum
+agriculture.museum
+air.museum
+airguard.museum
+alabama.museum
+alaska.museum
+amber.museum
+ambulance.museum
+american.museum
+americana.museum
+americanantiques.museum
+americanart.museum
+amsterdam.museum
+and.museum
+annefrank.museum
+anthro.museum
+anthropology.museum
+antiques.museum
+aquarium.museum
+arboretum.museum
+archaeological.museum
+archaeology.museum
+architecture.museum
+art.museum
+artanddesign.museum
+artcenter.museum
+artdeco.museum
+arteducation.museum
+artgallery.museum
+arts.museum
+artsandcrafts.museum
+asmatart.museum
+assassination.museum
+assisi.museum
+association.museum
+astronomy.museum
+atlanta.museum
+austin.museum
+australia.museum
+automotive.museum
+aviation.museum
+axis.museum
+badajoz.museum
+baghdad.museum
+bahn.museum
+bale.museum
+baltimore.museum
+barcelona.museum
+baseball.museum
+basel.museum
+baths.museum
+bauern.museum
+beauxarts.museum
+beeldengeluid.museum
+bellevue.museum
+bergbau.museum
+berkeley.museum
+berlin.museum
+bern.museum
+bible.museum
+bilbao.museum
+bill.museum
+birdart.museum
+birthplace.museum
+bonn.museum
+boston.museum
+botanical.museum
+botanicalgarden.museum
+botanicgarden.museum
+botany.museum
+brandywinevalley.museum
+brasil.museum
+bristol.museum
+british.museum
+britishcolumbia.museum
+broadcast.museum
+brunel.museum
+brussel.museum
+brussels.museum
+bruxelles.museum
+building.museum
+burghof.museum
+bus.museum
+bushey.museum
+cadaques.museum
+california.museum
+cambridge.museum
+can.museum
+canada.museum
+capebreton.museum
+carrier.museum
+cartoonart.museum
+casadelamoneda.museum
+castle.museum
+castres.museum
+celtic.museum
+center.museum
+chattanooga.museum
+cheltenham.museum
+chesapeakebay.museum
+chicago.museum
+children.museum
+childrens.museum
+childrensgarden.museum
+chiropractic.museum
+chocolate.museum
+christiansburg.museum
+cincinnati.museum
+cinema.museum
+circus.museum
+civilisation.museum
+civilization.museum
+civilwar.museum
+clinton.museum
+clock.museum
+coal.museum
+coastaldefence.museum
+cody.museum
+coldwar.museum
+collection.museum
+colonialwilliamsburg.museum
+coloradoplateau.museum
+columbia.museum
+columbus.museum
+communication.museum
+communications.museum
+community.museum
+computer.museum
+computerhistory.museum
+comunicações.museum
+contemporary.museum
+contemporaryart.museum
+convent.museum
+copenhagen.museum
+corporation.museum
+correios-e-telecomunicações.museum
+corvette.museum
+costume.museum
+countryestate.museum
+county.museum
+crafts.museum
+cranbrook.museum
+creation.museum
+cultural.museum
+culturalcenter.museum
+culture.museum
+cyber.museum
+cymru.museum
+dali.museum
+dallas.museum
+database.museum
+ddr.museum
+decorativearts.museum
+delaware.museum
+delmenhorst.museum
+denmark.museum
+depot.museum
+design.museum
+detroit.museum
+dinosaur.museum
+discovery.museum
+dolls.museum
+donostia.museum
+durham.museum
+eastafrica.museum
+eastcoast.museum
+education.museum
+educational.museum
+egyptian.museum
+eisenbahn.museum
+elburg.museum
+elvendrell.museum
+embroidery.museum
+encyclopedic.museum
+england.museum
+entomology.museum
+environment.museum
+environmentalconservation.museum
+epilepsy.museum
+essex.museum
+estate.museum
+ethnology.museum
+exeter.museum
+exhibition.museum
+family.museum
+farm.museum
+farmequipment.museum
+farmers.museum
+farmstead.museum
+field.museum
+figueres.museum
+filatelia.museum
+film.museum
+fineart.museum
+finearts.museum
+finland.museum
+flanders.museum
+florida.museum
+force.museum
+fortmissoula.museum
+fortworth.museum
+foundation.museum
+francaise.museum
+frankfurt.museum
+franziskaner.museum
+freemasonry.museum
+freiburg.museum
+fribourg.museum
+frog.museum
+fundacio.museum
+furniture.museum
+gallery.museum
+garden.museum
+gateway.museum
+geelvinck.museum
+gemological.museum
+geology.museum
+georgia.museum
+giessen.museum
+glas.museum
+glass.museum
+gorge.museum
+grandrapids.museum
+graz.museum
+guernsey.museum
+halloffame.museum
+hamburg.museum
+handson.museum
+harvestcelebration.museum
+hawaii.museum
+health.museum
+heimatunduhren.museum
+hellas.museum
+helsinki.museum
+hembygdsforbund.museum
+heritage.museum
+histoire.museum
+historical.museum
+historicalsociety.museum
+historichouses.museum
+historisch.museum
+historisches.museum
+history.museum
+historyofscience.museum
+horology.museum
+house.museum
+humanities.museum
+illustration.museum
+imageandsound.museum
+indian.museum
+indiana.museum
+indianapolis.museum
+indianmarket.museum
+intelligence.museum
+interactive.museum
+iraq.museum
+iron.museum
+isleofman.museum
+jamison.museum
+jefferson.museum
+jerusalem.museum
+jewelry.museum
+jewish.museum
+jewishart.museum
+jfk.museum
+journalism.museum
+judaica.museum
+judygarland.museum
+juedisches.museum
+juif.museum
+karate.museum
+karikatur.museum
+kids.museum
+koebenhavn.museum
+koeln.museum
+kunst.museum
+kunstsammlung.museum
+kunstunddesign.museum
+labor.museum
+labour.museum
+lajolla.museum
+lancashire.museum
+landes.museum
+lans.museum
+läns.museum
+larsson.museum
+lewismiller.museum
+lincoln.museum
+linz.museum
+living.museum
+livinghistory.museum
+localhistory.museum
+london.museum
+losangeles.museum
+louvre.museum
+loyalist.museum
+lucerne.museum
+luxembourg.museum
+luzern.museum
+mad.museum
+madrid.museum
+mallorca.museum
+manchester.museum
+mansion.museum
+mansions.museum
+manx.museum
+marburg.museum
+maritime.museum
+maritimo.museum
+maryland.museum
+marylhurst.museum
+media.museum
+medical.museum
+medizinhistorisches.museum
+meeres.museum
+memorial.museum
+mesaverde.museum
+michigan.museum
+midatlantic.museum
+military.museum
+mill.museum
+miners.museum
+mining.museum
+minnesota.museum
+missile.museum
+missoula.museum
+modern.museum
+moma.museum
+money.museum
+monmouth.museum
+monticello.museum
+montreal.museum
+moscow.museum
+motorcycle.museum
+muenchen.museum
+muenster.museum
+mulhouse.museum
+muncie.museum
+museet.museum
+museumcenter.museum
+museumvereniging.museum
+music.museum
+national.museum
+nationalfirearms.museum
+nationalheritage.museum
+nativeamerican.museum
+naturalhistory.museum
+naturalhistorymuseum.museum
+naturalsciences.museum
+nature.museum
+naturhistorisches.museum
+natuurwetenschappen.museum
+naumburg.museum
+naval.museum
+nebraska.museum
+neues.museum
+newhampshire.museum
+newjersey.museum
+newmexico.museum
+newport.museum
+newspaper.museum
+newyork.museum
+niepce.museum
+norfolk.museum
+north.museum
+nrw.museum
+nuernberg.museum
+nuremberg.museum
+nyc.museum
+nyny.museum
+oceanographic.museum
+oceanographique.museum
+omaha.museum
+online.museum
+ontario.museum
+openair.museum
+oregon.museum
+oregontrail.museum
+otago.museum
+oxford.museum
+pacific.museum
+paderborn.museum
+palace.museum
+paleo.museum
+palmsprings.museum
+panama.museum
+paris.museum
+pasadena.museum
+pharmacy.museum
+philadelphia.museum
+philadelphiaarea.museum
+philately.museum
+phoenix.museum
+photography.museum
+pilots.museum
+pittsburgh.museum
+planetarium.museum
+plantation.museum
+plants.museum
+plaza.museum
+portal.museum
+portland.museum
+portlligat.museum
+posts-and-telecommunications.museum
+preservation.museum
+presidio.museum
+press.museum
+project.museum
+public.museum
+pubol.museum
+quebec.museum
+railroad.museum
+railway.museum
+research.museum
+resistance.museum
+riodejaneiro.museum
+rochester.museum
+rockart.museum
+roma.museum
+russia.museum
+saintlouis.museum
+salem.museum
+salvadordali.museum
+salzburg.museum
+sandiego.museum
+sanfrancisco.museum
+santabarbara.museum
+santacruz.museum
+santafe.museum
+saskatchewan.museum
+satx.museum
+savannahga.museum
+schlesisches.museum
+schoenbrunn.museum
+schokoladen.museum
+school.museum
+schweiz.museum
+science.museum
+scienceandhistory.museum
+scienceandindustry.museum
+sciencecenter.museum
+sciencecenters.museum
+science-fiction.museum
+sciencehistory.museum
+sciences.museum
+sciencesnaturelles.museum
+scotland.museum
+seaport.museum
+settlement.museum
+settlers.museum
+shell.museum
+sherbrooke.museum
+sibenik.museum
+silk.museum
+ski.museum
+skole.museum
+society.museum
+sologne.museum
+soundandvision.museum
+southcarolina.museum
+southwest.museum
+space.museum
+spy.museum
+square.museum
+stadt.museum
+stalbans.museum
+starnberg.museum
+state.museum
+stateofdelaware.museum
+station.museum
+steam.museum
+steiermark.museum
+stjohn.museum
+stockholm.museum
+stpetersburg.museum
+stuttgart.museum
+suisse.museum
+surgeonshall.museum
+surrey.museum
+svizzera.museum
+sweden.museum
+sydney.museum
+tank.museum
+tcm.museum
+technology.museum
+telekommunikation.museum
+television.museum
+texas.museum
+textile.museum
+theater.museum
+time.museum
+timekeeping.museum
+topology.museum
+torino.museum
+touch.museum
+town.museum
+transport.museum
+tree.museum
+trolley.museum
+trust.museum
+trustee.museum
+uhren.museum
+ulm.museum
+undersea.museum
+university.museum
+usa.museum
+usantiques.museum
+usarts.museum
+uscountryestate.museum
+usculture.museum
+usdecorativearts.museum
+usgarden.museum
+ushistory.museum
+ushuaia.museum
+uslivinghistory.museum
+utah.museum
+uvic.museum
+valley.museum
+vantaa.museum
+versailles.museum
+viking.museum
+village.museum
+virginia.museum
+virtual.museum
+virtuel.museum
+vlaanderen.museum
+volkenkunde.museum
+wales.museum
+wallonie.museum
+war.museum
+washingtondc.museum
+watchandclock.museum
+watch-and-clock.museum
+western.museum
+westfalen.museum
+whaling.museum
+wildlife.museum
+williamsburg.museum
+windmill.museum
+workshop.museum
+york.museum
+yorkshire.museum
+yosemite.museum
+youth.museum
+zoological.museum
+zoology.museum
+ירושלים.museum
+иком.museum
+
+// mv : http://en.wikipedia.org/wiki/.mv
+// "mv" included because, contra Wikipedia, google.mv exists.
+mv
+aero.mv
+biz.mv
+com.mv
+coop.mv
+edu.mv
+gov.mv
+info.mv
+int.mv
+mil.mv
+museum.mv
+name.mv
+net.mv
+org.mv
+pro.mv
+
+// mw : http://www.registrar.mw/
+mw
+ac.mw
+biz.mw
+co.mw
+com.mw
+coop.mw
+edu.mw
+gov.mw
+int.mw
+museum.mw
+net.mw
+org.mw
+
+// mx : http://www.nic.mx/
+// Submitted by registry <fa...@nic.mx> 2008-06-19
+mx
+com.mx
+org.mx
+gob.mx
+edu.mx
+net.mx
+
+// my : http://www.mynic.net.my/
+my
+com.my
+net.my
+org.my
+gov.my
+edu.my
+mil.my
+name.my
+
+// mz : http://www.gobin.info/domainname/mz-template.doc
+*.mz
+!teledata.mz
+
+// na : http://www.na-nic.com.na/
+// http://www.info.na/domain/
+na
+info.na
+pro.na
+name.na
+school.na
+or.na
+dr.na
+us.na
+mx.na
+ca.na
+in.na
+cc.na
+tv.na
+ws.na
+mobi.na
+co.na
+com.na
+org.na
+
+// name : has 2nd-level tlds, but there's no list of them
+name
+
+// nc : http://www.cctld.nc/
+nc
+asso.nc
+
+// ne : http://en.wikipedia.org/wiki/.ne
+ne
+
+// net : http://en.wikipedia.org/wiki/.net
+net
+
+// nf : http://en.wikipedia.org/wiki/.nf
+nf
+com.nf
+net.nf
+per.nf
+rec.nf
+web.nf
+arts.nf
+firm.nf
+info.nf
+other.nf
+store.nf
+
+// ng : http://psg.com/dns/ng/
+ng
+com.ng
+edu.ng
+name.ng
+net.ng
+org.ng
+sch.ng
+gov.ng
+mil.ng
+mobi.ng
+
+// ni : http://www.nic.ni/dominios.htm
+*.ni
+
+// nl : http://www.domain-registry.nl/ace.php/c,728,122,,,,Home.html
+// Confirmed by registry <An...@sidn.nl> (with technical
+// reservations) 2008-06-08
+nl
+
+// BV.nl will be a registry for dutch BV's (besloten vennootschap)
+bv.nl
+
+// no : http://www.norid.no/regelverk/index.en.html
+// The Norwegian registry has declined to notify us of updates. The web pages
+// referenced below are the official source of the data. There is also an
+// announce mailing list:
+// https://postlister.uninett.no/sympa/info/norid-diskusjon
+no
+// Norid generic domains : http://www.norid.no/regelverk/vedlegg-c.en.html
+fhs.no
+vgs.no
+fylkesbibl.no
+folkebibl.no
+museum.no
+idrett.no
+priv.no
+// Non-Norid generic domains : http://www.norid.no/regelverk/vedlegg-d.en.html
+mil.no
+stat.no
+dep.no
+kommune.no
+herad.no
+// no geographical names : http://www.norid.no/regelverk/vedlegg-b.en.html
+// counties
+aa.no
+ah.no
+bu.no
+fm.no
+hl.no
+hm.no
+jan-mayen.no
+mr.no
+nl.no
+nt.no
+of.no
+ol.no
+oslo.no
+rl.no
+sf.no
+st.no
+svalbard.no
+tm.no
+tr.no
+va.no
+vf.no
+// primary and lower secondary schools per county
+gs.aa.no
+gs.ah.no
+gs.bu.no
+gs.fm.no
+gs.hl.no
+gs.hm.no
+gs.jan-mayen.no
+gs.mr.no
+gs.nl.no
+gs.nt.no
+gs.of.no
+gs.ol.no
+gs.oslo.no
+gs.rl.no
+gs.sf.no
+gs.st.no
+gs.svalbard.no
+gs.tm.no
+gs.tr.no
+gs.va.no
+gs.vf.no
+// cities
+akrehamn.no
+åkrehamn.no
+algard.no
+ålgård.no
+arna.no
+brumunddal.no
+bryne.no
+bronnoysund.no
+brønnøysund.no
+drobak.no
+drøbak.no
+egersund.no
+fetsund.no
+floro.no
+florø.no
+fredrikstad.no
+hokksund.no
+honefoss.no
+hønefoss.no
+jessheim.no
+jorpeland.no
+jørpeland.no
+kirkenes.no
+kopervik.no
+krokstadelva.no
+langevag.no
+langevåg.no
+leirvik.no
+mjondalen.no
+mjøndalen.no
+mo-i-rana.no
+mosjoen.no
+mosjøen.no
+nesoddtangen.no
+orkanger.no
+osoyro.no
+osøyro.no
+raholt.no
+råholt.no
+sandnessjoen.no
+sandnessjøen.no
+skedsmokorset.no
+slattum.no
+spjelkavik.no
+stathelle.no
+stavern.no
+stjordalshalsen.no
+stjørdalshalsen.no
+tananger.no
+tranby.no
+vossevangen.no
+// communities
+afjord.no
+åfjord.no
+agdenes.no
+al.no
+ål.no
+alesund.no
+ålesund.no
+alstahaug.no
+alta.no
+áltá.no
+alaheadju.no
+álaheadju.no
+alvdal.no
+amli.no
+åmli.no
+amot.no
+åmot.no
+andebu.no
+andoy.no
+andøy.no
+andasuolo.no
+ardal.no
+årdal.no
+aremark.no
+arendal.no
+ås.no
+aseral.no
+åseral.no
+asker.no
+askim.no
+askvoll.no
+askoy.no
+askøy.no
+asnes.no
+åsnes.no
+audnedaln.no
+aukra.no
+aure.no
+aurland.no
+aurskog-holand.no
+aurskog-høland.no
+austevoll.no
+austrheim.no
+averoy.no
+averøy.no
+balestrand.no
+ballangen.no
+balat.no
+bálát.no
+balsfjord.no
+bahccavuotna.no
+báhccavuotna.no
+bamble.no
+bardu.no
+beardu.no
+beiarn.no
+bajddar.no
+bájddar.no
+baidar.no
+báidár.no
+berg.no
+bergen.no
+berlevag.no
+berlevåg.no
+bearalvahki.no
+bearalváhki.no
+bindal.no
+birkenes.no
+bjarkoy.no
+bjarkøy.no
+bjerkreim.no
+bjugn.no
+bodo.no
+bodø.no
+badaddja.no
+bådåddjå.no
+budejju.no
+bokn.no
+bremanger.no
+bronnoy.no
+brønnøy.no
+bygland.no
+bykle.no
+barum.no
+bærum.no
+bo.telemark.no
+bø.telemark.no
+bo.nordland.no
+bø.nordland.no
+bievat.no
+bievát.no
+bomlo.no
+bømlo.no
+batsfjord.no
+båtsfjord.no
+bahcavuotna.no
+báhcavuotna.no
+dovre.no
+drammen.no
+drangedal.no
+dyroy.no
+dyrøy.no
+donna.no
+dønna.no
+eid.no
+eidfjord.no
+eidsberg.no
+eidskog.no
+eidsvoll.no
+eigersund.no
+elverum.no
+enebakk.no
+engerdal.no
+etne.no
+etnedal.no
+evenes.no
+evenassi.no
+evenášši.no
+evje-og-hornnes.no
+farsund.no
+fauske.no
+fuossko.no
+fuoisku.no
+fedje.no
+fet.no
+finnoy.no
+finnøy.no
+fitjar.no
+fjaler.no
+fjell.no
+flakstad.no
+flatanger.no
+flekkefjord.no
+flesberg.no
+flora.no
+fla.no
+flå.no
+folldal.no
+forsand.no
+fosnes.no
+frei.no
+frogn.no
+froland.no
+frosta.no
+frana.no
+fræna.no
+froya.no
+frøya.no
+fusa.no
+fyresdal.no
+forde.no
+førde.no
+gamvik.no
+gangaviika.no
+gáŋgaviika.no
+gaular.no
+gausdal.no
+gildeskal.no
+gildeskål.no
+giske.no
+gjemnes.no
+gjerdrum.no
+gjerstad.no
+gjesdal.no
+gjovik.no
+gjøvik.no
+gloppen.no
+gol.no
+gran.no
+grane.no
+granvin.no
+gratangen.no
+grimstad.no
+grong.no
+kraanghke.no
+kråanghke.no
+grue.no
+gulen.no
+hadsel.no
+halden.no
+halsa.no
+hamar.no
+hamaroy.no
+habmer.no
+hábmer.no
+hapmir.no
+hápmir.no
+hammerfest.no
+hammarfeasta.no
+hámmárfeasta.no
+haram.no
+hareid.no
+harstad.no
+hasvik.no
+aknoluokta.no
+ákŋoluokta.no
+hattfjelldal.no
+aarborte.no
+haugesund.no
+hemne.no
+hemnes.no
+hemsedal.no
+heroy.more-og-romsdal.no
+herøy.møre-og-romsdal.no
+heroy.nordland.no
+herøy.nordland.no
+hitra.no
+hjartdal.no
+hjelmeland.no
+hobol.no
+hobøl.no
+hof.no
+hol.no
+hole.no
+holmestrand.no
+holtalen.no
+holtålen.no
+hornindal.no
+horten.no
+hurdal.no
+hurum.no
+hvaler.no
+hyllestad.no
+hagebostad.no
+hægebostad.no
+hoyanger.no
+høyanger.no
+hoylandet.no
+høylandet.no
+ha.no
+hå.no
+ibestad.no
+inderoy.no
+inderøy.no
+iveland.no
+jevnaker.no
+jondal.no
+jolster.no
+jølster.no
+karasjok.no
+karasjohka.no
+kárášjohka.no
+karlsoy.no
+galsa.no
+gálsá.no
+karmoy.no
+karmøy.no
+kautokeino.no
+guovdageaidnu.no
+klepp.no
+klabu.no
+klæbu.no
+kongsberg.no
+kongsvinger.no
+kragero.no
+kragerø.no
+kristiansand.no
+kristiansund.no
+krodsherad.no
+krødsherad.no
+kvalsund.no
+rahkkeravju.no
+ráhkkerávju.no
+kvam.no
+kvinesdal.no
+kvinnherad.no
+kviteseid.no
+kvitsoy.no
+kvitsøy.no
+kvafjord.no
+kvæfjord.no
+giehtavuoatna.no
+kvanangen.no
+kvænangen.no
+navuotna.no
+návuotna.no
+kafjord.no
+kåfjord.no
+gaivuotna.no
+gáivuotna.no
+larvik.no
+lavangen.no
+lavagis.no
+loabat.no
+loabát.no
+lebesby.no
+davvesiida.no
+leikanger.no
+leirfjord.no
+leka.no
+leksvik.no
+lenvik.no
+leangaviika.no
+leaŋgaviika.no
+lesja.no
+levanger.no
+lier.no
+lierne.no
+lillehammer.no
+lillesand.no
+lindesnes.no
+lindas.no
+lindås.no
+lom.no
+loppa.no
+lahppi.no
+láhppi.no
+lund.no
+lunner.no
+luroy.no
+lurøy.no
+luster.no
+lyngdal.no
+lyngen.no
+ivgu.no
+lardal.no
+lerdal.no
+lærdal.no
+lodingen.no
+lødingen.no
+lorenskog.no
+lørenskog.no
+loten.no
+løten.no
+malvik.no
+masoy.no
+måsøy.no
+muosat.no
+muosát.no
+mandal.no
+marker.no
+marnardal.no
+masfjorden.no
+meland.no
+meldal.no
+melhus.no
+meloy.no
+meløy.no
+meraker.no
+meråker.no
+moareke.no
+moåreke.no
+midsund.no
+midtre-gauldal.no
+modalen.no
+modum.no
+molde.no
+moskenes.no
+moss.no
+mosvik.no
+malselv.no
+målselv.no
+malatvuopmi.no
+málatvuopmi.no
+namdalseid.no
+aejrie.no
+namsos.no
+namsskogan.no
+naamesjevuemie.no
+nååmesjevuemie.no
+laakesvuemie.no
+nannestad.no
+narvik.no
+narviika.no
+naustdal.no
+nedre-eiker.no
+nes.akershus.no
+nes.buskerud.no
+nesna.no
+nesodden.no
+nesseby.no
+unjarga.no
+unjárga.no
+nesset.no
+nissedal.no
+nittedal.no
+nord-aurdal.no
+nord-fron.no
+nord-odal.no
+norddal.no
+nordkapp.no
+davvenjarga.no
+davvenjárga.no
+nordre-land.no
+nordreisa.no
+raisa.no
+ráisa.no
+nore-og-uvdal.no
+notodden.no
+naroy.no
+nærøy.no
+notteroy.no
+nøtterøy.no
+odda.no
+oksnes.no
+øksnes.no
+oppdal.no
+oppegard.no
+oppegård.no
+orkdal.no
+orland.no
+ørland.no
+orskog.no
+ørskog.no
+orsta.no
+ørsta.no
+os.hedmark.no
+os.hordaland.no
+osen.no
+osteroy.no
+osterøy.no
+ostre-toten.no
+østre-toten.no
+overhalla.no
+ovre-eiker.no
+øvre-eiker.no
+oyer.no
+øyer.no
+oygarden.no
+øygarden.no
+oystre-slidre.no
+øystre-slidre.no
+porsanger.no
+porsangu.no
+porsáŋgu.no
+porsgrunn.no
+radoy.no
+radøy.no
+rakkestad.no
+rana.no
+ruovat.no
+randaberg.no
+rauma.no
+rendalen.no
+rennebu.no
+rennesoy.no
+rennesøy.no
+rindal.no
+ringebu.no
+ringerike.no
+ringsaker.no
+rissa.no
+risor.no
+risør.no
+roan.no
+rollag.no
+rygge.no
+ralingen.no
+rælingen.no
+rodoy.no
+rødøy.no
+romskog.no
+rømskog.no
+roros.no
+røros.no
+rost.no
+røst.no
+royken.no
+røyken.no
+royrvik.no
+røyrvik.no
+rade.no
+råde.no
+salangen.no
+siellak.no
+saltdal.no
+salat.no
+sálát.no
+sálat.no
+samnanger.no
+sande.more-og-romsdal.no
+sande.møre-og-romsdal.no
+sande.vestfold.no
+sandefjord.no
+sandnes.no
+sandoy.no
+sandøy.no
+sarpsborg.no
+sauda.no
+sauherad.no
+sel.no
+selbu.no
+selje.no
+seljord.no
+sigdal.no
+siljan.no
+sirdal.no
+skaun.no
+skedsmo.no
+ski.no
+skien.no
+skiptvet.no
+skjervoy.no
+skjervøy.no
+skierva.no
+skiervá.no
+skjak.no
+skjåk.no
+skodje.no
+skanland.no
+skånland.no
+skanit.no
+skánit.no
+smola.no
+smøla.no
+snillfjord.no
+snasa.no
+snåsa.no
+snoasa.no
+snaase.no
+snåase.no
+sogndal.no
+sokndal.no
+sola.no
+solund.no
+songdalen.no
+sortland.no
+spydeberg.no
+stange.no
+stavanger.no
+steigen.no
+steinkjer.no
+stjordal.no
+stjørdal.no
+stokke.no
+stor-elvdal.no
+stord.no
+stordal.no
+storfjord.no
+omasvuotna.no
+strand.no
+stranda.no
+stryn.no
+sula.no
+suldal.no
+sund.no
+sunndal.no
+surnadal.no
+sveio.no
+svelvik.no
+sykkylven.no
+sogne.no
+søgne.no
+somna.no
+sømna.no
+sondre-land.no
+søndre-land.no
+sor-aurdal.no
+sør-aurdal.no
+sor-fron.no
+sør-fron.no
+sor-odal.no
+sør-odal.no
+sor-varanger.no
+sør-varanger.no
+matta-varjjat.no
+mátta-várjjat.no
+sorfold.no
+sørfold.no
+sorreisa.no
+sørreisa.no
+sorum.no
+sørum.no
+tana.no
+deatnu.no
+time.no
+tingvoll.no
+tinn.no
+tjeldsund.no
+dielddanuorri.no
+tjome.no
+tjøme.no
+tokke.no
+tolga.no
+torsken.no
+tranoy.no
+tranøy.no
+tromso.no
+tromsø.no
+tromsa.no
+romsa.no
+trondheim.no
+troandin.no
+trysil.no
+trana.no
+træna.no
+trogstad.no
+trøgstad.no
+tvedestrand.no
+tydal.no
+tynset.no
+tysfjord.no
+divtasvuodna.no
+divttasvuotna.no
+tysnes.no
+tysvar.no
+tysvær.no
+tonsberg.no
+tønsberg.no
+ullensaker.no
+ullensvang.no
+ulvik.no
+utsira.no
+vadso.no
+vadsø.no
+cahcesuolo.no
+čáhcesuolo.no
+vaksdal.no
+valle.no
+vang.no
+vanylven.no
+vardo.no
+vardø.no
+varggat.no
+várggát.no
+vefsn.no
+vaapste.no
+vega.no
+vegarshei.no
+vegårshei.no
+vennesla.no
+verdal.no
+verran.no
+vestby.no
+vestnes.no
+vestre-slidre.no
+vestre-toten.no
+vestvagoy.no
+vestvågøy.no
+vevelstad.no
+vik.no
+vikna.no
+vindafjord.no
+volda.no
+voss.no
+varoy.no
+værøy.no
+vagan.no
+vågan.no
+voagat.no
+vagsoy.no
+vågsøy.no
+vaga.no
+vågå.no
+valer.ostfold.no
+våler.østfold.no
+valer.hedmark.no
+våler.hedmark.no
+
+// np : http://www.mos.com.np/register.html
+*.np
+
+// nr : http://cenpac.net.nr/dns/index.html
+// Confirmed by registry <te...@cenpac.net.nr> 2008-06-17
+nr
+biz.nr
+info.nr
+gov.nr
+edu.nr
+org.nr
+net.nr
+com.nr
+
+// nu : http://en.wikipedia.org/wiki/.nu
+nu
+
+// nz : http://en.wikipedia.org/wiki/.nz
+// Confirmed by registry <ja...@nzrs.net.nz> 2014-05-19
+nz
+ac.nz
+co.nz
+cri.nz
+geek.nz
+gen.nz
+govt.nz
+health.nz
+iwi.nz
+kiwi.nz
+maori.nz
+mil.nz
+māori.nz
+net.nz
+org.nz
+parliament.nz
+school.nz
+
+// om : http://en.wikipedia.org/wiki/.om
+om
+co.om
+com.om
+edu.om
+gov.om
+med.om
+museum.om
+net.om
+org.om
+pro.om
+
+// org : http://en.wikipedia.org/wiki/.org
+org
+
+// pa : http://www.nic.pa/
+// Some additional second level "domains" resolve directly as hostnames, such as
+// pannet.pa, so we add a rule for "pa".
+pa
+ac.pa
+gob.pa
+com.pa
+org.pa
+sld.pa
+edu.pa
+net.pa
+ing.pa
+abo.pa
+med.pa
+nom.pa
+
+// pe : https://www.nic.pe/InformeFinalComision.pdf
+pe
+edu.pe
+gob.pe
+nom.pe
+mil.pe
+org.pe
+com.pe
+net.pe
+
+// pf : http://www.gobin.info/domainname/formulaire-pf.pdf
+pf
+com.pf
+org.pf
+edu.pf
+
+// pg : http://en.wikipedia.org/wiki/.pg
+*.pg
+
+// ph : http://www.domains.ph/FAQ2.asp
+// Submitted by registry <je...@email.com.ph> 2008-06-13
+ph
+com.ph
+net.ph
+org.ph
+gov.ph
+edu.ph
+ngo.ph
+mil.ph
+i.ph
+
+// pk : http://pk5.pknic.net.pk/pk5/msgNamepk.PK
+pk
+com.pk
+net.pk
+edu.pk
+org.pk
+fam.pk
+biz.pk
+web.pk
+gov.pk
+gob.pk
+gok.pk
+gon.pk
+gop.pk
+gos.pk
+info.pk
+
+// pl http://www.dns.pl/english/index.html
+// confirmed on 26.09.2014 from Bogna Tchórzewska <pa...@dns.pl>
+pl
+com.pl
+net.pl
+org.pl
+info.pl
+waw.pl
+gov.pl
+// pl functional domains (http://www.dns.pl/english/index.html)
+aid.pl
+agro.pl
+atm.pl
+auto.pl
+biz.pl
+edu.pl
+gmina.pl
+gsm.pl
+mail.pl
+miasta.pl
+media.pl
+mil.pl
+nieruchomosci.pl
+nom.pl
+pc.pl
+powiat.pl
+priv.pl
+realestate.pl
+rel.pl
+sex.pl
+shop.pl
+sklep.pl
+sos.pl
+szkola.pl
+targi.pl
+tm.pl
+tourism.pl
+travel.pl
+turystyka.pl
+// Government domains (administred by ippt.gov.pl)
+uw.gov.pl
+um.gov.pl
+ug.gov.pl
+upow.gov.pl
+starostwo.gov.pl
+so.gov.pl
+sr.gov.pl
+po.gov.pl
+pa.gov.pl
+// pl regional domains (http://www.dns.pl/english/index.html)
+augustow.pl
+babia-gora.pl
+bedzin.pl
+beskidy.pl
+bialowieza.pl
+bialystok.pl
+bielawa.pl
+bieszczady.pl
+boleslawiec.pl
+bydgoszcz.pl
+bytom.pl
+cieszyn.pl
+czeladz.pl
+czest.pl
+dlugoleka.pl
+elblag.pl
+elk.pl
+glogow.pl
+gniezno.pl
+gorlice.pl
+grajewo.pl
+ilawa.pl
+jaworzno.pl
+jelenia-gora.pl
+jgora.pl
+kalisz.pl
+kazimierz-dolny.pl
+karpacz.pl
+kartuzy.pl
+kaszuby.pl
+katowice.pl
+kepno.pl
+ketrzyn.pl
+klodzko.pl
+kobierzyce.pl
+kolobrzeg.pl
+konin.pl
+konskowola.pl
+kutno.pl
+lapy.pl
+lebork.pl
+legnica.pl
+lezajsk.pl
+limanowa.pl
+lomza.pl
+lowicz.pl
+lubin.pl
+lukow.pl
+malbork.pl
+malopolska.pl
+mazowsze.pl
+mazury.pl
+mielec.pl
+mielno.pl
+mragowo.pl
+naklo.pl
+nowaruda.pl
+nysa.pl
+olawa.pl
+olecko.pl
+olkusz.pl
+olsztyn.pl
+opoczno.pl
+opole.pl
+ostroda.pl
+ostroleka.pl
+ostrowiec.pl
+ostrowwlkp.pl
+pila.pl
+pisz.pl
+podhale.pl
+podlasie.pl
+polkowice.pl
+pomorze.pl
+pomorskie.pl
+prochowice.pl
+pruszkow.pl
+przeworsk.pl
+pulawy.pl
+radom.pl
+rawa-maz.pl
+rybnik.pl
+rzeszow.pl
+sanok.pl
+sejny.pl
+slask.pl
+slupsk.pl
+sosnowiec.pl
+stalowa-wola.pl
+skoczow.pl
+starachowice.pl
+stargard.pl
+suwalki.pl
+swidnica.pl
+swiebodzin.pl
+swinoujscie.pl
+szczecin.pl
+szczytno.pl
+tarnobrzeg.pl
+tgory.pl
+turek.pl
+tychy.pl
+ustka.pl
+walbrzych.pl
+warmia.pl
+warszawa.pl
+wegrow.pl
+wielun.pl
+wlocl.pl
+wloclawek.pl
+wodzislaw.pl
+wolomin.pl
+wroclaw.pl
+zachpomor.pl
+zagan.pl
+zarow.pl
+zgora.pl
+zgorzelec.pl
+
+// pm : http://www.afnic.fr/medias/documents/AFNIC-naming-policy2012.pdf
+pm
+
+// pn : http://www.government.pn/PnRegistry/policies.htm
+pn
+gov.pn
+co.pn
+org.pn
+edu.pn
+net.pn
+
+// post : http://en.wikipedia.org/wiki/.post
+post
+
+// pr : http://www.nic.pr/index.asp?f=1
+pr
+com.pr
+net.pr
+org.pr
+gov.pr
+edu.pr
+isla.pr
+pro.pr
+biz.pr
+info.pr
+name.pr
+// these aren't mentioned on nic.pr, but on http://en.wikipedia.org/wiki/.pr
+est.pr
+prof.pr
+ac.pr
+
+// pro : http://www.nic.pro/support_faq.htm
+pro
+aca.pro
+bar.pro
+cpa.pro
+jur.pro
+law.pro
+med.pro
+eng.pro
+
+// ps : http://en.wikipedia.org/wiki/.ps
+// http://www.nic.ps/registration/policy.html#reg
+ps
+edu.ps
+gov.ps
+sec.ps
+plo.ps
+com.ps
+org.ps
+net.ps
+
+// pt : http://online.dns.pt/dns/start_dns
+pt
+net.pt
+gov.pt
+org.pt
+edu.pt
+int.pt
+publ.pt
+com.pt
+nome.pt
+
+// pw : http://en.wikipedia.org/wiki/.pw
+pw
+co.pw
+ne.pw
+or.pw
+ed.pw
+go.pw
+belau.pw
+
+// py : http://www.nic.py/pautas.html#seccion_9
+// Confirmed by registry 2012-10-03
+py
+com.py
+coop.py
+edu.py
+gov.py
+mil.py
+net.py
+org.py
+
+// qa : http://domains.qa/en/
+qa
+com.qa
+edu.qa
+gov.qa
+mil.qa
+name.qa
+net.qa
+org.qa
+sch.qa
+
+// re : http://www.afnic.re/obtenir/chartes/nommage-re/annexe-descriptifs
+re
+com.re
+asso.re
+nom.re
+
+// ro : http://www.rotld.ro/
+ro
+com.ro
+org.ro
+tm.ro
+nt.ro
+nom.ro
+info.ro
+rec.ro
+arts.ro
+firm.ro
+store.ro
+www.ro
+
+// rs : http://en.wikipedia.org/wiki/.rs
+rs
+co.rs
+org.rs
+edu.rs
+ac.rs
+gov.rs
+in.rs
+
+// ru : http://www.cctld.ru/ru/docs/aktiv_8.php
+// Industry domains
+ru
+ac.ru
+com.ru
+edu.ru
+int.ru
+net.ru
+org.ru
+pp.ru
+// Geographical domains
+adygeya.ru
+altai.ru
+amur.ru
+arkhangelsk.ru
+astrakhan.ru
+bashkiria.ru
+belgorod.ru
+bir.ru
+bryansk.ru
+buryatia.ru
+cbg.ru
+chel.ru
+chelyabinsk.ru
+chita.ru
+chukotka.ru
+chuvashia.ru
+dagestan.ru
+dudinka.ru
+e-burg.ru
+grozny.ru
+irkutsk.ru
+ivanovo.ru
+izhevsk.ru
+jar.ru
+joshkar-ola.ru
+kalmykia.ru
+kaluga.ru
+kamchatka.ru
+karelia.ru
+kazan.ru
+kchr.ru
+kemerovo.ru
+khabarovsk.ru
+khakassia.ru
+khv.ru
+kirov.ru
+koenig.ru
+komi.ru
+kostroma.ru
+krasnoyarsk.ru
+kuban.ru
+kurgan.ru
+kursk.ru
+lipetsk.ru
+magadan.ru
+mari.ru
+mari-el.ru
+marine.ru
+mordovia.ru
+// mosreg.ru  Bug 1090800 - removed at request of Aleksey Konstantinov <ko...@mosreg.ru>
+msk.ru
+murmansk.ru
+nalchik.ru
+nnov.ru
+nov.ru
+novosibirsk.ru
+nsk.ru
+omsk.ru
+orenburg.ru
+oryol.ru
+palana.ru
+penza.ru
+perm.ru
+ptz.ru
+rnd.ru
+ryazan.ru
+sakhalin.ru
+samara.ru
+saratov.ru
+simbirsk.ru
+smolensk.ru
+spb.ru
+stavropol.ru
+stv.ru
+surgut.ru
+tambov.ru
+tatarstan.ru
+tom.ru
+tomsk.ru
+tsaritsyn.ru
+tsk.ru
+tula.ru
+tuva.ru
+tver.ru
+tyumen.ru
+udm.ru
+udmurtia.ru
+ulan-ude.ru
+vladikavkaz.ru
+vladimir.ru
+vladivostok.ru
+volgograd.ru
+vologda.ru
+voronezh.ru
+vrn.ru
+vyatka.ru
+yakutia.ru
+yamal.ru
+yaroslavl.ru
+yekaterinburg.ru
+yuzhno-sakhalinsk.ru
+// More geographical domains
+amursk.ru
+baikal.ru
+cmw.ru
+fareast.ru
+jamal.ru
+kms.ru
+k-uralsk.ru
+kustanai.ru
+kuzbass.ru
+magnitka.ru
+mytis.ru
+nakhodka.ru
+nkz.ru
+norilsk.ru
+oskol.ru
+pyatigorsk.ru
+rubtsovsk.ru
+snz.ru
+syzran.ru
+vdonsk.ru
+zgrad.ru
+// State domains
+gov.ru
+mil.ru
+// Technical domains
+test.ru
+
+// rw : http://www.nic.rw/cgi-bin/policy.pl
+rw
+gov.rw
+net.rw
+edu.rw
+ac.rw
+com.rw
+co.rw
+int.rw
+mil.rw
+gouv.rw
+
+// sa : http://www.nic.net.sa/
+sa
+com.sa
+net.sa
+org.sa
+gov.sa
+med.sa
+pub.sa
+edu.sa
+sch.sa
+
+// sb : http://www.sbnic.net.sb/
+// Submitted by registry <le...@telekom.com.sb> 2008-06-08
+sb
+com.sb
+edu.sb
+gov.sb
+net.sb
+org.sb
+
+// sc : http://www.nic.sc/
+sc
+com.sc
+gov.sc
+net.sc
+org.sc
+edu.sc
+
+// sd : http://www.isoc.sd/sudanic.isoc.sd/billing_pricing.htm
+// Submitted by registry <ad...@isoc.sd> 2008-06-17
+sd
+com.sd
+net.sd
+org.sd
+edu.s

<TRUNCATED>


[05/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentKey.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentKey.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentKey.java
deleted file mode 100644
index b688ad3..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentKey.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase.converters.enrichment;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Iterables;
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hasher;
-import com.google.common.hash.Hashing;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.metron.reference.lookup.LookupKey;
-
-import java.io.*;
-
-public class EnrichmentKey implements LookupKey {
-  private static final int SEED = 0xDEADBEEF;
-  private static final int HASH_PREFIX_SIZE=16;
-  ThreadLocal<HashFunction> hFunction= new ThreadLocal<HashFunction>() {
-    @Override
-    protected HashFunction initialValue() {
-      return Hashing.murmur3_128(SEED);
-    }
-  };
-
-  public String indicator;
-  public String type;
-
-  public EnrichmentKey() {
-
-  }
-  public EnrichmentKey(String type, String indicator) {
-    this.indicator = indicator;
-    this.type = type;
-  }
-
-  private byte[] typedIndicatorToBytes() throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    DataOutputStream w = new DataOutputStream(baos);
-    w.writeUTF(type);
-    w.writeUTF(indicator);
-    w.flush();
-    return baos.toByteArray();
-  }
-
-  @Override
-  public byte[] toBytes() {
-    byte[] indicatorBytes = new byte[0];
-    try {
-      indicatorBytes = typedIndicatorToBytes();
-    } catch (IOException e) {
-      throw new RuntimeException("Unable to convert type and indicator to bytes", e);
-    }
-    Hasher hasher = hFunction.get().newHasher();
-    hasher.putBytes(Bytes.toBytes(indicator));
-    byte[] prefix = hasher.hash().asBytes();
-    byte[] val = new byte[indicatorBytes.length + prefix.length];
-    int offset = 0;
-    System.arraycopy(prefix, 0, val, offset, prefix.length);
-    offset += prefix.length;
-    System.arraycopy(indicatorBytes, 0, val, offset, indicatorBytes.length);
-    return val;
-  }
-
-  @Override
-  public void fromBytes(byte[] row) {
-    ByteArrayInputStream baos = new ByteArrayInputStream(row);
-    baos.skip(HASH_PREFIX_SIZE);
-    DataInputStream w = new DataInputStream(baos);
-    try {
-      type = w.readUTF();
-      indicator = w.readUTF();
-    } catch (IOException e) {
-      throw new RuntimeException("Unable to convert type and indicator from bytes", e);
-    }
-  }
-
-  @Override
-  public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    EnrichmentKey that = (EnrichmentKey) o;
-
-    if (indicator != null ? !indicator.equals(that.indicator) : that.indicator != null) return false;
-    return type != null ? type.equals(that.type) : that.type == null;
-
-  }
-
-  @Override
-  public int hashCode() {
-    int result = indicator != null ? indicator.hashCode() : 0;
-    result = 31 * result + (type != null ? type.hashCode() : 0);
-    return result;
-  }
-
-  @Override
-  public String toString() {
-    return "EnrichmentKey{" +
-            "indicator='" + indicator + '\'' +
-            ", type='" + type + '\'' +
-            '}';
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentValue.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentValue.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentValue.java
deleted file mode 100644
index f733f8e..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/converters/enrichment/EnrichmentValue.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase.converters.enrichment;
-
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.metron.hbase.converters.AbstractConverter;
-import org.apache.metron.reference.lookup.LookupValue;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class EnrichmentValue implements LookupValue {
-   private static final ThreadLocal<ObjectMapper> _mapper = new ThreadLocal<ObjectMapper>() {
-             @Override
-             protected ObjectMapper initialValue() {
-                return new ObjectMapper();
-             }
-    };
-    public static final String VALUE_COLUMN_NAME = "v";
-    public static final byte[] VALUE_COLUMN_NAME_B = Bytes.toBytes(VALUE_COLUMN_NAME);
-
-    private Map<String, String> metadata = null;
-
-    public EnrichmentValue()
-    {
-
-    }
-
-    public EnrichmentValue(Map<String, String> metadata) {
-        this.metadata = metadata;
-    }
-
-
-
-    public Map<String, String> getMetadata() {
-        return metadata;
-    }
-
-    @Override
-    public Iterable<Map.Entry<byte[], byte[]>> toColumns() {
-        return AbstractConverter.toEntries( VALUE_COLUMN_NAME_B, Bytes.toBytes(valueToString(metadata))
-                                  );
-    }
-
-    @Override
-    public void fromColumns(Iterable<Map.Entry<byte[], byte[]>> values) {
-        for(Map.Entry<byte[], byte[]> cell : values) {
-            if(Bytes.equals(cell.getKey(), VALUE_COLUMN_NAME_B)) {
-                metadata = stringToValue(Bytes.toString(cell.getValue()));
-            }
-        }
-    }
-    public Map<String, String> stringToValue(String s){
-        try {
-            return _mapper.get().readValue(s, new TypeReference<Map<String, String>>(){});
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to convert string to metadata: " + s);
-        }
-    }
-    public String valueToString(Map<String, String> value) {
-        try {
-            return _mapper.get().writeValueAsString(value);
-        } catch (IOException e) {
-            throw new RuntimeException("Unable to convert metadata to string: " + value);
-        }
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        EnrichmentValue that = (EnrichmentValue) o;
-
-        return getMetadata() != null ? getMetadata().equals(that.getMetadata()) : that.getMetadata() == null;
-
-    }
-
-    @Override
-    public int hashCode() {
-        return getMetadata() != null ? getMetadata().hashCode() : 0;
-    }
-
-    @Override
-    public String toString() {
-        return "EnrichmentValue{" +
-                "metadata=" + metadata +
-                '}';
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/lookup/EnrichmentLookup.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/lookup/EnrichmentLookup.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/lookup/EnrichmentLookup.java
deleted file mode 100644
index 059d6a6..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/hbase/lookup/EnrichmentLookup.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.hbase.lookup;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.metron.hbase.converters.HbaseConverter;
-import org.apache.metron.hbase.converters.enrichment.EnrichmentConverter;
-import org.apache.metron.hbase.converters.enrichment.EnrichmentKey;
-import org.apache.metron.hbase.converters.enrichment.EnrichmentValue;
-import org.apache.metron.reference.lookup.Lookup;
-import org.apache.metron.reference.lookup.LookupKV;
-import org.apache.metron.reference.lookup.accesstracker.AccessTracker;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-
-public class EnrichmentLookup extends Lookup<HTableInterface, EnrichmentKey, LookupKV<EnrichmentKey,EnrichmentValue>> implements AutoCloseable {
-
-  public static class Handler implements org.apache.metron.reference.lookup.handler.Handler<HTableInterface,EnrichmentKey,LookupKV<EnrichmentKey,EnrichmentValue>> {
-    String columnFamily;
-    HbaseConverter<EnrichmentKey, EnrichmentValue> converter = new EnrichmentConverter();
-    public Handler(String columnFamily) {
-      this.columnFamily = columnFamily;
-    }
-    @Override
-    public boolean exists(EnrichmentKey key, HTableInterface table, boolean logAccess) throws IOException {
-      return table.exists(converter.toGet(columnFamily, key));
-    }
-
-    @Override
-    public LookupKV<EnrichmentKey, EnrichmentValue> get(EnrichmentKey key, HTableInterface table, boolean logAccess) throws IOException {
-      return converter.fromResult(table.get(converter.toGet(columnFamily, key)), columnFamily);
-    }
-
-    private List<Get> keysToGets(Iterable<EnrichmentKey> keys) {
-      List<Get> ret = new ArrayList<>();
-      for(EnrichmentKey key : keys) {
-        ret.add(converter.toGet(columnFamily, key));
-      }
-      return ret;
-    }
-
-    @Override
-    public Iterable<Boolean> exists(Iterable<EnrichmentKey> key, HTableInterface table, boolean logAccess) throws IOException {
-      List<Boolean> ret = new ArrayList<>();
-      for(boolean b : table.existsAll(keysToGets(key))) {
-        ret.add(b);
-      }
-      return ret;
-    }
-
-    @Override
-    public Iterable<LookupKV<EnrichmentKey, EnrichmentValue>> get( Iterable<EnrichmentKey> keys
-                                                                 , HTableInterface table
-                                                                 , boolean logAccess
-                                                                 ) throws IOException
-    {
-      List<LookupKV<EnrichmentKey, EnrichmentValue>> ret = new ArrayList<>();
-      for(Result result : table.get(keysToGets(keys))) {
-        ret.add(converter.fromResult(result, columnFamily));
-      }
-      return ret;
-    }
-
-
-    @Override
-    public void close() throws Exception {
-
-    }
-  }
-  private HTableInterface table;
-  public EnrichmentLookup(HTableInterface table, String columnFamily, AccessTracker tracker) {
-    this.table = table;
-    this.setLookupHandler(new Handler(columnFamily));
-    this.setAccessTracker(tracker);
-  }
-
-  public HTableInterface getTable() {
-    return table;
-  }
-
-  @Override
-  public void close() throws Exception {
-    super.close();
-    table.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/services/PcapServiceCli.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/services/PcapServiceCli.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/services/PcapServiceCli.java
deleted file mode 100644
index 79b8067..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/services/PcapServiceCli.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.helpers.services;
-
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-
-public class PcapServiceCli {
-
-	private String[] args = null;
-	private Options options = new Options();
-
-	int port = 8081;
-	String uri = "/pcapGetter";
-
-	public int getPort() {
-		return port;
-	}
-
-	public void setPort(int port) {
-		this.port = port;
-	}
-
-	public String getUri() {
-		return uri;
-	}
-
-	public void setUri(String uri) {
-		this.uri = uri;
-	}
-
-	public PcapServiceCli(String[] args) {
-
-		this.args = args;
-
-		Option help = new Option("h", "Display help menue");
-		options.addOption(help);
-		options.addOption(
-				"port",
-				true,
-				"OPTIONAL ARGUMENT [portnumber] If this argument sets the port for starting the service.  If this argument is not set the port will start on defaut port 8081");
-		options.addOption(
-				"endpoint_uri",
-				true,
-				"OPTIONAL ARGUMENT [/uri/to/service] This sets the URI for the service to be hosted.  The default URI is /pcapGetter");
-	}
-
-	public void parse() {
-		CommandLineParser parser = new BasicParser();
-
-		CommandLine cmd = null;
-
-		try {
-			cmd = parser.parse(options, args);
-		} catch (ParseException e1) {
-
-			e1.printStackTrace();
-		}
-
-		if (cmd.hasOption("h"))
-			help();
-
-		if (cmd.hasOption("port")) {
-
-			try {
-				port = Integer.parseInt(cmd.getOptionValue("port").trim());
-			} catch (Exception e) {
-
-				System.out.println("[Metron] Invalid value for port entered");
-				help();
-			}
-		}
-		if (cmd.hasOption("endpoint_uri")) {
-
-			try {
-
-				if (uri == null || uri.equals(""))
-					throw new Exception("invalid uri");
-
-				uri = cmd.getOptionValue("uri").trim();
-
-				if (uri.charAt(0) != '/')
-					uri = "/" + uri;
-
-				if (uri.charAt(uri.length()) == '/')
-					uri = uri.substring(0, uri.length() - 1);
-
-			} catch (Exception e) {
-				System.out.println("[Metron] Invalid URI entered");
-				help();
-			}
-		}
-
-	}
-
-	private void help() {
-		// This prints out some help
-		HelpFormatter formater = new HelpFormatter();
-
-		formater.printHelp("Topology Options:", options);
-
-		// System.out
-		// .println("[Metron] Example usage: \n storm jar Metron-Topologies-0.3BETA-SNAPSHOT.jar org.apache.metron.topology.Bro -local_mode true -config_path Metron_Configs/ -generator_spout true");
-
-		System.exit(0);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/Cli.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/Cli.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/Cli.java
deleted file mode 100644
index 1c3940c..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/Cli.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.helpers.topology;
-
-import java.io.File;
-
-import org.apache.commons.cli.BasicParser;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-
-public class Cli {
-
-	private String[] args = null;
-	private Options options = new Options();
-
-	private String path = null;
-	private boolean debug = true;
-	private boolean local_mode = true;
-	private boolean generator_spout = false;
-
-	public boolean isGenerator_spout() {
-		return generator_spout;
-	}
-
-	public void setGenerator_spout(boolean generator_spout) {
-		this.generator_spout = generator_spout;
-	}
-
-	public String getPath() {
-		return path;
-	}
-
-	public void setPath(String path) {
-		this.path = path;
-	}
-
-	public boolean isDebug() {
-		return debug;
-	}
-
-	public void setDebug(boolean debug) {
-		this.debug = debug;
-	}
-
-	public boolean isLocal_mode() {
-		return local_mode;
-	}
-
-	public void setLocal_mode(boolean local_mode) {
-		this.local_mode = local_mode;
-	}
-
-	public Cli(String[] args) {
-
-		this.args = args;
-
-		Option help = new Option("h", "Display help menue");
-		options.addOption(help);
-		options.addOption(
-				"config_path",
-				true,
-				"OPTIONAL ARGUMENT [/path/to/configs] Path to configuration folder. If not provided topology will initialize with default configs");
-		options.addOption(
-				"local_mode",
-				true,
-				"REQUIRED ARGUMENT [true|false] Local mode or cluster mode.  If set to true the topology will run in local mode.  If set to false the topology will be deployed to Storm nimbus");
-		options.addOption(
-				"debug",
-				true,
-				"OPTIONAL ARGUMENT [true|false] Storm debugging enabled.  Default value is true");
-		options.addOption(
-				"generator_spout",
-				true,
-				"REQUIRED ARGUMENT [true|false] Turn on test generator spout.  Default is set to false.  If test generator spout is turned on then kafka spout is turned off.  Instead the generator spout will read telemetry from file and ingest it into a topology");
-	}
-
-	public void parse() {
-		CommandLineParser parser = new BasicParser();
-
-		CommandLine cmd = null;
-		try {
-			cmd = parser.parse(options, args);
-
-			if (cmd.hasOption("h"))
-				help();
-
-			if (cmd.hasOption("local_mode")) {
-
-				String local_value = cmd.getOptionValue("local_mode").trim()
-						.toLowerCase();
-
-				if (local_value.equals("true"))
-					local_mode = true;
-
-				else if (local_value.equals("false"))
-					local_mode = false;
-				else {
-					System.out
-							.println("[Metron] ERROR: Invalid value for local mode");
-					System.out
-							.println("[Metron] ERROR: Using cli argument -local_mode="
-									+ cmd.getOptionValue("local_mode"));
-					help();
-				}
-			} else {
-				System.out
-						.println("[Metron] ERROR: Invalid value for local mode");
-				help();
-			}
-			if (cmd.hasOption("generator_spout")) {
-
-				String local_value = cmd.getOptionValue("generator_spout").trim()
-						.toLowerCase();
-
-				if (local_value.equals("true"))
-					generator_spout = true;
-
-				else if (local_value.equals("false"))
-					generator_spout = false;
-				else {
-					System.out
-							.println("[Metron] ERROR: Invalid value for local generator_spout");
-					System.out
-							.println("[Metron] ERROR: Using cli argument -generator_spout="
-									+ cmd.getOptionValue("generator_spout"));
-					help();
-				}
-			} else {
-				System.out
-						.println("[Metron] ERROR: Invalid value for generator_spout");
-				help();
-			}
-			if (cmd.hasOption("config_path")) {
-
-				path = cmd.getOptionValue("config_path").trim();
-
-				File file = new File(path);
-
-				if (!file.isDirectory() || !file.exists()) {
-					System.out
-							.println("[Metron] ERROR: Invalid settings directory name given");
-					System.out
-							.println("[Metron] ERROR: Using cli argument -config_path="
-									+ cmd.getOptionValue("config_path"));
-					help();
-				}
-			}
-
-			if (cmd.hasOption("debug")) {
-				String debug_value = cmd.getOptionValue("debug");
-
-				if (debug_value.equals("true"))
-					debug = true;
-				else if (debug_value.equals("false"))
-					debug = false;
-				else {
-					System.out
-							.println("[Metron] ERROR: Invalid value for debug_value");
-					System.out
-							.println("[Metron] ERROR: Using cli argument -debug_value="
-									+ cmd.getOptionValue("debug_value"));
-					help();
-				}
-			}
-
-		} catch (ParseException e) {
-			System.out
-					.println("[Metron] ERROR: Failed to parse command line arguments");
-			help();
-		}
-	}
-
-	private void help() {
-		// This prints out some help
-		HelpFormatter formater = new HelpFormatter();
-
-		formater.printHelp("Topology Options:", options);
-
-		System.out
-				.println("[Metron] Example usage: \n storm jar Metron-Topologies-0.3BETA-SNAPSHOT.jar org.apache.metron.topology.Bro -local_mode true -config_path Metron_Configs/ -generator_spout true");
-
-		System.exit(0);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/ErrorUtils.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/ErrorUtils.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/ErrorUtils.java
deleted file mode 100644
index a1b7ccc..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/ErrorUtils.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.helpers.topology;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.tuple.Values;
-import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.metron.Constants;
-import org.json.simple.JSONObject;
-
-public class ErrorUtils {
-
-	@SuppressWarnings("unchecked")
-	public static JSONObject generateErrorMessage(String message, Throwable t)
-	{
-		JSONObject error_message = new JSONObject();
-		
-		/*
-		 * Save full stack trace in object.
-		 */
-		String stackTrace = ExceptionUtils.getStackTrace(t);
-		
-		String exception = t.toString();
-		
-		error_message.put("time", System.currentTimeMillis());
-		try {
-			error_message.put("hostname", InetAddress.getLocalHost().getHostName());
-		} catch (UnknownHostException ex) {
-			// TODO Auto-generated catch block
-			ex.printStackTrace();
-		}
-		
-		error_message.put("message", message);
-		error_message.put(Constants.SENSOR_TYPE, "error");
-		error_message.put("exception", exception);
-		error_message.put("stack", stackTrace);
-		
-		return error_message;
-	}
-
-	public static void handleError(OutputCollector collector, Throwable t, String errorStream) {
-		JSONObject error = ErrorUtils.generateErrorMessage(t.getMessage(), t);
-		collector.emit(errorStream, new Values(error));
-		collector.reportError(t);
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/SettingsLoader.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/SettingsLoader.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/SettingsLoader.java
deleted file mode 100644
index 6977bfa..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/helpers/topology/SettingsLoader.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.helpers.topology;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.configuration.XMLConfiguration;
-import org.json.simple.JSONArray;
-import org.json.simple.JSONObject;
-import org.json.simple.parser.JSONParser;
-import org.json.simple.parser.ParseException;
-
-public class SettingsLoader {
-
-	@SuppressWarnings("unchecked")
-	public static JSONObject loadEnvironmentIdnetifier(String config_path)
-			throws ConfigurationException {
-		Configuration config = new PropertiesConfiguration(config_path);
-
-		String customer = config.getString("customer.id", "unknown");
-		String datacenter = config.getString("datacenter.id", "unknown");
-		String instance = config.getString("instance.id", "unknown");
-
-		JSONObject identifier = new JSONObject();
-		identifier.put("customer", customer);
-		identifier.put("datacenter", datacenter);
-		identifier.put("instance", instance);
-
-		return identifier;
-	}
-
-	@SuppressWarnings("unchecked")
-	public static JSONObject loadTopologyIdnetifier(String config_path)
-			throws ConfigurationException {
-		Configuration config = new PropertiesConfiguration(config_path);
-
-		String topology = config.getString("topology.id", "unknown");
-		String instance = config.getString("instance.id", "unknown");
-
-		JSONObject identifier = new JSONObject();
-		identifier.put("topology", topology);
-		identifier.put("topology_instance", instance);
-
-		return identifier;
-	}
-	
-
-	public static String generateTopologyName(JSONObject env, JSONObject topo) {
-
-		return (env.get("customer") + "_" + env.get("datacenter") + "_"
-				+ env.get("instance") + "_" + topo.get("topology") + "_" + topo.get("topology_instance"));
-	}
-	
-	@SuppressWarnings("unchecked")
-	public static JSONObject generateAlertsIdentifier(JSONObject env, JSONObject topo)
-	{
-		JSONObject identifier = new JSONObject();
-		identifier.put("environment", env);
-		identifier.put("topology", topo);
-		
-		return identifier;
-	}
-
-	public static Map<String, JSONObject> loadRegexAlerts(String config_path)
-			throws ConfigurationException, ParseException {
-		XMLConfiguration alert_rules = new XMLConfiguration();
-		alert_rules.setDelimiterParsingDisabled(true);
-		alert_rules.load(config_path);
-
-		//int number_of_rules = alert_rules.getList("rule.pattern").size();
-
-		String[] patterns = alert_rules.getStringArray("rule.pattern");
-		String[] alerts = alert_rules.getStringArray("rule.alert");
-
-		JSONParser pr = new JSONParser();
-		Map<String, JSONObject> rules = new HashMap<String, JSONObject>();
-
-		for (int i = 0; i < patterns.length; i++)
-			rules.put(patterns[i], (JSONObject) pr.parse(alerts[i]));
-
-		return rules;
-	}
-
-	public static Map<String, JSONObject> loadKnownHosts(String config_path)
-			throws ConfigurationException, ParseException {
-		Configuration hosts = new PropertiesConfiguration(config_path);
-
-		Iterator<String> keys = hosts.getKeys();
-		Map<String, JSONObject> known_hosts = new HashMap<String, JSONObject>();
-		JSONParser parser = new JSONParser();
-
-		while (keys.hasNext()) {
-			String key = keys.next().trim();
-			JSONArray value = (JSONArray) parser.parse(hosts.getProperty(key)
-					.toString());
-			known_hosts.put(key, (JSONObject) value.get(0));
-		}
-
-		return known_hosts;
-	}
-
-	public static void printConfigOptions(PropertiesConfiguration config, String path_fragment)
-	{
-		Iterator<String> itr = config.getKeys();
-		
-		while(itr.hasNext())
-		{
-			String key = itr.next();
-			
-			if(key.contains(path_fragment))
-			{
-				
-				System.out.println("[Metron] Key: " + key + " -> " + config.getString(key));
-			}
-		}
-
-	}
-	
-	public static void printOptionalSettings(Map<String, String> settings)
-	{
-		for(String setting: settings.keySet())
-		{
-			System.out.println("[Metron] Optional Setting: " + setting + " -> " +settings.get(setting));
-		}
-
-	}
-	
-	public static Map<String, String> getConfigOptions(PropertiesConfiguration config, String path_fragment)
-	{
-		Iterator<String> itr = config.getKeys();
-		Map<String, String> settings = new HashMap<String, String>();
-		
-		while(itr.hasNext())
-		{
-			String key = itr.next();
-			
-			if(key.contains(path_fragment))
-			{
-				String tmp_key = key.replace(path_fragment, "");
-				settings.put(tmp_key, config.getString(key));
-			}
-		}
-
-		return settings;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/index/interfaces/IndexAdapter.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/index/interfaces/IndexAdapter.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/index/interfaces/IndexAdapter.java
deleted file mode 100644
index 3c2781a..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/index/interfaces/IndexAdapter.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.index.interfaces;
-
-import java.util.Map;
-
-import org.json.simple.JSONObject;
-
-public interface IndexAdapter {
-
-	boolean initializeConnection(String ip, int port, String cluster_name,
-			String index_name, String document_name, int bulk, String date_format) throws Exception;
-
-	int bulkIndex(JSONObject raw_message);
-
-	void setOptionalSettings(Map<String, String> settings);
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.java
deleted file mode 100644
index 13fffb9..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.java
+++ /dev/null
@@ -1,661 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. ISEParser.java */
-package org.apache.metron.ise.parser;
-import java.io.*;
-import java.util.*;
-
-import org.json.simple.*;
-
-/**
-* Basic ISE data parser generated by JavaCC. 
-*/
-public class ISEParser implements Serializable, ISEParserConstants {
- // private boolean nativeNumbers = false;
-
-	private static final long serialVersionUID = -2531656825360044979L;
-
-	public ISEParser()
-	  { //do nothing
-	  }
-
-  public ISEParser(String input)
-  {
-    this (new StringReader(input));
-  }
-
-  /**
-	* Parses a ISE String into a JSON object {@code Map}.
-	*/
-  public JSONObject parseObject() throws ParseException
-  {
-    JSONObject toReturn = object();
-    if (!ensureEOF()) throw new IllegalStateException("Expected EOF, but still had content to parse");
-    return toReturn;
-  }
-
-  @SuppressWarnings("unused")
-final public boolean ensureEOF() throws ParseException {
-    switch (jj_nt.kind) {
-    case COMMA:
-      jj_consume_token(COMMA);
-      break;
-    default:
-      jj_la1[0] = jj_gen;
-      ;
-    }
-    jj_consume_token(0);
-    {if (true) return true;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings({ "unchecked", "unused" })
-final public JSONObject innerMap() throws ParseException {
-  final JSONObject json = new JSONObject();
-  String key;
-  Object value;
-    key = objectKey();
-    jj_consume_token(EQUALS);
-    value = value();
-    json.put(key, value);
-    key = null;
-    value = null;
-    label_1:
-    while (true) {
-      switch (jj_nt.kind) {
-      case SLASH:
-        ;
-        break;
-      default:
-        jj_la1[1] = jj_gen;
-        break label_1;
-      }
-      jj_consume_token(SLASH);
-      jj_consume_token(COMMA);
-      key = objectKey();
-      jj_consume_token(EQUALS);
-      value = value();
-      json.put(key, value);
-      key = null;
-      value = null;
-    }
-    {if (true) return json;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings({ "unused", "unchecked" })
-final public JSONObject object() throws ParseException {
-  final JSONObject json = new JSONObject();
-  String key;
-  Object value;
-    key = objectKey();
-    jj_consume_token(EQUALS);
-    value = value();
-    json.put(key, value);
-    key = null;
-    value = null;
-    label_2:
-    while (true) {
-      if (jj_2_1(2)) {
-        ;
-      } else {
-        break label_2;
-      }
-      jj_consume_token(COMMA);
-      key = objectKey();
-      jj_consume_token(EQUALS);
-      value = value();
-        json.put(key, value);
-        key = null;
-        value = null;
-    }
-    {if (true) return json;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String objectKey() throws ParseException {
-  String k;
-    k = string();
-    //  System.out.println("key == " + k);
-    {if (true) return k.trim();}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings({ "unused", "rawtypes" })
-final public Object value() throws ParseException {
-  Object x;
-  String eof = "EOF";
-  Map m = null;
-    if (jj_2_2(2147483647)) {
-      x = nullValue();
-    } else if (jj_2_3(2147483647)) {
-      x = innerMap();
-    } else {
-      switch (jj_nt.kind) {
-      case TAG:
-        x = tagString();
-        break;
-      default:
-        jj_la1[2] = jj_gen;
-        if (jj_2_4(2147483647)) {
-          x = blankValue();
-        } else if (jj_2_5(2147483647)) {
-          x = braced_string();
-        } else if (jj_2_6(2)) {
-          x = string();
-        } else {
-          jj_consume_token(-1);
-          throw new ParseException();
-        }
-      }
-    }
-    //  System.out.println("val == " + x);
-    //if (x instanceof Map) return "Map";
-    //return (String) x;
-    {if (true) return x;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String nullValue() throws ParseException {
-    {if (true) return null;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String tagString() throws ParseException {
-  String output = "(tag=0)";
-    jj_consume_token(TAG);
-    jj_consume_token(STRING_BODY);
-    {if (true) return output + token.image;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String blankValue() throws ParseException {
-    {if (true) return null;}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String string() throws ParseException {
-  String s;
-    jj_consume_token(STRING_BODY);
-    {if (true) return token.image.trim();}
-    throw new Error("Missing return statement in function");
-  }
-
-  @SuppressWarnings("unused")
-final public String braced_string() throws ParseException {
-  String s;
-    jj_consume_token(BRACED_STRING);
-    //  System.out.println("braced == " + token.image);
-    s = token.image;
-    jj_consume_token(COMMA);
-    {if (true) return s.trim();}
-    throw new Error("Missing return statement in function");
-  }
-
-  private boolean jj_2_1(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_1(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(0, xla); }
-  }
-
-  private boolean jj_2_2(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_2(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(1, xla); }
-  }
-
-  private boolean jj_2_3(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_3(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(2, xla); }
-  }
-
-  private boolean jj_2_4(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_4(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(3, xla); }
-  }
-
-  private boolean jj_2_5(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_5(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(4, xla); }
-  }
-
-  private boolean jj_2_6(int xla) {
-    jj_la = xla; jj_lastpos = jj_scanpos = token;
-    try { return !jj_3_6(); }
-    catch(LookaheadSuccess ls) { return true; }
-    finally { jj_save(5, xla); }
-  }
-
-  private boolean jj_3_5() {
-    if (jj_3R_5()) return true;
-    return false;
-  }
-
-  private boolean jj_3_4() {
-    if (jj_scan_token(0)) return true;
-    return false;
-  }
-
-  private boolean jj_3R_5() {
-    if (jj_scan_token(BRACED_STRING)) return true;
-    if (jj_scan_token(COMMA)) return true;
-    return false;
-  }
-
-  private boolean jj_3_3() {
-    if (jj_3R_4()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_4() {
-    if (jj_3R_3()) return true;
-    if (jj_scan_token(EQUALS)) return true;
-    if (jj_3R_7()) return true;
-    Token xsp;
-    while (true) {
-      xsp = jj_scanpos;
-      if (jj_3R_8()) { jj_scanpos = xsp; break; }
-    }
-    return false;
-  }
-
-  private boolean jj_3_2() {
-    if (jj_scan_token(COMMA)) return true;
-    return false;
-  }
-
-  private boolean jj_3_6() {
-    if (jj_3R_6()) return true;
-    return false;
-  }
-
-  private boolean jj_3_1() {
-    if (jj_scan_token(COMMA)) return true;
-    if (jj_3R_3()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_13() {
-    if (jj_3R_5()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_12() {
-    if (jj_3R_16()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_11() {
-    if (jj_3R_15()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_6() {
-    if (jj_scan_token(STRING_BODY)) return true;
-    return false;
-  }
-
-  private boolean jj_3R_10() {
-    if (jj_3R_4()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_9() {
-    if (jj_3R_14()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_7() {
-    Token xsp;
-    xsp = jj_scanpos;
-    if (jj_3R_9()) {
-    jj_scanpos = xsp;
-    if (jj_3R_10()) {
-    jj_scanpos = xsp;
-    if (jj_3R_11()) {
-    jj_scanpos = xsp;
-    if (jj_3R_12()) {
-    jj_scanpos = xsp;
-    if (jj_3R_13()) {
-    jj_scanpos = xsp;
-    if (jj_3_6()) return true;
-    }
-    }
-    }
-    }
-    }
-    return false;
-  }
-
-  private boolean jj_3R_16() {
-    return false;
-  }
-
-  private boolean jj_3R_15() {
-    if (jj_scan_token(TAG)) return true;
-    if (jj_scan_token(STRING_BODY)) return true;
-    return false;
-  }
-
-  private boolean jj_3R_3() {
-    if (jj_3R_6()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_8() {
-    if (jj_scan_token(SLASH)) return true;
-    if (jj_scan_token(COMMA)) return true;
-    if (jj_3R_3()) return true;
-    if (jj_scan_token(EQUALS)) return true;
-    if (jj_3R_7()) return true;
-    return false;
-  }
-
-  private boolean jj_3R_14() {
-    return false;
-  }
-
-  /** Generated Token Manager. */
-  public ISEParserTokenManager token_source;
-  JavaCharStream jj_input_stream;
-  /** Current token. */
-  public Token token;
-  /** Next token. */
-  public Token jj_nt;
-  private Token jj_scanpos, jj_lastpos;
-  private int jj_la;
-  private int jj_gen;
-  final private int[] jj_la1 = new int[3];
-  static private int[] jj_la1_0;
-  static {
-      jj_la1_init_0();
-   }
-   private static void jj_la1_init_0() {
-      jj_la1_0 = new int[] {0x20,0x80,0x100,};
-   }
-  final private JJCalls[] jj_2_rtns = new JJCalls[6];
-  private boolean jj_rescan = false;
-  private int jj_gc = 0;
-
-  /** Constructor with InputStream. */
-  public ISEParser(java.io.InputStream stream) {
-     this(stream, null);
-  }
-  /** Constructor with InputStream and supplied encoding */
-  public ISEParser(java.io.InputStream stream, String encoding) {
-    try { jj_input_stream = new JavaCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
-    token_source = new ISEParserTokenManager(jj_input_stream);
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  /** Reinitialise. */
-  public void ReInit(java.io.InputStream stream) {
-     ReInit(stream, null);
-  }
-  /** Reinitialise. */
-  public void ReInit(java.io.InputStream stream, String encoding) {
-    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
-    token_source.ReInit(jj_input_stream);
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  /** Constructor. */
-  public ISEParser(java.io.Reader stream) {
-    jj_input_stream = new JavaCharStream(stream, 1, 1);
-    token_source = new ISEParserTokenManager(jj_input_stream);
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  /** Reinitialise. */
-  public void ReInit(java.io.Reader stream) {
-    jj_input_stream.ReInit(stream, 1, 1);
-    token_source.ReInit(jj_input_stream);
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  /** Constructor with generated Token Manager. */
-  public ISEParser(ISEParserTokenManager tm) {
-    token_source = tm;
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  /** Reinitialise. */
-  public void ReInit(ISEParserTokenManager tm) {
-    token_source = tm;
-    token = new Token();
-    token.next = jj_nt = token_source.getNextToken();
-    jj_gen = 0;
-    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
-    for (int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls();
-  }
-
-  private Token jj_consume_token(int kind) throws ParseException {
-    Token oldToken = token;
-    if ((token = jj_nt).next != null) jj_nt = jj_nt.next;
-    else jj_nt = jj_nt.next = token_source.getNextToken();
-    if (token.kind == kind) {
-      jj_gen++;
-      if (++jj_gc > 100) {
-        jj_gc = 0;
-        for (int i = 0; i < jj_2_rtns.length; i++) {
-          JJCalls c = jj_2_rtns[i];
-          while (c != null) {
-            if (c.gen < jj_gen) c.first = null;
-            c = c.next;
-          }
-        }
-      }
-      return token;
-    }
-    jj_nt = token;
-    token = oldToken;
-    jj_kind = kind;
-    throw generateParseException();
-  }
-
-  static private final class LookaheadSuccess extends java.lang.Error {
-
-	private static final long serialVersionUID = -5724812746511794505L; }
-  final private LookaheadSuccess jj_ls = new LookaheadSuccess();
-  private boolean jj_scan_token(int kind) {
-    if (jj_scanpos == jj_lastpos) {
-      jj_la--;
-      if (jj_scanpos.next == null) {
-        jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken();
-      } else {
-        jj_lastpos = jj_scanpos = jj_scanpos.next;
-      }
-    } else {
-      jj_scanpos = jj_scanpos.next;
-    }
-    if (jj_rescan) {
-      int i = 0; Token tok = token;
-      while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
-      if (tok != null) jj_add_error_token(kind, i);
-    }
-    if (jj_scanpos.kind != kind) return true;
-    if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
-    return false;
-  }
-
-
-/** Get the next Token. */
-  final public Token getNextToken() {
-    if ((token = jj_nt).next != null) jj_nt = jj_nt.next;
-    else jj_nt = jj_nt.next = token_source.getNextToken();
-    jj_gen++;
-    return token;
-  }
-
-/** Get the specific Token. */
-  final public Token getToken(int index) {
-    Token t = token;
-    for (int i = 0; i < index; i++) {
-      if (t.next != null) t = t.next;
-      else t = t.next = token_source.getNextToken();
-    }
-    return t;
-  }
-
-  private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
-  private int[] jj_expentry;
-  private int jj_kind = -1;
-  private int[] jj_lasttokens = new int[100];
-  private int jj_endpos;
-
-  private void jj_add_error_token(int kind, int pos) {
-    if (pos >= 100) return;
-    if (pos == jj_endpos + 1) {
-      jj_lasttokens[jj_endpos++] = kind;
-    } else if (jj_endpos != 0) {
-      jj_expentry = new int[jj_endpos];
-      for (int i = 0; i < jj_endpos; i++) {
-        jj_expentry[i] = jj_lasttokens[i];
-      }
-      jj_entries_loop: for (java.util.Iterator<?> it = jj_expentries.iterator(); it.hasNext();) {
-        int[] oldentry = (int[])(it.next());
-        if (oldentry.length == jj_expentry.length) {
-          for (int i = 0; i < jj_expentry.length; i++) {
-            if (oldentry[i] != jj_expentry[i]) {
-              continue jj_entries_loop;
-            }
-          }
-          jj_expentries.add(jj_expentry);
-          break jj_entries_loop;
-        }
-      }
-      if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
-    }
-  }
-
-  /** Generate ParseException. */
-  public ParseException generateParseException() {
-    jj_expentries.clear();
-    boolean[] la1tokens = new boolean[11];
-    if (jj_kind >= 0) {
-      la1tokens[jj_kind] = true;
-      jj_kind = -1;
-    }
-    for (int i = 0; i < 3; i++) {
-      if (jj_la1[i] == jj_gen) {
-        for (int j = 0; j < 32; j++) {
-          if ((jj_la1_0[i] & (1<<j)) != 0) {
-            la1tokens[j] = true;
-          }
-        }
-      }
-    }
-    for (int i = 0; i < 11; i++) {
-      if (la1tokens[i]) {
-        jj_expentry = new int[1];
-        jj_expentry[0] = i;
-        jj_expentries.add(jj_expentry);
-      }
-    }
-    jj_endpos = 0;
-    jj_rescan_token();
-    jj_add_error_token(0, 0);
-    int[][] exptokseq = new int[jj_expentries.size()][];
-    for (int i = 0; i < jj_expentries.size(); i++) {
-      exptokseq[i] = jj_expentries.get(i);
-    }
-    return new ParseException(token, exptokseq, tokenImage);
-  }
-
-  /** Enable tracing. */
-  final public void enable_tracing() {
-  }
-
-  /** Disable tracing. */
-  final public void disable_tracing() {
-  }
-
-  private void jj_rescan_token() {
-    jj_rescan = true;
-    for (int i = 0; i < 6; i++) {
-    try {
-      JJCalls p = jj_2_rtns[i];
-      do {
-        if (p.gen > jj_gen) {
-          jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
-          switch (i) {
-            case 0: jj_3_1(); break;
-            case 1: jj_3_2(); break;
-            case 2: jj_3_3(); break;
-            case 3: jj_3_4(); break;
-            case 4: jj_3_5(); break;
-            case 5: jj_3_6(); break;
-          }
-        }
-        p = p.next;
-      } while (p != null);
-      } catch(LookaheadSuccess ls) { }
-    }
-    jj_rescan = false;
-  }
-
-  private void jj_save(int index, int xla) {
-    JJCalls p = jj_2_rtns[index];
-    while (p.gen > jj_gen) {
-      if (p.next == null) { p = p.next = new JJCalls(); break; }
-      p = p.next;
-    }
-    p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
-  }
-
-  static final class JJCalls {
-    int gen;
-    Token first;
-    int arg;
-    JJCalls next;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.jj
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.jj b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.jj
deleted file mode 100644
index 6071922..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParser.jj
+++ /dev/null
@@ -1,12 +0,0 @@
-options{  CHOICE_AMBIGUITY_CHECK = 3;  OTHER_AMBIGUITY_CHECK = 2;  //DEBUG_PARSER=true
-  //DEBUG_LOOKAHEAD=true
-  //DEBUG_TOKEN_MANAGER=true
-  ERROR_REPORTING = true;  JAVA_UNICODE_ESCAPE = true;  UNICODE_INPUT = true;  IGNORE_CASE = true;  SUPPORT_CLASS_VISIBILITY_PUBLIC = false;  FORCE_LA_CHECK = true;  CACHE_TOKENS = true;  SANITY_CHECK = true;  STATIC = false;  //KEEP_LINE_COLUMN=true;
-}PARSER_BEGIN(ISEParser)/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements.  See the NOTICE file * distributed with this work for additional information * regarding copyright ownership.  The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License.  You may obtain a copy of the License at * *     http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */package org.apache.metron.ise.parser;import java.io.*;import java.util.*;import org.json.simple.*;/**
-* Basic ISE data parser generated by JavaCC.
-*/public class ISEParser implements Serializable{  private boolean nativeNumbers = false;  public ISEParser()  { //do nothing
-  }  public ISEParser(String input)  {    this (new StringReader(input));  }  /**
-	* Parses a ISE String into a JSON object {@code Map}.
-	*/  public JSONObject parseObject() throws ParseException  {    JSONObject toReturn = object();    if (!ensureEOF()) throw new IllegalStateException("Expected EOF, but still had content to parse");    return toReturn;  }}PARSER_END(ISEParser)// Ignore commentsSKIP :{  < C_SINGLE_COMMENT : "//" (~[ "\n", "\r", "\f" ])* < EOL >>| < C_MULTILINE_COMMENT : "/*" (~[ ])* "*/" >| < SH_SINGLE_COMMENT : "#" (~[ "\n", "\r", "\f" ])* < EOL >>  /*| < WHITESPACE :    " "  | "\t" >*/| < EOL :    "\n"  | "\r"  | "\f" >}// Common tokens
-TOKEN :{  < COMMA : "," >| < EQUALS : "=" >| < SLASH : "\\" >| < TAG : "(tag=0)" >}// Null token/*TOKEN :{  //< NULL : "null" >}*/// String tokens
-TOKEN :{  //< SYMBOL : ([ "a"-"z", "A"-"Z", "0", "1"-"9", " ", "\t" , ":" , "-" , "." ])+ >  < STRING_BODY :    (      (~[ "\"", "\r", "\n", "\f", "\t", "=", "," ])    |      (        "\\"        (          "r"        | "n"        | "f"        | "\\"        | "/"        | "\""        | "b"        | "t"        | ","        )      )    )+ >| < BRACED_STRING :    (      "{" (~[ "{", "}" ])+ "}"    ) >}boolean ensureEOF() :{}{  (< COMMA >)? < EOF >  {    return true;  }}JSONObject innerMap() :{  final JSONObject json = new JSONObject();  String key;  Object value;}{  key = objectKey() < EQUALS > value = value()  {    json.put(key, value);  }  {    key = null;    value = null;  }  (    < SLASH > < COMMA > key = objectKey() < EQUALS > value = value()    {      json.put(key, value);    }    {      key = null;      value = null;    }  )*  {    return json;  }}JSONObject object() :{  final JSONObject json = new JSONObject()
 ;  String key;  Object value;}{  key = objectKey() < EQUALS > value = value()  {    json.put(key, value);  }  {    key = null;    value = null;  }  (    (      LOOKAHEAD(2)      < COMMA > key = objectKey() < EQUALS > value = value()      {        json.put(key, value);      }      {        key = null;        value = null;      }    )*  | LOOKAHEAD(2)    < COMMA > < EOF >  )  // ensureEOF()  {    return json;  }}String objectKey() :{  String k;}{  (    k = string()  )  {    //  System.out.println("key == " + k);    return k.trim();  }}Object value() :{  Object x;  String eof = "EOF";  Map m = null;}{  (    LOOKAHEAD(< COMMA >)    x = nullValue()  | LOOKAHEAD(innerMap())    x = innerMap()  | x = tagString()  | LOOKAHEAD(< EOF >)    x = blankValue()  | LOOKAHEAD(braced_string())    x = braced_string()  | LOOKAHEAD(2)    x = string()  )  {    //  System.out.println("val == " + x);    //if (x instanceof Map) return "Map
 ";    //return (String) x;    return x;  }}String nullValue() :{}{  {    return null;  }}String tagString() :{  String output = "(tag=0)";}{  < TAG > < STRING_BODY >  {    return output + token.image;  }}String blankValue() :{}{  {    return null;  }}String string() :{  String s;}{  < STRING_BODY >  {    return token.image.trim();  }}String braced_string() :{  String s;}{  < BRACED_STRING >  {    //  System.out.println("braced == " + token.image);    s = token.image;  }  < COMMA >  {    return s.trim();  }}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserConstants.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserConstants.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserConstants.java
deleted file mode 100644
index 923a64e..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserConstants.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. ISEParserConstants.java */
-package org.apache.metron.ise.parser;
-
-
-/**
- * Token literal values and constants.
- * Generated by org.javacc.parser.OtherFilesGen#start()
- */
-interface ISEParserConstants {
-
-  /** End of File. */
-  int EOF = 0;
-  /** RegularExpression Id. */
-  int C_SINGLE_COMMENT = 1;
-  /** RegularExpression Id. */
-  int C_MULTILINE_COMMENT = 2;
-  /** RegularExpression Id. */
-  int SH_SINGLE_COMMENT = 3;
-  /** RegularExpression Id. */
-  int EOL = 4;
-  /** RegularExpression Id. */
-  int COMMA = 5;
-  /** RegularExpression Id. */
-  int EQUALS = 6;
-  /** RegularExpression Id. */
-  int SLASH = 7;
-  /** RegularExpression Id. */
-  int TAG = 8;
-  /** RegularExpression Id. */
-  int STRING_BODY = 9;
-  /** RegularExpression Id. */
-  int BRACED_STRING = 10;
-
-  /** Lexical state. */
-  int DEFAULT = 0;
-
-  /** Literal token values. */
-  String[] tokenImage = {
-    "<EOF>",
-    "<C_SINGLE_COMMENT>",
-    "<C_MULTILINE_COMMENT>",
-    "<SH_SINGLE_COMMENT>",
-    "<EOL>",
-    "\",\"",
-    "\"=\"",
-    "\"\\\\\"",
-    "\"(tag=0)\"",
-    "<STRING_BODY>",
-    "<BRACED_STRING>",
-  };
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserTokenManager.java
----------------------------------------------------------------------
diff --git a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserTokenManager.java b/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserTokenManager.java
deleted file mode 100644
index d8d3c10..0000000
--- a/metron-streaming/Metron-Common/src/main/java/org/apache/metron/ise/parser/ISEParserTokenManager.java
+++ /dev/null
@@ -1,676 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/* Generated By:JavaCC: Do not edit this line. ISEParserTokenManager.java */
-package org.apache.metron.ise.parser;
-
-/** Token Manager. */
-class ISEParserTokenManager implements ISEParserConstants
-{
-
-  /** Debug output. */
-  public  java.io.PrintStream debugStream = System.out;
-  /** Set debug output. */
-  public  void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
-private final int jjStopStringLiteralDfa_0(int pos, long active0)
-{
-   switch (pos)
-   {
-      case 0:
-         if ((active0 & 0x100L) != 0L)
-         {
-            jjmatchedKind = 9;
-            return 18;
-         }
-         if ((active0 & 0x80L) != 0L)
-            return 6;
-         return -1;
-      case 1:
-         if ((active0 & 0x100L) != 0L)
-         {
-            jjmatchedKind = 9;
-            jjmatchedPos = 1;
-            return 18;
-         }
-         return -1;
-      case 2:
-         if ((active0 & 0x100L) != 0L)
-         {
-            jjmatchedKind = 9;
-            jjmatchedPos = 2;
-            return 18;
-         }
-         return -1;
-      case 3:
-         if ((active0 & 0x100L) != 0L)
-         {
-            jjmatchedKind = 9;
-            jjmatchedPos = 3;
-            return 18;
-         }
-         return -1;
-      case 4:
-         if ((active0 & 0x100L) != 0L)
-         {
-            if (jjmatchedPos < 3)
-            {
-               jjmatchedKind = 9;
-               jjmatchedPos = 3;
-            }
-            return -1;
-         }
-         return -1;
-      case 5:
-         if ((active0 & 0x100L) != 0L)
-         {
-            if (jjmatchedPos < 3)
-            {
-               jjmatchedKind = 9;
-               jjmatchedPos = 3;
-            }
-            return -1;
-         }
-         return -1;
-      default :
-         return -1;
-   }
-}
-private final int jjStartNfa_0(int pos, long active0)
-{
-   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
-}
-private int jjStopAtPos(int pos, int kind)
-{
-   jjmatchedKind = kind;
-   jjmatchedPos = pos;
-   return pos + 1;
-}
-private int jjMoveStringLiteralDfa0_0()
-{
-   switch(curChar)
-   {
-      case 40:
-         return jjMoveStringLiteralDfa1_0(0x100L);
-      case 44:
-         return jjStopAtPos(0, 5);
-      case 61:
-         return jjStopAtPos(0, 6);
-      case 92:
-         return jjStartNfaWithStates_0(0, 7, 6);
-      default :
-         return jjMoveNfa_0(0, 0);
-   }
-}
-private int jjMoveStringLiteralDfa1_0(long active0)
-{
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(0, active0);
-      return 1;
-   }
-   switch(curChar)
-   {
-      case 84:
-      case 116:
-         return jjMoveStringLiteralDfa2_0(active0, 0x100L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(0, active0);
-}
-private int jjMoveStringLiteralDfa2_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(0, old0);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(1, active0);
-      return 2;
-   }
-   switch(curChar)
-   {
-      case 65:
-      case 97:
-         return jjMoveStringLiteralDfa3_0(active0, 0x100L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(1, active0);
-}
-private int jjMoveStringLiteralDfa3_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(1, old0);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(2, active0);
-      return 3;
-   }
-   switch(curChar)
-   {
-      case 71:
-      case 103:
-         return jjMoveStringLiteralDfa4_0(active0, 0x100L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(2, active0);
-}
-private int jjMoveStringLiteralDfa4_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(2, old0);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(3, active0);
-      return 4;
-   }
-   switch(curChar)
-   {
-      case 61:
-         return jjMoveStringLiteralDfa5_0(active0, 0x100L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(3, active0);
-}
-private int jjMoveStringLiteralDfa5_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(3, old0);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(4, active0);
-      return 5;
-   }
-   switch(curChar)
-   {
-      case 48:
-         return jjMoveStringLiteralDfa6_0(active0, 0x100L);
-      default :
-         break;
-   }
-   return jjStartNfa_0(4, active0);
-}
-private int jjMoveStringLiteralDfa6_0(long old0, long active0)
-{
-   if (((active0 &= old0)) == 0L)
-      return jjStartNfa_0(4, old0);
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) {
-      jjStopStringLiteralDfa_0(5, active0);
-      return 6;
-   }
-   switch(curChar)
-   {
-      case 41:
-         if ((active0 & 0x100L) != 0L)
-            return jjStopAtPos(6, 8);
-         break;
-      default :
-         break;
-   }
-   return jjStartNfa_0(5, active0);
-}
-private int jjStartNfaWithStates_0(int pos, int kind, int state)
-{
-   jjmatchedKind = kind;
-   jjmatchedPos = pos;
-   try { curChar = input_stream.readChar(); }
-   catch(java.io.IOException e) { return pos + 1; }
-   return jjMoveNfa_0(state, pos + 1);
-}
-static final long[] jjbitVec0 = {
-   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
-};
-static final long[] jjbitVec2 = {
-   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
-};
-private int jjMoveNfa_0(int startState, int curPos)
-{
-   int startsAt = 0;
-   jjnewStateCnt = 18;
-   int i = 1;
-   jjstateSet[0] = startState;
-   int kind = 0x7fffffff;
-   for (;;)
-   {
-      if (++jjround == 0x7fffffff)
-         ReInitRounds();
-      if (curChar < 64)
-      {
-         long l = 1L << curChar;
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 18:
-               case 4:
-                  if ((0xdfffeffbffffc9ffL & l) == 0L)
-                     break;
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  break;
-               case 0:
-                  if ((0xdfffeffbffffc9ffL & l) != 0L)
-                  {
-                     if (kind > 9)
-                        kind = 9;
-                     jjCheckNAddTwoStates(4, 5);
-                  }
-                  else if ((0x3400L & l) != 0L)
-                  {
-                     if (kind > 4)
-                        kind = 4;
-                  }
-                  if (curChar == 47)
-                     jjAddStates(0, 1);
-                  else if (curChar == 35)
-                     jjCheckNAddTwoStates(1, 2);
-                  break;
-               case 6:
-                  if ((0xdfffeffbffffc9ffL & l) != 0L)
-                  {
-                     if (kind > 9)
-                        kind = 9;
-                     jjCheckNAddTwoStates(4, 5);
-                  }
-                  if ((0x900400000000L & l) != 0L)
-                  {
-                     if (kind > 9)
-                        kind = 9;
-                     jjCheckNAddTwoStates(4, 5);
-                  }
-                  break;
-               case 1:
-                  if ((0xffffffffffffcbffL & l) != 0L)
-                     jjCheckNAddTwoStates(1, 2);
-                  break;
-               case 2:
-                  if ((0x3400L & l) != 0L && kind > 3)
-                     kind = 3;
-                  break;
-               case 3:
-                  if ((0x3400L & l) != 0L && kind > 4)
-                     kind = 4;
-                  break;
-               case 8:
-                  jjAddStates(2, 3);
-                  break;
-               case 10:
-                  if (curChar == 47)
-                     jjAddStates(0, 1);
-                  break;
-               case 11:
-                  if (curChar == 47)
-                     jjCheckNAddTwoStates(12, 13);
-                  break;
-               case 12:
-                  if ((0xffffffffffffcbffL & l) != 0L)
-                     jjCheckNAddTwoStates(12, 13);
-                  break;
-               case 13:
-                  if ((0x3400L & l) != 0L && kind > 1)
-                     kind = 1;
-                  break;
-               case 14:
-                  if (curChar == 42)
-                     jjCheckNAddTwoStates(15, 17);
-                  break;
-               case 15:
-                  jjCheckNAddTwoStates(15, 17);
-                  break;
-               case 16:
-                  if (curChar == 47 && kind > 2)
-                     kind = 2;
-                  break;
-               case 17:
-                  if (curChar == 42)
-                     jjstateSet[jjnewStateCnt++] = 16;
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      else if (curChar < 128)
-      {
-         long l = 1L << (curChar & 077);
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 18:
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  if (curChar == 92)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  break;
-               case 0:
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  if (curChar == 123)
-                     jjCheckNAdd(8);
-                  else if (curChar == 92)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  break;
-               case 6:
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  if ((0x14404410144044L & l) != 0L)
-                  {
-                     if (kind > 9)
-                        kind = 9;
-                     jjCheckNAddTwoStates(4, 5);
-                  }
-                  if (curChar == 92)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  break;
-               case 1:
-                  jjAddStates(4, 5);
-                  break;
-               case 4:
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  break;
-               case 5:
-                  if (curChar == 92)
-                     jjstateSet[jjnewStateCnt++] = 6;
-                  break;
-               case 7:
-                  if (curChar == 123)
-                     jjCheckNAdd(8);
-                  break;
-               case 8:
-                  if ((0xd7ffffffffffffffL & l) != 0L)
-                     jjCheckNAddTwoStates(8, 9);
-                  break;
-               case 9:
-                  if (curChar == 125 && kind > 10)
-                     kind = 10;
-                  break;
-               case 12:
-                  jjAddStates(6, 7);
-                  break;
-               case 15:
-                  jjAddStates(8, 9);
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      else
-      {
-         int hiByte = (int)(curChar >> 8);
-         int i1 = hiByte >> 6;
-         long l1 = 1L << (hiByte & 077);
-         int i2 = (curChar & 0xff) >> 6;
-         long l2 = 1L << (curChar & 077);
-         do
-         {
-            switch(jjstateSet[--i])
-            {
-               case 18:
-               case 4:
-                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     break;
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  break;
-               case 0:
-                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     break;
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  break;
-               case 6:
-                  if (!jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     break;
-                  if (kind > 9)
-                     kind = 9;
-                  jjCheckNAddTwoStates(4, 5);
-                  break;
-               case 1:
-                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     jjAddStates(4, 5);
-                  break;
-               case 8:
-                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     jjAddStates(2, 3);
-                  break;
-               case 12:
-                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     jjAddStates(6, 7);
-                  break;
-               case 15:
-                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
-                     jjAddStates(8, 9);
-                  break;
-               default : break;
-            }
-         } while(i != startsAt);
-      }
-      if (kind != 0x7fffffff)
-      {
-         jjmatchedKind = kind;
-         jjmatchedPos = curPos;
-         kind = 0x7fffffff;
-      }
-      ++curPos;
-      if ((i = jjnewStateCnt) == (startsAt = 18 - (jjnewStateCnt = startsAt)))
-         return curPos;
-      try { curChar = input_stream.readChar(); }
-      catch(java.io.IOException e) { return curPos; }
-   }
-}
-static final int[] jjnextStates = {
-   11, 14, 8, 9, 1, 2, 12, 13, 15, 17, 
-};
-private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
-{
-   switch(hiByte)
-   {
-      case 0:
-         return ((jjbitVec2[i2] & l2) != 0L);
-      default :
-         if ((jjbitVec0[i1] & l1) != 0L)
-            return true;
-         return false;
-   }
-}
-
-/** Token literal values. */
-public static final String[] jjstrLiteralImages = {
-"", null, null, null, null, "\54", "\75", "\134", null, null, null, };
-
-/** Lexer state names. */
-public static final String[] lexStateNames = {
-   "DEFAULT",
-};
-static final long[] jjtoToken = {
-   0x7e1L, 
-};
-static final long[] jjtoSkip = {
-   0x1eL, 
-};
-protected JavaCharStream input_stream;
-private final int[] jjrounds = new int[18];
-private final int[] jjstateSet = new int[36];
-protected char curChar;
-/** Constructor. */
-public ISEParserTokenManager(JavaCharStream stream){
-   if (JavaCharStream.staticFlag)
-      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
-   input_stream = stream;
-}
-
-/** Constructor. */
-public ISEParserTokenManager(JavaCharStream stream, int lexState){
-   this(stream);
-   SwitchTo(lexState);
-}
-
-/** Reinitialise parser. */
-public void ReInit(JavaCharStream stream)
-{
-   jjmatchedPos = jjnewStateCnt = 0;
-   curLexState = defaultLexState;
-   input_stream = stream;
-   ReInitRounds();
-}
-private void ReInitRounds()
-{
-   int i;
-   jjround = 0x80000001;
-   for (i = 18; i-- > 0;)
-      jjrounds[i] = 0x80000000;
-}
-
-/** Reinitialise parser. */
-public void ReInit(JavaCharStream stream, int lexState)
-{
-   ReInit(stream);
-   SwitchTo(lexState);
-}
-
-/** Switch to specified lex state. */
-public void SwitchTo(int lexState)
-{
-   if (lexState >= 1 || lexState < 0)
-      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
-   else
-      curLexState = lexState;
-}
-
-protected Token jjFillToken()
-{
-   final Token t;
-   final String curTokenImage;
-   final int beginLine;
-   final int endLine;
-   final int beginColumn;
-   final int endColumn;
-   String im = jjstrLiteralImages[jjmatchedKind];
-   curTokenImage = (im == null) ? input_stream.GetImage() : im;
-   beginLine = input_stream.getBeginLine();
-   beginColumn = input_stream.getBeginColumn();
-   endLine = input_stream.getEndLine();
-   endColumn = input_stream.getEndColumn();
-   t = Token.newToken(jjmatchedKind, curTokenImage);
-
-   t.beginLine = beginLine;
-   t.endLine = endLine;
-   t.beginColumn = beginColumn;
-   t.endColumn = endColumn;
-
-   return t;
-}
-
-int curLexState = 0;
-int defaultLexState = 0;
-int jjnewStateCnt;
-int jjround;
-int jjmatchedPos;
-int jjmatchedKind;
-
-/** Get the next Token. */
-public Token getNextToken() 
-{
-  Token matchedToken;
-  int curPos = 0;
-
-  EOFLoop :
-  for (;;)
-  {
-   try
-   {
-      curChar = input_stream.BeginToken();
-   }
-   catch(java.io.IOException e)
-   {
-      jjmatchedKind = 0;
-      matchedToken = jjFillToken();
-      return matchedToken;
-   }
-
-   jjmatchedKind = 0x7fffffff;
-   jjmatchedPos = 0;
-   curPos = jjMoveStringLiteralDfa0_0();
-   if (jjmatchedKind != 0x7fffffff)
-   {
-      if (jjmatchedPos + 1 < curPos)
-         input_stream.backup(curPos - jjmatchedPos - 1);
-      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
-      {
-         matchedToken = jjFillToken();
-         return matchedToken;
-      }
-      else
-      {
-         continue EOFLoop;
-      }
-   }
-   int error_line = input_stream.getEndLine();
-   int error_column = input_stream.getEndColumn();
-   String error_after = null;
-   boolean EOFSeen = false;
-   try { input_stream.readChar(); input_stream.backup(1); }
-   catch (java.io.IOException e1) {
-      EOFSeen = true;
-      error_after = curPos <= 1 ? "" : input_stream.GetImage();
-      if (curChar == '\n' || curChar == '\r') {
-         error_line++;
-         error_column = 0;
-      }
-      else
-         error_column++;
-   }
-   if (!EOFSeen) {
-      input_stream.backup(1);
-      error_after = curPos <= 1 ? "" : input_stream.GetImage();
-   }
-   throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
-  }
-}
-
-private void jjCheckNAdd(int state)
-{
-   if (jjrounds[state] != jjround)
-   {
-      jjstateSet[jjnewStateCnt++] = state;
-      jjrounds[state] = jjround;
-   }
-}
-private void jjAddStates(int start, int end)
-{
-   do {
-      jjstateSet[jjnewStateCnt++] = jjnextStates[start];
-   } while (start++ != end);
-}
-private void jjCheckNAddTwoStates(int state1, int state2)
-{
-   jjCheckNAdd(state1);
-   jjCheckNAdd(state2);
-}
-
-}


[38/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/RestTestingUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/RestTestingUtil.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/RestTestingUtil.java
new file mode 100644
index 0000000..e452b89
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/RestTestingUtil.java
@@ -0,0 +1,329 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * The Class RestTestingUtil.
+ */
+public class RestTestingUtil {
+  
+  /** The host name. */
+  public static String hostName = null;
+
+  /**
+   * Gets the pcaps by keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the pcaps by keys
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeys(String keys) {
+    System.out
+        .println("**********************getPcapsByKeys ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
+        + "&includeReverseTraffic={includeReverseTraffic}"
+        + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    String includeReverseTraffic = "false";
+
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("keys", keys);
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with reverse traffic
+    includeReverseTraffic = "true";
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3.with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 4.with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response4 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response4);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * Gets the pcaps by keys range.
+   * 
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @return the pcaps by keys range
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeysRange(String startKey, String endKey) {
+    System.out
+        .println("**********************getPcapsByKeysRange ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
+        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("startKey", startKey);
+    map.put("endKey", "endKey");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3. with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  private static void getPcapsByIdentifiers(String srcIp, String dstIp, String protocol, String srcPort, String dstPort) {
+    System.out
+            .println("**********************getPcapsByKeysRange ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+            + "/pcapGetter/getPcapsByIdentifiers?srcIp={srcIp}"
+            + "&dstIp={dstIp}" + "&protocol={protocol}" + "&srcPort={srcPort}"
+            + "&dstPort={dstPort}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("srcIp", srcIp);
+    map.put("dstIp", dstIp);
+    map.put("protocol", protocol);
+    map.put("srcPort", srcPort);
+    map.put("dstPort", dstPort);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+            requestEntity, byte[].class, map);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out
+            .format(
+                    "getPcapsByIdentifiers : request= <srcIp=%s; dstIp=%s; protocol=%s; srcPort=%s; dstPort=%s> \n response= %s \n",
+                    srcIp, dstIp, protocol, endTime, srcPort, dstPort, response1);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+            requestEntity, byte[].class, map);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out
+            .format(
+                    "getPcapsByIdentifiers : request= <srcIp=%s; dstIp=%s; protocol=%s; srcPort=%s; dstPort=%s> \n response= %s \n",
+                    srcIp, dstIp, protocol, endTime, srcPort, dstPort, response2);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3. with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+            requestEntity, byte[].class, map);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out
+            .format(
+                    "getPcapsByIdentifiers : request= <srcIp=%s; dstIp=%s; protocol=%s; srcPort=%s; dstPort=%s> \n response= %s \n",
+                    srcIp, dstIp, protocol, endTime, srcPort, dstPort, response3);
+    System.out
+            .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   */
+  public static void main(String[] args) {
+
+    /*
+     * Run this program with system properties
+     * 
+     * -DhostName=mon.hw.com:8090
+     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
+     * 1800000b-06-0050-5af6-64840-40785
+     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
+     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
+     */
+
+    hostName = System.getProperty("hostName");
+
+    String keys = System.getProperty("keys");
+
+    String statyKey = System.getProperty("startKey");
+    String endKey = System.getProperty("endKey");
+
+    getPcapsByKeys(keys);
+    getPcapsByKeysRange(statyKey, endKey);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/JettyServiceRunner.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/JettyServiceRunner.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/JettyServiceRunner.java
new file mode 100644
index 0000000..f4fb27c
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/JettyServiceRunner.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice.rest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.core.Application;
+
+import org.apache.metron.pcapservice.PcapReceiverImplRestEasy;
+
+public class JettyServiceRunner extends Application  {
+	
+
+	private static Set services = new HashSet(); 
+		
+	public  JettyServiceRunner() {     
+		// initialize restful services   
+		services.add(new PcapReceiverImplRestEasy());  
+	}
+	@Override
+	public  Set getSingletons() {
+		return services;
+	}  
+	public  static Set getServices() {  
+		return services;
+	} 
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/PcapService.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/PcapService.java b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/PcapService.java
new file mode 100644
index 0000000..b5832b6
--- /dev/null
+++ b/metron-platform/metron-api/src/main/java/org/apache/metron/pcapservice/rest/PcapService.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice.rest;
+
+import java.io.IOException;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher;
+
+import org.apache.metron.api.helper.service.PcapServiceCli;
+
+
+public class PcapService {
+
+	public static void main(String[] args) throws IOException {
+
+		PcapServiceCli cli = new PcapServiceCli(args);
+		cli.parse();
+		
+		Server server = new Server(cli.getPort());
+		ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+		context.setContextPath("/");
+		ServletHolder h = new ServletHolder(new HttpServletDispatcher());
+		h.setInitParameter("javax.ws.rs.Application", "org.apache.metron.pcapservice.rest.JettyServiceRunner");
+		context.addServlet(h, "/*");
+		server.setHandler(context);
+		try {
+			server.start();
+			server.join();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/resources/config-definition-hbase.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/resources/config-definition-hbase.xml b/metron-platform/metron-api/src/main/resources/config-definition-hbase.xml
new file mode 100644
index 0000000..98ece42
--- /dev/null
+++ b/metron-platform/metron-api/src/main/resources/config-definition-hbase.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<configuration>
+	<header>
+		<result delimiterParsingDisabled="true" forceReloadCheck="true"></result>
+		<lookups>
+      		<lookup config-prefix="expr"
+              	config-class="org.apache.commons.configuration.interpol.ExprLookup">
+        		<variables>
+          			<variable name="System" value="Class:java.lang.System"/>
+          			<variable name="net" value="Class:java.net.InetAddress"/>
+          			<variable name="String" value="Class:org.apache.commons.lang.StringUtils"/>
+        		</variables>
+      		</lookup>
+    	</lookups>
+	</header>
+	<override>
+		<!-- 1. properties from 'hbase-config.properties' are loaded first;
+				if a property is not present in this file, then it will search in the files in the order they are defined here.
+		     2. 'refreshDelay' indicates the minimum delay in milliseconds between checks to see if the underlying file is changed.
+		     3. 'config-optional' indicates this file is not required -->
+
+		<properties fileName="${expr:System.getProperty('configPath')+'/hbase-config.properties'}"  config-optional="true">
+			<reloadingStrategy refreshDelay="${expr:System.getProperty('configRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+	     </properties>
+
+		<properties fileName="hbase-config-default.properties" config-optional="true">
+<!-- 					<reloadingStrategy refreshDelay="${expr:System.getProperty('defaultConfigRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+ -->	     </properties>
+
+	</override>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/main/resources/hbase-config-default.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/main/resources/hbase-config-default.properties b/metron-platform/metron-api/src/main/resources/hbase-config-default.properties
new file mode 100644
index 0000000..0f47193
--- /dev/null
+++ b/metron-platform/metron-api/src/main/resources/hbase-config-default.properties
@@ -0,0 +1,57 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+#hbase zoo keeper configuration
+hbase.zookeeper.quorum=zkpr1,zkpr2,zkpr3
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=value
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 70
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request, applicable for only partial response scenario
+hbase.table.scan.include.duplicate.lastrow= true
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/CellTimestampComparatorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/CellTimestampComparatorTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/CellTimestampComparatorTest.java
new file mode 100644
index 0000000..9d10c92
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/CellTimestampComparatorTest.java
@@ -0,0 +1,109 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hbase.Cell;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import org.apache.metron.pcapservice.CellTimestampComparator;
+
+/**
+ * The Class CellTimestampComparatorTest.
+ */
+public class CellTimestampComparatorTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_less.
+   */
+  @Test
+  public void test_less() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13845345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == -1);
+
+  }
+
+  /**
+   * Test_greater.
+   */
+  @Test
+  public void test_greater() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13745345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 1);
+
+  }
+
+  /**
+   * Test_equal.
+   */
+  @Test
+  public void test_equal() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 0);
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/ConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/ConfigurationUtilTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/ConfigurationUtilTest.java
new file mode 100644
index 0000000..3424425
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/ConfigurationUtilTest.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import org.junit.Test;
+
+import org.apache.metron.pcapservice.ConfigurationUtil;
+import org.apache.metron.pcapservice.ConfigurationUtil.SizeUnit;
+import org.springframework.util.Assert;
+
+/**
+ * The Class ConfigurationUtilTest.
+ */
+public class ConfigurationUtilTest {
+
+  /**
+   * Test_get max allowable result size in bytes.
+   */
+  @Test
+  public void test_getMaxAllowableResultSizeInBytes() {
+    long result = ConfigurationUtil.getMaxResultSize();
+    Assert.isTrue(result == 62914560);
+  }
+
+  /**
+   * Test_get max allowable results size unit.
+   */
+  @Test
+  public void test_getMaxAllowableResultsSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getResultSizeUnit();
+    Assert.isTrue(SizeUnit.MB == result);
+  }
+
+  /**
+   * Test_get max row size in bytes.
+   */
+  @Test
+  public void test_getMaxRowSizeInBytes() {
+    long result = ConfigurationUtil.getMaxRowSize();
+    Assert.isTrue(result == 71680);
+  }
+
+  /**
+   * Test_get max row size unit.
+   */
+  @Test
+  public void test_getMaxRowSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getRowSizeUnit();
+    Assert.isTrue(SizeUnit.KB == result);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseConfigurationUtilTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseConfigurationUtilTest.java
new file mode 100644
index 0000000..75ac782
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseConfigurationUtilTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.util.Assert;
+
+import org.apache.metron.pcapservice.HBaseConfigurationUtil;
+
+/**
+ * The Class HBaseConfigurationUtilTest.
+ */
+public class HBaseConfigurationUtilTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_read.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_read() throws IOException {
+    Configuration configuration = HBaseConfigurationUtil.read();
+    Assert.isTrue(configuration != null, "Configuration must not be null");
+    Assert.isTrue(configuration.get("hbase.client.retries.number").equals("1"),
+        "value must be equal");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseIntegrationTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseIntegrationTest.java
new file mode 100644
index 0000000..62aa721
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/HBaseIntegrationTest.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * The Class HBaseIntegrationTest.
+ * 
+ * @author Sayi
+ */
+public class HBaseIntegrationTest {
+
+  /** The test util. */
+  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+
+  /** The test table. */
+  private HTable testTable;
+
+  /**
+   * Inits the cluster.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  void initCluster() throws Exception {
+    // testUtil.getConfiguration().addResource("hbase-site-local.xml");
+    // testUtil.getConfiguration().reloadConfiguration();
+    // start mini hbase cluster
+    testUtil.startMiniCluster(1);
+    // create tables
+    createTable();
+
+  }
+
+  /**
+   * Creates the table.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void createTable() throws IOException {
+    testTable = testUtil.createTable(Bytes.toBytes("test_pcaps_local"), Bytes.toBytes("cf"));
+    System.out.println("after 'test_pcaps_local' table creation ");
+    // create put
+    Put put = new Put(Bytes.toBytes("1111")); // row key =1111
+    put.add(Bytes.toBytes("cf"), Bytes.toBytes("packet"),
+        Bytes.toBytes("aaaaaaaa"));
+    testTable.put(put);
+    System.out.println("after testTable.put(put)");
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * @throws Exception
+   *           the exception
+   */
+  public static void main(String[] args) throws Exception {
+    // HBaseIntegrationTest test = new HBaseIntegrationTest();
+    // test.initCluster();
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapGetterHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapGetterHBaseImplTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapGetterHBaseImplTest.java
new file mode 100644
index 0000000..82cb278
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapGetterHBaseImplTest.java
@@ -0,0 +1,553 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Scan;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.util.Assert;
+
+import org.apache.metron.pcapservice.PcapGetterHBaseImpl;
+import org.apache.metron.pcapservice.PcapsResponse;
+
+/**
+ * The Class PcapGetterHBaseImplTest.
+ */
+public class PcapGetterHBaseImplTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_get pcaps_with list.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withList() throws IOException {
+    // mocking
+    String[] keys = { "0a07002b-0a078039-06-1e8b-0087",
+        "0a070025-0a07807a-06-aab8-c360" };
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+    //
+    // actual call
+    // PcapsResponse response = spy.getPcaps(Arrays.asList(keys));
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // //
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key and timestamps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKeyAndTimestamps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key, startTime, endTime, false);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key_multiple pcaps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey_multiplePcaps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+    mockPcaps.add(getTestPcapBytes());
+
+    /*
+     * Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class
+     * ), Mockito.any(HTable.class), Mockito.any(Scan.class),
+     * Mockito.any(byte[].class), Mockito.any(byte[].class));
+     */
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertNotNull(response);
+    // Assert.assertTrue(response.getResponseSize() > mockPcaps.get(0).length);
+  }
+
+  /**
+   * Gets the test pcap bytes.
+   * 
+   * @return the test pcap bytes
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private byte[] getTestPcapBytes() throws IOException {
+    File fin = new File("src/test/resources/test-tcp-packet.pcap");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    return pcapBytes;
+  }
+
+  /**
+   * Test_remove duplicates.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_removeDuplicates() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+    keys.add("18800006-1800000b-06-0050-5af6");
+
+    List<String> deDupKeys = pcapGetter.removeDuplicateKeys(keys);
+    Assert.isTrue(deDupKeys.size() == 3);
+    List<String> testKeys = new ArrayList<String>();
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    ListUtils.isEqualList(deDupKeys, testKeys);
+  }
+
+  /**
+   * Test_sort keys by asc order_with out reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withOutReverseTraffic()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, false);
+
+    List<String> testKeys = new ArrayList<String>();
+    testKeys.add("18800006-1800000b-06-0019-caac");
+    testKeys.add("18800006-1800000b-06-0050-5af6");
+    testKeys.add("18800006-1800000b-11-0035-3810");
+
+    Assert.isTrue(ListUtils.isEqualList(result, testKeys));
+  }
+
+  /**
+   * Test_sort keys by asc order_with reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withReverseTraffic() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3812");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-11-0035-3811");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, true);
+    Assert.isTrue(result.size() == 6);
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0019-caac-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 2);
+
+    List<String> unprocessedKeys2 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0050-5af6-65140-40815");
+    // System.out.println("unprocessedKeys2 ="+unprocessedKeys2);
+    Assert.isTrue(unprocessedKeys2.size() == 1);
+
+    List<String> unprocessedKeys3 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-0035-3810-6514040815");
+    // System.out.println("unprocessedKeys3 ="+unprocessedKeys3);
+    Assert.isTrue(unprocessedKeys3.size() == 0);
+
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys_with out match.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys_withOutMatch()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-89-455-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 3);
+  }
+
+  /**
+   * Test_create start and stop row keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createStartAndStopRowKeys() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810";
+    Map<String, String> map = pcapGetter.createStartAndStopRowKeys(key, false,
+        false);
+    System.out.println("map =" + map.toString());
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map1 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey, true, false);
+    System.out.println("map1 =" + map1.toString());
+
+    String lastRowKey2 = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map2 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey2, true, true);
+    System.out.println("map2 =" + map2.toString());
+
+  }
+
+  /**
+   * Test_check if valid input_valid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+
+    boolean response = pcapGetter.checkIfValidInput(keys, lastRowKey);
+    Assert.isTrue(response);
+
+  }
+
+  /**
+   * Test_check if valid input_in valid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_inValid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        null);
+    Assert.isTrue(!response);
+
+  }
+
+  /**
+   * Test_check if valid input_valid_mixed.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid_mixed() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        lastRowKey);
+    Assert.isTrue(response);
+  }
+
+  /**
+   * Test_create get request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    // compare in micros as the data creation time unit is set to Micros in
+    // properties file.
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 );
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 );
+  }
+
+  /**
+   * Test_create get request_default time range.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_defaultTimeRange() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    Get get = pcapGetter.createGetRequest(key, -1, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+  }
+
+  /**
+   * Test_create get request_with start time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withStartTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000 );
+    Assert.isTrue(get.getTimeRange().getMax() == Long.valueOf(Long.MAX_VALUE));
+  }
+
+  /**
+   * Test_create get request_with end time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withEndTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, -1, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000 );
+  }
+
+  /**
+   * Test_create scan request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createScanRequest() throws IOException {
+    // mocking
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+
+    Map<String, String> keysMap = new HashMap<String, String>();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087-00000-00000";
+    String endKey = "0a070025-0a07807a-06-aab8-c360-99999-99999";
+    keysMap.put("startKey", startKey);
+    keysMap.put("endKey", endKey);
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+    long maxResultSize = 673424;
+
+    // actual call
+    Scan scan = pcapGetter.createScanRequest(pcapsResponse, keysMap, startTime,
+        endTime, maxResultSize);
+
+    // verify time range
+    Assert.isTrue(scan.getTimeRange().getMin() == startTime * 1000 ); // compare
+                                                                            // in
+                                                                            // millis
+    Assert.isTrue(scan.getTimeRange().getMax() == endTime * 1000 ); // compare
+                                                                          // in
+                                                                          // millis
+
+    // verify start and stop rows
+    Assert.isTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.isTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapHelperTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapHelperTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapHelperTest.java
new file mode 100644
index 0000000..73db384
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapHelperTest.java
@@ -0,0 +1,335 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import org.apache.metron.pcapservice.PcapHelper;
+import org.apache.metron.pcapservice.PcapHelper.TimeUnit;
+import org.springframework.util.Assert;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapHelperTest.
+ * 
+ * @author Sayi
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(PcapHelper.class)
+public class PcapHelperTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+    PowerMockito.spy(PcapHelper.class);
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233344L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_random() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 13388; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(13388000000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233000L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222334444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222334444L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_0() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 0; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(0 == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_1() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_decimal() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long inputTime = 13; // input time in seconds (double to long type casting)
+    long time = PcapHelper.convertSecondsToDataCreationTimeUnit(inputTime);
+
+    Assert.isTrue(13000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = (long) 111.333; // input time in seconds (double to long type
+                                   // casting)
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111000000L == time);
+  }
+
+  /**
+   * Test_get data creation time unit.
+   */
+  @Test
+  public void test_getDataCreationTimeUnit() {
+    TimeUnit dataCreationTimeUnit = PcapHelper.getDataCreationTimeUnit();
+    Assert.isTrue(TimeUnit.MILLIS == dataCreationTimeUnit);
+  }
+
+  /**
+   * Test_reverse key_valid.
+   */
+  @Test
+  public void test_reverseKey_valid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_valid_with fragment.
+   */
+  @Test
+  public void test_reverseKey_valid_withFragment() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_in valid.
+   */
+  @Test
+  public void test_reverseKey_inValid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-ipId-fragmentId-extra";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("".equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_as list.
+   */
+  @Test
+  public void test_reverseKey_asList() {
+    String[] keys = {
+        "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId",
+        "162.242.152.24-162.242.153.12-UDP-38190-9092" };
+
+    List<String> reverseKeys = PcapHelper.reverseKey(Arrays.asList(keys));
+
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reverseKeys.get(0)));
+    Assert.isTrue("162.242.153.12-162.242.152.24-UDP-9092-38190"
+        .equals(reverseKeys.get(1)));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapScannerHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapScannerHBaseImplTest.java b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapScannerHBaseImplTest.java
new file mode 100644
index 0000000..afac1d3
--- /dev/null
+++ b/metron-platform/metron-api/src/test/java/org/apache/metron/pcapservice/PcapScannerHBaseImplTest.java
@@ -0,0 +1,249 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import junit.framework.Assert;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Scan;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+// TODO: Auto-generated Javadoc
+/**
+ * The Class PcapScannerHBaseImplTest.
+ */
+public class PcapScannerHBaseImplTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_create scan request.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @Test
+  public void test_createScanRequest() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    // actual call
+    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
+        maxResultSize, -1, -1);
+
+    // verify
+    Assert.assertTrue(scan.getTimeRange().getMin() == 0);
+    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+  }
+
+  /**
+   * Test_create scan request_with timestamps.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @Test
+  public void test_createScanRequest_withTimestamps() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    // actual call
+    Scan scan = pcapScanner.createScanRequest(cf, cq, startKey, endKey,
+        maxResultSize, startTime, endTime);
+
+    // verify
+    Assert.assertTrue(scan.getTimeRange().getMin() == 1376782349234L);
+    Assert.assertTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.assertTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+  }
+
+  /**
+   * Test_get pcaps_with all arguments.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_withAllArguments() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey, maxResultSize, startTime,
+        endTime);
+
+    // verify
+    Assert.assertTrue(response.length == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with minimal arguments.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_withMinimalArguments() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey);
+
+    // verify
+    Assert.assertTrue(response.length == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_multiple pcaps.
+   * 
+   * @throws IOException
+   *           the IO exception
+   */
+  @SuppressWarnings({ "unchecked", "unused" })
+  @Test
+  public void test_getPcaps_multiplePcaps() throws IOException {
+    // mocking
+    PcapScannerHBaseImpl pcapScanner = (PcapScannerHBaseImpl) PcapScannerHBaseImpl
+        .getInstance();
+    PcapScannerHBaseImpl spy = Mockito.spy(pcapScanner);
+    byte[] cf = "cf".getBytes();
+    byte[] cq = "pcap".getBytes();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087";
+    String endKey = "0a070025-0a07807a-06-aab8-c360";
+    long maxResultSize = 60;
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+    mockPcaps.add(getTestPcapBytes());
+
+    Mockito
+        .doReturn(mockPcaps)
+        .when(spy)
+        .scanPcaps(Mockito.any(ArrayList.class), Mockito.any(HTable.class),
+            Mockito.any(Scan.class), Mockito.any(byte[].class),
+            Mockito.any(byte[].class));
+
+    // actual call
+    byte[] response = spy.getPcaps(startKey, endKey);
+
+    // verify
+    Assert.assertNotNull(response);
+    Assert.assertTrue(response.length > mockPcaps.get(0).length);
+  }
+
+  /**
+   * Gets the test pcap bytes.
+   * 
+   * @return the test pcap bytes
+   * @throws IOException
+   *           the IO exception
+   */
+  private byte[] getTestPcapBytes() throws IOException {
+    File fin = new File("src/test/resources/test-tcp-packet.pcap");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    return pcapBytes;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/resources/hbase-config.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/resources/hbase-config.properties b/metron-platform/metron-api/src/test/resources/hbase-config.properties
new file mode 100644
index 0000000..89dbf06
--- /dev/null
+++ b/metron-platform/metron-api/src/test/resources/hbase-config.properties
@@ -0,0 +1,57 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+#hbase zoo keeper configuration
+hbase.zookeeper.quorum=dn1.hw.com,dn2.hw.com,dn3.hw.com
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=pcap
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than max value throw an error.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# time stamp conversion configuration; possible values 'SECONDS'(seconds), 'MILLIS'(milli seconds), 'MICROS' (micro seconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 0.07
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request, applicable for only partial response scenario
+hbase.table.scan.include.duplicate.lastrow= true;
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-api/src/test/resources/test-tcp-packet.pcap
----------------------------------------------------------------------
diff --git a/metron-platform/metron-api/src/test/resources/test-tcp-packet.pcap b/metron-platform/metron-api/src/test/resources/test-tcp-packet.pcap
new file mode 100644
index 0000000..25d47da
Binary files /dev/null and b/metron-platform/metron-api/src/test/resources/test-tcp-packet.pcap differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/.gitignore
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/.gitignore b/metron-platform/metron-common/.gitignore
new file mode 100644
index 0000000..b83d222
--- /dev/null
+++ b/metron-platform/metron-common/.gitignore
@@ -0,0 +1 @@
+/target/


[14/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/resources/patterns/asa
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/resources/patterns/asa b/metron-platform/metron-parsers/src/main/resources/patterns/asa
new file mode 100644
index 0000000..8c2da93
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/resources/patterns/asa
@@ -0,0 +1,176 @@
+# Forked from https://github.com/elasticsearch/logstash/tree/v1.4.0/patterns
+
+USERNAME [a-zA-Z0-9._-]+
+USER %{USERNAME:UNWANTED}
+INT (?:[+-]?(?:[0-9]+))
+BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
+NUMBER (?:%{BASE10NUM:UNWANTED})
+BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
+BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
+
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+#QUOTEDSTRING (?:(?<!\\)(?:"(?:\\.|[^\\"])*"|(?:'(?:\\.|[^\\'])*')|(?:`(?:\\.|[^\\`])*`)))
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC:UNWANTED}|%{WINDOWSMAC:UNWANTED}|%{COMMONMAC:UNWANTED})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5
 ]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+IP (?:%{IPV6:UNWANTED}|%{IPV4:UNWANTED})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOST %{HOSTNAME:UNWANTED}
+IPORHOST (?:%{HOSTNAME:UNWANTED}|%{IP:UNWANTED})
+HOSTPORT (?:%{IPORHOST}:%{POSINT:PORT})
+
+# paths
+PATH (?:%{UNIXPATH}|%{WINPATH})
+UNIXPATH (?>/(?>[\w_%!$@:.,~-]+|\\.)*)+
+#UNIXPATH (?<![\w\/])(?:/[^\/\s?*]*)+
+TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
+WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
+URIPROTO [A-Za-z]+(\+[A-Za-z+]+)?
+URIHOST %{IPORHOST}(?::%{POSINT:port})?
+# uripath comes loosely from RFC1738, but mostly from what Firefox
+# doesn't turn into %XX
+URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+
+#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
+URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]]*
+URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
+
+# Months: January, Feb, 3, 03, 12, December
+MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b
+MONTHNUM (?:0?[1-9]|1[0-2])
+MONTHNUM2 (?:0[1-9]|1[0-2])
+MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
+
+# Days: Monday, Tue, Thu, etc...
+DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
+
+# Years?
+YEAR (?>\d\d){1,2}
+# Time: HH:MM:SS
+#TIME \d{2}:\d{2}(?::\d{2}(?:\.\d+)?)?
+# I'm still on the fence about using grok to perform the time match,
+# since it's probably slower.
+# TIME %{POSINT<24}:%{POSINT<60}(?::%{POSINT<60}(?:\.%{POSINT})?)?
+HOUR (?:2[0123]|[01]?[0-9])
+MINUTE (?:[0-5][0-9])
+# '60' is a leap second in most time standards and thus is valid.
+SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
+TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
+# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
+DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
+DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
+ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
+ISO8601_SECOND (?:%{SECOND}|60)
+TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
+DATE %{DATE_US}|%{DATE_EU}
+DATESTAMP %{DATE}[- ]%{TIME}
+TZ (?:[PMCE][SD]T|UTC)
+DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
+DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
+DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
+DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
+GREEDYDATA .*
+
+# Syslog Dates: Month Day HH:MM:SS
+SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
+PROG (?:[\w._/%-]+)
+SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
+SYSLOGHOST %{IPORHOST}
+SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
+HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
+
+# Shortcuts
+QS %{QUOTEDSTRING:UNWANTED}
+
+# Log formats
+SYSLOGBASE %{SYSLOGTIMESTAMP:timestamp} (?:%{SYSLOGFACILITY} )?%{SYSLOGHOST:logsource} %{SYSLOGPROG}:
+
+MESSAGESLOG %{SYSLOGBASE} %{DATA}
+
+COMMONAPACHELOG %{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "(?:%{WORD:verb} %{NOTSPACE:request}(?: HTTP/%{NUMBER:httpversion})?|%{DATA:rawrequest})" %{NUMBER:response} (?:%{NUMBER:bytes}|-)
+COMBINEDAPACHELOG %{COMMONAPACHELOG} %{QS:referrer} %{QS:agent}
+
+# Log Levels
+LOGLEVEL ([A|a]lert|ALERT|[T|t]race|TRACE|[D|d]ebug|DEBUG|[N|n]otice|NOTICE|[I|i]nfo|INFO|[W|w]arn?(?:ing)?|WARN?(?:ING)?|[E|e]rr?(?:or)?|ERR?(?:OR)?|[C|c]rit?(?:ical)?|CRIT?(?:ICAL)?|[F|f]atal|FATAL|[S|s]evere|SEVERE|EMERG(?:ENCY)?|[Ee]merg(?:ency)?)
+
+#== Cisco ASA ==
+CISCO_TAGGED_SYSLOG ^<%{POSINT:syslog_pri}>%{CISCOTIMESTAMP:timestamp}( %{SYSLOGHOST:sysloghost})? ?:? %%{CISCOTAG:ciscotag}:
+CISCOTIMESTAMP %{MONTH} +%{MONTHDAY}(?: %{YEAR})? %{TIME}
+CISCOTAG [A-Z0-9]+-%{INT}-(?:[A-Z0-9_]+)
+
+# Common Particles
+CISCO_ACTION Built|Teardown|Deny|Denied|denied|requested|permitted|denied by ACL|discarded|est-allowed|Dropping|created|deleted
+CISCO_REASON Duplicate TCP SYN|Failed to locate egress interface|Invalid transport field|No matching connection|DNS Response|DNS Query|(?:%{WORD}\s*)*
+CISCO_DIRECTION Inbound|inbound|Outbound|outbound
+CISCO_INTERVAL first hit|%{INT}-second interval
+CISCO_XLATE_TYPE static|dynamic
+# ASA-2-106001
+CISCOFW106001 : %{CISCO_DIRECTION:direction} %{WORD:protocol} connection %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{GREEDYDATA:tcp_flags} on interface %{GREEDYDATA:interface}
+# ASA-2-106006, ASA-2-106007, ASA-2-106010
+CISCOFW106006_106007_106010 : %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} (?:from|src) %{IP:src_ip}/%{INT:src_port}(\(%{DATA:src_fwuser}\))? (?:to|dst) %{IP:dst_ip}/%{INT:dst_port}(\(%{DATA:dst_fwuser}\))? (?:on interface %{DATA:interface}|due to %{CISCO_REASON:reason})
+# ASA-3-106014
+CISCOFW106014 : %{CISCO_ACTION:action} %{CISCO_DIRECTION:direction} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(\(%{DATA:dst_fwuser}\))? \(type %{INT:icmp_type}, code %{INT:icmp_code}\)
+# ASA-6-106015
+CISCOFW106015 : %{CISCO_ACTION:action} %{WORD:protocol} \(%{DATA:policy_id}\) from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port} flags %{DATA:tcp_flags}  on interface %{GREEDYDATA:interface}
+# ASA-1-106021
+CISCOFW106021 : %{CISCO_ACTION:action} %{WORD:protocol} reverse path check from %{IP:src_ip} to %{IP:dst_ip} on interface %{GREEDYDATA:interface}
+# ASA-4-106023
+CISCOFW106023 : %{CISCO_ACTION:action} %{WORD:protocol} src %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? dst %{DATA:dst_interface}:%{IP:dst_ip}(/%{INT:dst_port})?(\(%{DATA:dst_fwuser}\))?( \(type %{INT:icmp_type}, code %{INT:icmp_code}\))? by access-group %{DATA:policy_id} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
+# ASA-5-106100
+CISCOFW106100 : access-list %{WORD:policy_id} %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}/%{IP:src_ip}\(%{INT:src_port}\)(\(%{DATA:src_fwuser}\))? -> %{DATA:dst_interface}/%{IP:dst_ip}\(%{INT:dst_port}\)(\(%{DATA:src_fwuser}\))? hit-cnt %{INT:hit_count} %{CISCO_INTERVAL:interval} \[%{DATA:hashcode1}, %{DATA:hashcode2}\]
+# ASA-6-110002
+CISCOFW110002 : %{CISCO_REASON:reason} for %{WORD:protocol} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-302010
+CISCOFW302010 : %{INT:connection_count} in use, %{INT:connection_count_max} most used
+# ASA-6-302013, ASA-6-302014, ASA-6-302015, ASA-6-302016
+CISCOFW302013_302014_302015_302016 : %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection %{INT:connection_id} for %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port}( \(%{IP:src_mapped_ip}/%{INT:src_mapped_port}\))?(\(%{DATA:src_fwuser}\))? to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}( \(%{IP:dst_mapped_ip}/%{INT:dst_mapped_port}\))?(\(%{DATA:dst_fwuser}\))?( duration %{TIME:duration} bytes %{INT:bytes})?(?: %{CISCO_REASON:reason})?( \(%{DATA:user}\))?
+# ASA-6-302020, ASA-6-302021
+CISCOFW302020_302021 : %{CISCO_ACTION:action}(?: %{CISCO_DIRECTION:direction})? %{WORD:protocol} connection for faddr %{IP:dst_ip}/%{INT:icmp_seq_num}(?:\(%{DATA:fwuser}\))? gaddr %{IP:src_xlated_ip}/%{INT:icmp_code_xlated} laddr %{IP:src_ip}/%{INT:icmp_code}( \(%{DATA:user}\))?
+# ASA-6-305011
+CISCOFW305011 : %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port}
+# ASA-3-313001, ASA-3-313004, ASA-3-313008
+CISCOFW313001_313004_313008 : %{CISCO_ACTION:action} %{WORD:protocol} type=%{INT:icmp_type}, code=%{INT:icmp_code} from %{IP:src_ip} on interface %{DATA:interface}( to %{IP:dst_ip})?
+# ASA-4-313005
+CISCOFW313005 : %{CISCO_REASON:reason} for %{WORD:protocol} error message: %{WORD:err_protocol} src %{DATA:err_src_interface}:%{IP:err_src_ip}(\(%{DATA:err_src_fwuser}\))? dst %{DATA:err_dst_interface}:%{IP:err_dst_ip}(\(%{DATA:err_dst_fwuser}\))? \(type %{INT:err_icmp_type}, code %{INT:err_icmp_code}\) on %{DATA:interface} interface\.  Original IP payload: %{WORD:protocol} src %{IP:orig_src_ip}/%{INT:orig_src_port}(\(%{DATA:orig_src_fwuser}\))? dst %{IP:orig_dst_ip}/%{INT:orig_dst_port}(\(%{DATA:orig_dst_fwuser}\))?
+# ASA-4-402117
+CISCOFW402117 : %{WORD:protocol}: Received a non-IPSec packet \(protocol= %{WORD:orig_protocol}\) from %{IP:src_ip} to %{IP:dst_ip}
+# ASA-4-402119
+CISCOFW402119 : %{WORD:protocol}: Received an %{WORD:orig_protocol} packet \(SPI= %{DATA:spi}, sequence number= %{DATA:seq_num}\) from %{IP:src_ip} \(user= %{DATA:user}\) to %{IP:dst_ip} that failed anti-replay checking
+# ASA-4-419001
+CISCOFW419001 : %{CISCO_ACTION:action} %{WORD:protocol} packet from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}, reason: %{GREEDYDATA:reason}
+# ASA-4-419002
+CISCOFW419002 : %{CISCO_REASON:reason} from %{DATA:src_interface}:%{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port} with different initial sequence number
+# ASA-4-500004
+CISCOFW500004 : %{CISCO_REASON:reason} for protocol=%{WORD:protocol}, from %{IP:src_ip}/%{INT:src_port} to %{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-602303, ASA-6-602304
+CISCOFW602303_602304 : %{WORD:protocol}: An %{CISCO_DIRECTION:direction} %{GREEDYDATA:tunnel_type} SA \(SPI= %{DATA:spi}\) between %{IP:src_ip} and %{IP:dst_ip} \(user= %{DATA:user}\) has been %{CISCO_ACTION:action}
+# ASA-7-710001, ASA-7-710002, ASA-7-710003, ASA-7-710005, ASA-7-710006
+CISCOFW710001_710002_710003_710005_710006 : %{WORD:protocol} (?:request|access) %{CISCO_ACTION:action} from %{IP:src_ip}/%{INT:src_port} to %{DATA:dst_interface}:%{IP:dst_ip}/%{INT:dst_port}
+# ASA-6-713172
+CISCOFW713172 : Group = %{GREEDYDATA:group}, IP = %{IP:src_ip}, Automatic NAT Detection Status:\s+Remote end\s*%{DATA:is_remote_natted}\s*behind a NAT device\s+This\s+end\s*%{DATA:is_local_natted}\s*behind a NAT device
+# ASA-4-733100
+CISCOFW733100 : \[\s*%{DATA:drop_type}\s*\] drop %{DATA:drop_rate_id} exceeded. Current burst rate is %{INT:drop_rate_current_burst} per second, max configured rate is %{INT:drop_rate_max_burst}; Current average rate is %{INT:drop_rate_current_avg} per second, max configured rate is %{INT:drop_rate_max_avg}; Cumulative total count is %{INT:drop_total_count}
+
+
+# ASA-6-305012
+CISCOFW305012 : %{CISCO_ACTION:action} %{CISCO_XLATE_TYPE:xlate_type} %{WORD:protocol} translation from %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? to %{DATA:src_xlated_interface}:%{IP:src_xlated_ip}/%{DATA:src_xlated_port} duration %{TIME:duration}
+# ASA-7-609001
+CISCOFW609001 : %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))?
+# ASA-7-609002
+CISCOFW609002 : %{CISCO_ACTION:action} %{WORD:protocol} %{DATA:src_interface}:%{IP:src_ip}(/%{INT:src_port})?(\(%{DATA:src_fwuser}\))? duration %{TIME:duration}
+
+
+#== End Cisco ASA ==
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/resources/patterns/common
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/resources/patterns/common b/metron-platform/metron-parsers/src/main/resources/patterns/common
new file mode 100644
index 0000000..10c72dc
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/resources/patterns/common
@@ -0,0 +1,96 @@
+# Forked from https://github.com/elasticsearch/logstash/tree/v1.4.0/patterns
+
+USERNAME [a-zA-Z0-9._-]+
+USER %{USERNAME:UNWANTED}
+INT (?:[+-]?(?:[0-9]+))
+BASE10NUM (?<![0-9.+-])(?>[+-]?(?:(?:[0-9]+(?:\.[0-9]+)?)|(?:\.[0-9]+)))
+NUMBER (?:%{BASE10NUM:UNWANTED})
+BASE16NUM (?<![0-9A-Fa-f])(?:[+-]?(?:0x)?(?:[0-9A-Fa-f]+))
+BASE16FLOAT \b(?<![0-9A-Fa-f.])(?:[+-]?(?:0x)?(?:(?:[0-9A-Fa-f]+(?:\.[0-9A-Fa-f]*)?)|(?:\.[0-9A-Fa-f]+)))\b
+
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+#QUOTEDSTRING (?:(?<!\\)(?:"(?:\\.|[^\\"])*"|(?:'(?:\\.|[^\\'])*')|(?:`(?:\\.|[^\\`])*`)))
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC:UNWANTED}|%{WINDOWSMAC:UNWANTED}|%{COMMONMAC:UNWANTED})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+IP (?:%{IPV6:UNWANTED}|%{IPV4:UNWANTED})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOST %{HOSTNAME:UNWANTED}
+IPORHOST (?:%{HOSTNAME:UNWANTED}|%{IP:UNWANTED})
+HOSTPORT (?:%{IPORHOST}:%{POSINT:PORT})
+
+# paths
+PATH (?:%{UNIXPATH}|%{WINPATH})
+UNIXPATH (?>/(?>[\w_%!$@:.,~-]+|\\.)*)+
+#UNIXPATH (?<![\w\/])(?:/[^\/\s?*]*)+
+TTY (?:/dev/(pts|tty([pq])?)(\w+)?/?(?:[0-9]+))
+WINPATH (?>[A-Za-z]+:|\\)(?:\\[^\\?*]*)+
+URIPROTO [A-Za-z]+(\+[A-Za-z+]+)?
+URIHOST %{IPORHOST}(?::%{POSINT:port})?
+# uripath comes loosely from RFC1738, but mostly from what Firefox
+# doesn't turn into %XX
+URIPATH (?:/[A-Za-z0-9$.+!*'(){},~:;=@#%_\-]*)+
+#URIPARAM \?(?:[A-Za-z0-9]+(?:=(?:[^&]*))?(?:&(?:[A-Za-z0-9]+(?:=(?:[^&]*))?)?)*)?
+URIPARAM \?[A-Za-z0-9$.+!*'|(){},~@#%&/=:;_?\-\[\]]*
+URIPATHPARAM %{URIPATH}(?:%{URIPARAM})?
+URI %{URIPROTO}://(?:%{USER}(?::[^@]*)?@)?(?:%{URIHOST})?(?:%{URIPATHPARAM})?
+
+# Months: January, Feb, 3, 03, 12, December
+MONTH \b(?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\b
+MONTHNUM (?:0?[1-9]|1[0-2])
+MONTHNUM2 (?:0[1-9]|1[0-2])
+MONTHDAY (?:(?:0[1-9])|(?:[12][0-9])|(?:3[01])|[1-9])
+
+# Days: Monday, Tue, Thu, etc...
+DAY (?:Mon(?:day)?|Tue(?:sday)?|Wed(?:nesday)?|Thu(?:rsday)?|Fri(?:day)?|Sat(?:urday)?|Sun(?:day)?)
+
+# Years?
+YEAR (?>\d\d){1,2}
+# Time: HH:MM:SS
+#TIME \d{2}:\d{2}(?::\d{2}(?:\.\d+)?)?
+# I'm still on the fence about using grok to perform the time match,
+# since it's probably slower.
+# TIME %{POSINT<24}:%{POSINT<60}(?::%{POSINT<60}(?:\.%{POSINT})?)?
+HOUR (?:2[0123]|[01]?[0-9])
+MINUTE (?:[0-5][0-9])
+# '60' is a leap second in most time standards and thus is valid.
+SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
+TIME (?<![0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
+# datestamp is YYYY/MM/DD-HH:MM:SS.UUUU (or something like it)
+DATE_US %{MONTHNUM}[/-]%{MONTHDAY}[/-]%{YEAR}
+DATE_EU %{MONTHDAY}[./-]%{MONTHNUM}[./-]%{YEAR}
+ISO8601_TIMEZONE (?:Z|[+-]%{HOUR}(?::?%{MINUTE}))
+ISO8601_SECOND (?:%{SECOND}|60)
+TIMESTAMP_ISO8601 %{YEAR}-%{MONTHNUM}-%{MONTHDAY}[T ]%{HOUR}:?%{MINUTE}(?::?%{SECOND})?%{ISO8601_TIMEZONE}?
+DATE %{DATE_US}|%{DATE_EU}
+DATESTAMP %{DATE}[- ]%{TIME}
+TZ (?:[PMCE][SD]T|UTC)
+DATESTAMP_RFC822 %{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{TIME} %{TZ}
+DATESTAMP_RFC2822 %{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{TIME} %{ISO8601_TIMEZONE}
+DATESTAMP_OTHER %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{TZ} %{YEAR}
+DATESTAMP_EVENTLOG %{YEAR}%{MONTHNUM2}%{MONTHDAY}%{HOUR}%{MINUTE}%{SECOND}
+GREEDYDATA .*
+
+# Syslog Dates: Month Day HH:MM:SS
+SYSLOGTIMESTAMP %{MONTH} +%{MONTHDAY} %{TIME}
+PROG (?:[\w._/%-]+)
+SYSLOGPROG %{PROG:program}(?:\[%{POSINT:pid}\])?
+SYSLOGHOST %{IPORHOST}
+SYSLOGFACILITY <%{NONNEGINT:facility}.%{NONNEGINT:priority}>
+HTTPDATE %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME} %{INT}
+
+# Shortcuts
+QS %{QUOTEDSTRING:UNWANTED}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/resources/patterns/fireeye
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/resources/patterns/fireeye b/metron-platform/metron-parsers/src/main/resources/patterns/fireeye
new file mode 100644
index 0000000..5dc99bf
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/resources/patterns/fireeye
@@ -0,0 +1,9 @@
+GREEDYDATA .*
+POSINT \b(?:[1-9][0-9]*)\b
+UID [0-9.]+
+DATA .*?
+
+FIREEYE_BASE ^<%{POSINT:syslog_pri}>fenotify-%{UID:uid}.alert: %{GREEDYDATA:syslog}
+FIREEYE_MAIN <%{POSINT:syslog_pri}>fenotify-%{DATA:uid}.alert: %{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{DATA:meta}\|%{GREEDYDATA:fedata}
+#\|(.?)\|(.?)\|(.?)\|(.?)\|%{DATA:type}\|(.?)\|%{GREEDYDATA:fedata}
+FIREEYE_SUB ^<%{POSINT:syslog_pri}>fenotify-%{UID:uid}.alert: .*?\|.*?\|.*?\|.*?\|.*?\|%{DATA:type}\|.*?\|%{GREEDYDATA:fedata}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/resources/patterns/sourcefire
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/resources/patterns/sourcefire b/metron-platform/metron-parsers/src/main/resources/patterns/sourcefire
new file mode 100644
index 0000000..672f684
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/resources/patterns/sourcefire
@@ -0,0 +1,30 @@
+POSINT \b(?:[1-9][0-9]*)\b
+NONNEGINT \b(?:[0-9]+)\b
+WORD \b\w+\b
+NOTSPACE \S+
+SPACE \s*
+DATA .*?
+GREEDYDATA .*
+QUOTEDSTRING (?>(?<!\\)(?>"(?>\\.|[^\\"]+)+"|""|(?>'(?>\\.|[^\\']+)+')|''|(?>`(?>\\.|[^\\`]+)+`)|``))
+UUID [A-Fa-f0-9]{8}-(?:[A-Fa-f0-9]{4}-){3}[A-Fa-f0-9]{12}
+
+# Networking
+MAC (?:%{CISCOMAC}|%{WINDOWSMAC}|%{COMMONMAC})
+CISCOMAC (?:(?:[A-Fa-f0-9]{4}\.){2}[A-Fa-f0-9]{4})
+WINDOWSMAC (?:(?:[A-Fa-f0-9]{2}-){5}[A-Fa-f0-9]{2})
+COMMONMAC (?:(?:[A-Fa-f0-9]{2}:){5}[A-Fa-f0-9]{2})
+IPV6 ((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?
+IPV4 (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+IP (?:%{IPV6}|%{IPV4})
+HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b)
+HOST %{HOSTNAME}
+IPORHOST (?:%{HOSTNAME}|%{IP})
+HOSTPORT %{IPORHOST}:%{POSINT}
+
+#Sourcefire Logs
+protocol \{[a-zA-Z0-9]+\}
+ip_src_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+ip_dst_addr (?<![0-9])(?:(?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2})[.](?:25[0-5]|2[0-4][0-9]|[0-1]?[0-9]{1,2}))(?![0-9])
+ip_src_port [0-9]+
+ip_dst_port [0-9]+
+SOURCEFIRE %{GREEDYDATA}%{protocol}\s%{ip_src_addr}\:%{ip_src_port}\s->\s%{ip_dst_addr}\:%{ip_dst_port}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/resources/patterns/yaf
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/resources/patterns/yaf b/metron-platform/metron-parsers/src/main/resources/patterns/yaf
new file mode 100644
index 0000000..c664586
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/resources/patterns/yaf
@@ -0,0 +1,2 @@
+YAF_TIME_FORMAT %{YEAR:UNWANTED}-%{MONTHNUM:UNWANTED}-%{MONTHDAY:UNWANTED}[T ]%{HOUR:UNWANTED}:%{MINUTE:UNWANTED}:%{SECOND:UNWANTED}
+YAF_DELIMITED %{YAF_TIME_FORMAT:start_time}\|%{YAF_TIME_FORMAT:end_time}\|%{SPACE:UNWANTED}%{BASE10NUM:duration}\|%{SPACE:UNWANTED}%{BASE10NUM:rtt}\|%{SPACE:UNWANTED}%{INT:protocol}\|%{SPACE:UNWANTED}%{IP:ip_src_addr}\|%{SPACE:UNWANTED}%{INT:ip_src_port}\|%{SPACE:UNWANTED}%{IP:ip_dst_addr}\|%{SPACE:UNWANTED}%{INT:ip_dst_port}\|%{SPACE:UNWANTED}%{DATA:iflags}\|%{SPACE:UNWANTED}%{DATA:uflags}\|%{SPACE:UNWANTED}%{DATA:riflags}\|%{SPACE:UNWANTED}%{DATA:ruflags}\|%{SPACE:UNWANTED}%{WORD:isn}\|%{SPACE:UNWANTED}%{DATA:risn}\|%{SPACE:UNWANTED}%{DATA:tag}\|%{GREEDYDATA:rtag}\|%{SPACE:UNWANTED}%{INT:pkt}\|%{SPACE:UNWANTED}%{INT:oct}\|%{SPACE:UNWANTED}%{INT:rpkt}\|%{SPACE:UNWANTED}%{INT:roct}\|%{SPACE:UNWANTED}%{INT:app}\|%{GREEDYDATA:end_reason}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/main/scripts/start_parser_topology.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/main/scripts/start_parser_topology.sh b/metron-platform/metron-parsers/src/main/scripts/start_parser_topology.sh
new file mode 100755
index 0000000..7000935
--- /dev/null
+++ b/metron-platform/metron-parsers/src/main/scripts/start_parser_topology.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+METRON_VERSION=0.1BETA
+METRON_HOME=/usr/metron/$METRON_VERSION
+TOPOLOGY_JAR=metron-parsers-$METRON_VERSION.jar
+storm jar $METRON_HOME/lib/$TOPOLOGY_JAR org.apache.storm.flux.Flux --remote $METRON_HOME/config/$1/remote.yaml --filter $METRON_HOME/config/parsers.properties

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractConfigTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractConfigTest.java
new file mode 100644
index 0000000..7f8ee7e
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractConfigTest.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.github.fge.jackson.JsonLoader;
+import com.github.fge.jsonschema.core.report.ProcessingReport;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import com.github.fge.jsonschema.main.JsonValidator;
+
+/**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: The class <code>AbstractConfigTest</code> is
+ * an abstract base class for implementing JUnit tests that need to use
+ * config to connect to ZooKeeper and HBase. The <code>setup</code> method will attempt to
+ * load a properties from a file, located in src/test/resources,
+ * with the same name as the class.</li>
+ * <li>Created: Oct 10, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AbstractConfigTest  extends AbstractTestContext {
+         /**
+         * The configPath.
+         */
+        protected String configPath=null;   
+        
+        /**
+        * The configName.
+        */
+       protected String configName=null;           
+
+        /**
+         * The config.
+         */
+        private Configuration config=null;
+        
+         /**
+         * The settings.
+         */
+        Map<String, String> settings=null;       
+
+        /**
+         * The schemaJsonString.
+         */
+        private String schemaJsonString = null;
+        /**
+         * Any Object for mavenMode
+         * @parameter
+         *   expression="${mode}"
+         *   default-value="local"
+         */
+         private Object mode="local";        
+
+        /**
+         * Constructs a new <code>AbstractConfigTest</code> instance.
+         * @throws Exception 
+         */
+        public AbstractConfigTest() throws Exception {
+            super.setUp();
+        }
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @param name the name of the test case.
+         */
+        public AbstractConfigTest(String name) {
+            super(name);
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#setUp()
+         */
+        protected void setUp(String configName) throws Exception {
+            super.setUp();
+            this.setConfigPath("src/test/resources/config/"+getClass().getSimpleName()+".config");
+            try {
+                this.setConfig(new PropertiesConfiguration(this.getConfigPath()));
+               
+                Map configOptions= SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName+"=");
+                this.setSettings(SettingsLoader.getConfigOptions((PropertiesConfiguration)this.config, configName + "."));
+                this.getSettings().put(configName, (String) configOptions.get(configName));
+            } catch (ConfigurationException e) {
+                e.printStackTrace();
+                throw new Exception("Config not found !!"+e);
+            }
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#tearDown()
+         */
+        @Override
+        protected void tearDown() throws Exception {
+
+        }
+
+        
+         /**
+         * validateJsonData
+         * @param jsonSchema
+         * @param jsonData
+         * @return
+         * @throws Exception
+         */
+         
+        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
+            throws Exception {
+    
+            final JsonNode d = JsonLoader.fromString(jsonData);
+            final JsonNode s = JsonLoader.fromString(jsonSchema);
+    
+            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
+            JsonValidator v = factory.getValidator();
+    
+            ProcessingReport report = v.validate(s, d);
+            System.out.println(report);
+            
+            return report.toString().contains("success");
+        }
+        
+        protected String readSchemaFromFile(URL schema_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    schema_url.getFile()));
+            String line;
+            StringBuilder sb = new StringBuilder();
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                sb.append(line);
+            }
+            br.close();
+
+            String schema_string = sb.toString().replaceAll("\n", "");
+            schema_string = schema_string.replaceAll(" ", "");
+
+            System.out.println("Read in schema: " + schema_string);
+
+            return schema_string;
+        }        
+  
+        protected String[] readTestDataFromFile(String test_data_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    new File(test_data_url)));
+            ArrayList<String> inputDataLines = new ArrayList<String>();
+           
+            String line;
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                inputDataLines.add(line.toString().replaceAll("\n", ""));
+            }
+            br.close();
+            String[] inputData = new String[inputDataLines.size()];
+            inputData = inputDataLines.toArray(inputData);
+
+            return inputData;
+        }          
+       /**
+        * Skip Tests
+        */
+       public boolean skipTests(Object mode){
+           if(mode.toString().equals("local")){
+               return true;
+           }else {
+               return false;
+           }
+       }
+       
+       /**
+        * Returns the mode.
+        * @return the mode.
+        */
+       
+       public Object getMode() {
+           return mode;
+       }
+
+       /**
+        * Sets the mode.
+        * @param mode the mode.
+        */
+       
+       public void setMode(Object mode) {
+       
+           this.mode = mode;
+       }
+
+    
+         /**
+         * @param schemaJsonString the schema JSON string to set.
+         */
+        public void setSchemaJsonString(String schemaJsonString) {
+            this.schemaJsonString=schemaJsonString;
+        }
+
+    
+         /**
+         * @return
+         */
+        public String getSchemaJsonString() {
+           return this.schemaJsonString;
+        }
+        
+        /**
+        * Returns the configPath.
+        * @return the configPath.
+        */
+       public String getConfigPath() {
+           return configPath;
+       }
+    
+       /**
+        * Sets the configPath.
+        * @param configPath the configPath.
+        */
+       public void setConfigPath(String configPath) {
+           this.configPath = configPath;
+       }    
+       /**
+        * Returns the config.
+        * @return the config.
+        */
+       
+       public Configuration getConfig() {
+           return config;
+       }
+    
+       /**
+        * Sets the config.
+        * @param config the config.
+        */
+       
+       public void setConfig(Configuration config) {
+       
+           this.config = config;
+       }  
+       /**
+        * Returns the settings.
+        * @return the settings.
+        */
+       
+       public Map<String, String> getSettings() {
+           return settings;
+       }
+
+       /**
+        * Sets the settings.
+        * @param settings the settings.
+        */
+       
+       public void setSettings(Map<String, String> settings) {
+           this.settings = settings;
+       }   
+       /**
+       * Returns the configName.
+       * @return the configName.
+       */
+      public String getConfigName() {
+          return configName;
+      }
+
+      /**
+       * Sets the configName.
+       * @param configName the configName.
+       */
+      public void setConfigName(String configName) {  
+          this.configName = configName;
+      }       
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractSchemaTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractSchemaTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractSchemaTest.java
new file mode 100644
index 0000000..22c736e
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractSchemaTest.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.net.URL;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.github.fge.jackson.JsonLoader;
+import com.github.fge.jsonschema.core.report.ProcessingReport;
+import com.github.fge.jsonschema.main.JsonSchemaFactory;
+import com.github.fge.jsonschema.main.JsonValidator;
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: The class <code>AbstractSchemaTest</code> is
+ * an abstract base class for implementing JUnit tests that need to load a
+ * Json Schema. The <code>setup</code> method will attempt to
+ * load a properties from a file, located in src/test/resources,
+ * with the same name as the class.</li>
+ * <li>Created: Aug 7, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AbstractSchemaTest  extends AbstractConfigTest{
+        
+        
+         /**
+         * The schemaJsonString.
+         */
+        private String schemaJsonString = null;
+        /**
+         * Any Object for mavenMode
+         * @parameter
+         *   expression="${mode}"
+         *   default-value="local"
+         */
+         private Object mode="local";        
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @throws Exception 
+         */
+        public AbstractSchemaTest() throws Exception {
+            super.setUp();
+        }
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @param name the name of the test case.
+         */
+        public AbstractSchemaTest(String name) {
+            super(name);
+            try{
+                if(System.getProperty("mode")!=null){
+                    setMode(System.getProperty("mode") );                
+                }else
+                {
+                    setMode("local");
+                }
+            }catch(Exception ex){
+                setMode("local");
+            }            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#setUp()
+         */
+        @Override
+        protected void setUp() throws Exception {
+            super.setUp();
+            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#tearDown()
+         */
+        @Override
+        protected void tearDown() throws Exception {
+
+        }
+
+        
+         /**
+         * validateJsonData
+         * @param jsonSchema
+         * @param jsonData
+         * @return
+         * @throws Exception
+         */
+         
+        protected boolean validateJsonData(final String jsonSchema, final String jsonData)
+            throws Exception {
+    
+            final JsonNode d = JsonLoader.fromString(jsonData);
+            final JsonNode s = JsonLoader.fromString(jsonSchema);
+    
+            final JsonSchemaFactory factory = JsonSchemaFactory.byDefault();
+            JsonValidator v = factory.getValidator();
+    
+            ProcessingReport report = v.validate(s, d);
+            System.out.println(report);
+            
+            return report.toString().contains("success");
+        }
+        
+        protected String readSchemaFromFile(URL schema_url) throws Exception {
+            BufferedReader br = new BufferedReader(new FileReader(
+                    schema_url.getFile()));
+            String line;
+            StringBuilder sb = new StringBuilder();
+            while ((line = br.readLine()) != null) {
+                System.out.println(line);
+                sb.append(line);
+            }
+            br.close();
+
+            String schema_string = sb.toString().replaceAll("\n", "");
+            schema_string = schema_string.replaceAll(" ", "");
+
+            System.out.println("Read in schema: " + schema_string);
+
+            return schema_string;
+
+        }        
+        
+       /**
+        * Skip Tests
+        */
+       public boolean skipTests(Object mode){
+           if(mode.toString().equals("local")){
+               return true;
+           }else {
+               return false;
+           }
+       }
+       
+       /**
+        * Returns the mode.
+        * @return the mode.
+        */
+       
+       public Object getMode() {
+           return mode;
+       }
+
+       /**
+        * Sets the mode.
+        * @param mode the mode.
+        */
+       
+       public void setMode(Object mode) {
+       
+           this.mode = mode;
+       }
+
+    
+     /**
+     
+     * @param schemaJsonString the schema JSON string to set.
+     */
+     
+    public void setSchemaJsonString(String schemaJsonString) {
+        this.schemaJsonString=schemaJsonString;
+    }
+
+    
+     /**
+     
+     * @return
+     */
+     
+    public String getSchemaJsonString() {
+       return this.schemaJsonString;
+    }
+     
+}
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractTestContext.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractTestContext.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractTestContext.java
new file mode 100644
index 0000000..134b896
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/AbstractTestContext.java
@@ -0,0 +1,190 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+import org.junit.After;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Properties;
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: The class <code>AbstractTestContext</code> is
+ * an abstract base class for implementing JUnit tests that need to load a
+ * test properties. The <code>setup</code> method will attempt to
+ * load a properties from a file, located in src/test/resources,
+ * with the same name as the class.</li>
+ * <li>Created: Aug 7, 2014</li>
+ * </ul>
+ * @version $Revision: 1.1 $
+ */
+public class AbstractTestContext {
+         /**
+         * The testProps.
+         */
+        protected File testPropFile=null;
+
+        /**
+         * The properties loaded for test.
+         */
+        protected Properties testProperties=new Properties();
+        
+        /**
+         * Any Object for mavenMode
+         * @parameter
+         *   expression="${mode}"
+         *   default-value="global"
+         */
+         private Object mode="local";        
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         */
+        public AbstractTestContext() {
+            super();
+        }
+
+        /**
+         * Constructs a new <code>AbstractTestContext</code> instance.
+         * @param name the name of the test case.
+         */
+        public AbstractTestContext(String name) {
+            try{
+                if(System.getProperty("mode")!=null){
+                    setMode(System.getProperty("mode") );                
+                }else
+                {
+                    setMode("local");
+                }
+            }catch(Exception ex){
+                setMode("local");
+            }            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#setUp()
+         */
+        protected void setUp() throws Exception {
+            InputStream input=null;
+            File directory = new File("src/test/resources");
+            if (!directory.isDirectory()) {
+                return;
+            }
+            File file = new File(directory, getClass().getSimpleName() + ".properties");
+            if (!file.canRead()) {
+                return;
+            }
+            setTestPropFile(file);
+            try{
+                input=new FileInputStream(file);
+                testProperties.load(input);
+            }catch(IOException ex){
+                ex.printStackTrace();
+                throw new Exception("failed to load properties");
+            }
+            
+            
+        }
+
+        /*
+         * (non-Javadoc)
+         * @see junit.framework.TestCase#tearDown()
+         */
+        @After
+        protected void tearDown() throws Exception {
+
+        }
+
+        /**
+         * Returns the testProperties.
+         * @return the testProperties.
+         */
+        
+        public Properties getTestProperties() {
+            return testProperties;
+        }
+
+        /**
+         * Sets the testProperties.
+         * @param testProperties the testProperties.
+         */
+        
+        public void setTestProperties(Properties testProperties) {
+        
+            this.testProperties = testProperties;
+        }    
+        /**
+        * Returns the testPropFile.
+        * @return the testPropFile.
+        */
+       
+       public File getTestPropFile() {
+           return testPropFile;
+       }
+
+       /**
+        * Sets the testPropFile.
+        * @param testPropFile the testPropFile.
+        */
+       
+       public void setTestPropFile(File testPropFile) {
+       
+           this.testPropFile = testPropFile;
+       }     
+       
+       /**
+        * Skip Tests
+        */
+       public boolean skipTests(Object mode){
+           if(mode.toString().equals("local")){
+               return true;
+           }else {
+               return false;
+           }
+       }
+       
+       /**
+        * Returns the mode.
+        * @return the mode.
+        */
+       
+       public Object getMode() {
+           return mode;
+       }
+
+       /**
+        * Sets the mode.
+        * @param mode the mode.
+        */
+       
+       public void setMode(Object mode) {
+       
+           this.mode = mode;
+       }
+
+       protected void assertNotNull() {}
+       protected void assertNotNull(Object o) {}
+     
+    }
+
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/GrokParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/GrokParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/GrokParserTest.java
new file mode 100644
index 0000000..ec80396
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/GrokParserTest.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
+import junit.framework.Assert;
+import org.adrianwalker.multilinestring.Multiline;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+import org.junit.Test;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
/**
 * Tests {@link GrokParser} against a sample YAF record: the raw delimited line
 * in {@code expectedRaw} must parse into the JSON document held in
 * {@code expectedParsedString}.
 */
public class GrokParserTest {

  // Raw pipe-delimited YAF flow record used as parser input.
  public String expectedRaw = "2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AS|       0|       0|       0|22efa001|00000000|000|000|       1|      44|       0|       0|    0|idle";

  /**
   * {
   * "roct":0,
   * "end_reason":"idle",
   * "ip_dst_addr":"10.0.2.15",
   * "iflags":"AS",
   * "rpkt":0,
   * "original_string":"2016-01-28 15:29:48.512|2016-01-28 15:29:48.512|   0.000|   0.000|  6|                          216.21.170.221|   80|                               10.0.2.15|39468|      AS|       0|       0|       0|22efa001|00000000|000|000|       1|      44|       0|       0|    0|idle",
   * "tag":0,
   * "risn":0,
   * "ip_dst_port":39468,
   * "ruflags":0,
   * "app":0,
   * "protocol":6
   * ,"isn":"22efa001",
   * "uflags":0,"duration":"0.000",
   * "oct":44,
   * "ip_src_port":80,
   * "end_time":"2016-01-28 15:29:48.512",
   * "riflags":0,"start_time":"2016-01-28 15:29:48.512",
   * "rtt":"0.000",
   * "rtag":0,
   * "pkt":1,
   * "ip_src_addr":"216.21.170.221"
   * }
   */
  @Multiline
  private String expectedParsedString; // value injected from the Javadoc above by @Multiline — do not edit that comment

  // Parsed form of expectedParsedString, rebuilt before each test.
  private JSONObject expectedParsed;

  @Before
  public void parseJSON() throws ParseException {
    JSONParser jsonParser = new JSONParser();
    expectedParsed = (JSONObject) jsonParser.parse(expectedParsedString);
  }

  // Parses the raw YAF line with the Grok pattern and compares the single
  // resulting JSON message against the expected document.
  @Test
  public void test() throws IOException, ParseException {
    String metronHdfsHome = "../metron-parsers/src/main/";
    String grokHdfsPath = "/patterns/yaf";
    String patternLabel = "YAF_DELIMITED";
    GrokParser grokParser = new GrokParser(grokHdfsPath, patternLabel);
    grokParser.withMetronHDFSHome(metronHdfsHome);
    grokParser.init();
    byte[] rawMessage = expectedRaw.getBytes();
    List<JSONObject> parsedList = grokParser.parse(rawMessage);
    Assert.assertEquals(1, parsedList.size());
    compare(expectedParsed, parsedList.get(0));
  }

  /**
   * Compares two JSON objects, failing the test on any discrepancy: keys only
   * on one side, or differing values. Integer-like values are compared
   * numerically (Long vs Integer mismatches are tolerated).
   */
  public boolean compare(JSONObject expected, JSONObject actual) {
    MapDifference mapDifferences = Maps.difference(expected, actual);
    if (mapDifferences.entriesOnlyOnLeft().size() > 0) Assert.fail("Expected JSON has extra parameters: " + mapDifferences.entriesOnlyOnLeft());
    if (mapDifferences.entriesOnlyOnRight().size() > 0) Assert.fail("Actual JSON has extra parameters: " + mapDifferences.entriesOnlyOnRight());
    Map actualDifferences = new HashMap();
    if (mapDifferences.entriesDiffering().size() > 0) {
      Map differences = Collections.unmodifiableMap(mapDifferences.entriesDiffering());
      for (Object key : differences.keySet()) {
        Object expectedValueObject = expected.get(key);
        Object actualValueObject = actual.get(key);
        if (expectedValueObject instanceof Long || expectedValueObject instanceof Integer) {
          // Normalize both sides to Long so 1 (Integer) == 1L (Long).
          Long expectedValue = Long.parseLong(expectedValueObject.toString());
          Long actualValue = Long.parseLong(actualValueObject.toString());
          if (!expectedValue.equals(actualValue)) {
            actualDifferences.put(key, differences.get(key));
          }
        } else {
          actualDifferences.put(key, differences.get(key));
        }
      }
    }
    if (actualDifferences.size() > 0) Assert.fail("Expected and Actual JSON values don't match: " + actualDifferences);
    return true;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/SettingsLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/SettingsLoader.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/SettingsLoader.java
new file mode 100644
index 0000000..1c64c16
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/SettingsLoader.java
@@ -0,0 +1,166 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.commons.configuration.XMLConfiguration;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+public class SettingsLoader {
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject loadEnvironmentIdnetifier(String config_path)
+			throws ConfigurationException {
+		Configuration config = new PropertiesConfiguration(config_path);
+
+		String customer = config.getString("customer.id", "unknown");
+		String datacenter = config.getString("datacenter.id", "unknown");
+		String instance = config.getString("instance.id", "unknown");
+
+		JSONObject identifier = new JSONObject();
+		identifier.put("customer", customer);
+		identifier.put("datacenter", datacenter);
+		identifier.put("instance", instance);
+
+		return identifier;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject loadTopologyIdnetifier(String config_path)
+			throws ConfigurationException {
+		Configuration config = new PropertiesConfiguration(config_path);
+
+		String topology = config.getString("topology.id", "unknown");
+		String instance = config.getString("instance.id", "unknown");
+
+		JSONObject identifier = new JSONObject();
+		identifier.put("topology", topology);
+		identifier.put("topology_instance", instance);
+
+		return identifier;
+	}
+	
+
+	public static String generateTopologyName(JSONObject env, JSONObject topo) {
+
+		return (env.get("customer") + "_" + env.get("datacenter") + "_"
+				+ env.get("instance") + "_" + topo.get("topology") + "_" + topo.get("topology_instance"));
+	}
+	
+	@SuppressWarnings("unchecked")
+	public static JSONObject generateAlertsIdentifier(JSONObject env, JSONObject topo)
+	{
+		JSONObject identifier = new JSONObject();
+		identifier.put("environment", env);
+		identifier.put("topology", topo);
+		
+		return identifier;
+	}
+
+	public static Map<String, JSONObject> loadRegexAlerts(String config_path)
+			throws ConfigurationException, ParseException {
+		XMLConfiguration alert_rules = new XMLConfiguration();
+		alert_rules.setDelimiterParsingDisabled(true);
+		alert_rules.load(config_path);
+
+		//int number_of_rules = alert_rules.getList("rule.pattern").size();
+
+		String[] patterns = alert_rules.getStringArray("rule.pattern");
+		String[] alerts = alert_rules.getStringArray("rule.alert");
+
+		JSONParser pr = new JSONParser();
+		Map<String, JSONObject> rules = new HashMap<String, JSONObject>();
+
+		for (int i = 0; i < patterns.length; i++)
+			rules.put(patterns[i], (JSONObject) pr.parse(alerts[i]));
+
+		return rules;
+	}
+
+	public static Map<String, JSONObject> loadKnownHosts(String config_path)
+			throws ConfigurationException, ParseException {
+		Configuration hosts = new PropertiesConfiguration(config_path);
+
+		Iterator<String> keys = hosts.getKeys();
+		Map<String, JSONObject> known_hosts = new HashMap<String, JSONObject>();
+		JSONParser parser = new JSONParser();
+
+		while (keys.hasNext()) {
+			String key = keys.next().trim();
+			JSONArray value = (JSONArray) parser.parse(hosts.getProperty(key)
+					.toString());
+			known_hosts.put(key, (JSONObject) value.get(0));
+		}
+
+		return known_hosts;
+	}
+
+	public static void printConfigOptions(PropertiesConfiguration config, String path_fragment)
+	{
+		Iterator<String> itr = config.getKeys();
+		
+		while(itr.hasNext())
+		{
+			String key = itr.next();
+			
+			if(key.contains(path_fragment))
+			{
+				
+				System.out.println("[Metron] Key: " + key + " -> " + config.getString(key));
+			}
+		}
+
+	}
+	
+	public static void printOptionalSettings(Map<String, String> settings)
+	{
+		for(String setting: settings.keySet())
+		{
+			System.out.println("[Metron] Optional Setting: " + setting + " -> " +settings.get(setting));
+		}
+
+	}
+	
+	public static Map<String, String> getConfigOptions(PropertiesConfiguration config, String path_fragment)
+	{
+		Iterator<String> itr = config.getKeys();
+		Map<String, String> settings = new HashMap<String, String>();
+		
+		while(itr.hasNext())
+		{
+			String key = itr.next();
+			
+			if(key.contains(path_fragment))
+			{
+				String tmp_key = key.replace(path_fragment, "");
+				settings.put(tmp_key, config.getString(key));
+			}
+		}
+
+		return settings;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/asa/GrokAsaParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/asa/GrokAsaParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/asa/GrokAsaParserTest.java
new file mode 100644
index 0000000..8e9da0d
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/asa/GrokAsaParserTest.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.asa;
+
+import java.util.Iterator;
+import java.util.Map;
+
+import org.apache.metron.parsers.sourcefire.BasicSourcefireParser;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractConfigTest;
+import org.junit.Assert;
+
+
+/**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: </li>
+ * <li>Created: Feb 17, 2015 by: </li>
+ * </ul>
+ * @author $Author:  $
+ * @version $Revision: 1.1 $
+ */
+public class GrokAsaParserTest extends AbstractConfigTest{
+     /**
+     * The grokAsaStrings.
+     */
+    private static String[] grokAsaStrings=null;
+ 
+     /**
+     * The grokAsaParser.
+     */
+     
+    private GrokAsaParser grokAsaParser=null;
+    
+     /**
+     * Constructs a new <code>GrokAsaParserTest</code> instance.
+     * @throws Exception
+     */
+     
+    public GrokAsaParserTest() throws Exception {
+          super();  
+        
+    }
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void tearDownAfterClass() throws Exception {
+		setGrokAsaStrings(null);
+	}
+
+    /* 
+     * (non-Javadoc)
+     * @see junit.framework.TestCase#setUp()
+     */
+	public void setUp() throws Exception {
+          super.setUp("org.apache.metron.parsers.asa.GrokAsaParserTest");
+          setGrokAsaStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+          grokAsaParser = new GrokAsaParser();		
+	}
+
+		/**
+		 * 	
+		 * 	
+		 * @throws java.lang.Exception
+		 */
+		public void tearDown() throws Exception {
+			grokAsaParser = null;
+		}
+
+		/**
+		 * Test method for {@link BasicSourcefireParser#parse(byte[])}.
+		 */
+		@SuppressWarnings({ "rawtypes" })
+		public void testParse() {
+		    
+			for (String grokAsaString : getGrokAsaStrings()) {
+				JSONObject parsed = grokAsaParser.parse(grokAsaString.getBytes()).get(0);
+				Assert.assertNotNull(parsed);
+			
+				System.out.println(parsed);
+				JSONParser parser = new JSONParser();
+
+				Map json=null;
+				try {
+					json = (Map) parser.parse(parsed.toJSONString());
+				} catch (ParseException e) {
+					e.printStackTrace();
+				}
+				//Ensure JSON returned is not null/empty
+				Assert.assertNotNull(json);
+				
+				Iterator iter = json.entrySet().iterator();
+				
+
+				while (iter.hasNext()) {
+					Map.Entry entry = (Map.Entry) iter.next();
+					Assert.assertNotNull(entry);
+					
+					String key = (String) entry.getKey();
+					Assert.assertNotNull(key);
+					
+					String value = (String) json.get("CISCO_TAGGED_SYSLOG").toString();
+					Assert.assertNotNull(value);
+				}
+			}
+		}
+
+		/**
+		 * Returns GrokAsa Input String
+		 */
+		public static String[] getGrokAsaStrings() {
+			return grokAsaStrings;
+		}
+
+			
+		/**
+		 * Sets GrokAsa Input String
+		 */	
+		public static void setGrokAsaStrings(String[] strings) {
+			GrokAsaParserTest.grokAsaStrings = strings;
+		}
+	    
+	    /**
+	     * Returns the grokAsaParser.
+	     * @return the grokAsaParser.
+	     */
+	    
+	    public GrokAsaParser getGrokAsaParser() {
+	        return grokAsaParser;
+	    }
+
+
+	    /**
+	     * Sets the grokAsaParser.
+	     * @param grokAsaParser the grokAsaParser.
+	     */
+	    
+	    public void setGrokAsaParser(GrokAsaParser grokAsaParser) {
+	    
+	        this.grokAsaParser = grokAsaParser;
+	    }
+		
+	}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bolt/ParserBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bolt/ParserBoltTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bolt/ParserBoltTest.java
new file mode 100644
index 0000000..a6f2ee1
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bolt/ParserBoltTest.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.bolt;
+
+import org.apache.metron.test.bolt.BaseBoltTest;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.parsers.interfaces.MessageFilter;
+import org.apache.metron.parsers.interfaces.MessageParser;
+import org.apache.metron.common.interfaces.MessageWriter;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Test;
+import org.mockito.Mock;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
/**
 * Unit test for {@code ParserBolt}: verifies that prepare() initializes the
 * parser and writer, that only messages passing validation and the configured
 * filter are written (with a "source.type" field added), and that writer
 * failures are reported to the output collector.
 */
public class ParserBoltTest extends BaseBoltTest {

  // Mocked collaborators; client/cache/tuple/topologyContext/outputCollector
  // come from BaseBoltTest.
  @Mock
  private MessageParser<JSONObject> parser;

  @Mock
  private MessageWriter<JSONObject> writer;

  @Mock
  private MessageFilter<JSONObject> filter;

  @Test
  public void test() throws Exception {
    String sensorType = "yaf";
    ParserBolt parserBolt = new ParserBolt("zookeeperUrl", sensorType, parser, writer);
    parserBolt.setCuratorFramework(client);
    parserBolt.setTreeCache(cache);
    parserBolt.prepare(new HashMap(), topologyContext, outputCollector);
    // prepare() must initialize both collaborators exactly once.
    verify(parser, times(1)).init();
    verify(writer, times(1)).init();
    byte[] sampleBinary = "some binary message".getBytes();
    JSONParser jsonParser = new JSONParser();
    final JSONObject sampleMessage1 = (JSONObject) jsonParser.parse("{ \"field1\":\"value1\" }");
    final JSONObject sampleMessage2 = (JSONObject) jsonParser.parse("{ \"field2\":\"value2\" }");
    List<JSONObject> messages = new ArrayList<JSONObject>() {{
      add(sampleMessage1);
      add(sampleMessage2);
    }};
    // The bolt is expected to enrich each written message with source.type.
    final JSONObject finalMessage1 = (JSONObject) jsonParser.parse("{ \"field1\":\"value1\", \"source.type\":\"" + sensorType + "\" }");
    final JSONObject finalMessage2 = (JSONObject) jsonParser.parse("{ \"field2\":\"value2\", \"source.type\":\"" + sensorType + "\" }");
    when(tuple.getBinary(0)).thenReturn(sampleBinary);
    when(parser.parse(sampleBinary)).thenReturn(messages);
    // Phase 1: only message1 passes validation, so only finalMessage1 is written.
    when(parser.validate(eq(messages.get(0)))).thenReturn(true);
    when(parser.validate(eq(messages.get(1)))).thenReturn(false);
    parserBolt.execute(tuple);
    verify(writer, times(1)).write(eq(sensorType), any(Configurations.class), eq(tuple), eq(finalMessage1));
    verify(outputCollector, times(1)).ack(tuple);
    // Phase 2: both validate, but the filter drops message1, so message2 is written.
    when(parser.validate(eq(messages.get(0)))).thenReturn(true);
    when(parser.validate(eq(messages.get(1)))).thenReturn(true);
    when(filter.emitTuple(messages.get(0))).thenReturn(false);
    when(filter.emitTuple(messages.get(1))).thenReturn(true);
    parserBolt.withMessageFilter(filter);
    parserBolt.execute(tuple);
    verify(writer, times(1)).write(eq(sensorType), any(Configurations.class), eq(tuple), eq(finalMessage2));
    verify(outputCollector, times(2)).ack(tuple);
    // Phase 3: a writer failure must be reported as an error on the collector.
    doThrow(new Exception()).when(writer).write(eq(sensorType), any(Configurations.class), eq(tuple), eq(finalMessage2));
    parserBolt.execute(tuple);
    verify(outputCollector, times(1)).reportError(any(Throwable.class));
  }

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BasicBroParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BasicBroParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BasicBroParserTest.java
new file mode 100644
index 0000000..3ed1b2c
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BasicBroParserTest.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.bro;
+
+import java.util.Map;
+
+import junit.framework.TestCase;
+
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Assert;
+import org.apache.metron.parsers.bro.BasicBroParser;
+
+// Unit tests for BasicBroParser: each test parses a raw Bro JSON record and checks that the
+// parser normalizes field names (e.g. source_ip -> ip_src_addr) and preserves key values.
+// NOTE(review): assertEquals arguments are (actual, expected) throughout — reversed from
+// JUnit's (expected, actual) convention, so failure messages will read backwards.
+public class BasicBroParserTest extends TestCase {
+
+	/**
+	 * The parser.
+	 */
+	private BasicBroParser broParser = null;
+	// Parses the raw input strings so expected values can be read back for comparison.
+	private JSONParser jsonParser = null;
+
+	/**
+	 * Constructs a new <code>BasicBroParserTest</code> instance.
+	 *
+	 * @throws Exception
+	 */
+	public BasicBroParserTest() throws Exception {
+		broParser = new BasicBroParser();
+		jsonParser = new JSONParser();
+	}
+
+    // Message WITHOUT the usual { "<protocol>": { ... } } wrapper: source_ip/dest_ip must be
+    // renamed to ip_src_addr/ip_dst_addr, and the "type" field surfaced as "protocol".
+    public void testUnwrappedBroMessage() throws ParseException {
+        String rawMessage = "{\"timestamp\":\"1449511228474\",\"uid\":\"CFgSLp4HgsGqXnNjZi\",\"source_ip\":\"104.130.172.191\",\"source_port\":33893,\"dest_ip\":\"69.20.0.164\",\"dest_port\":53,\"proto\":\"udp\",\"trans_id\":3514,\"rcode\":3,\"rcode_name\":\"NXDOMAIN\",\"AA\":false,\"TC\":false,\"RD\":false,\"RA\":false,\"Z\":0,\"rejected\":false,\"sensor\":\"cloudbro\",\"type\":\"dns\"}";
+
+        JSONObject rawJson = (JSONObject)jsonParser.parse(rawMessage);
+
+        JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+
+				Assert.assertEquals(broJson.get("timestamp"), Long.parseLong(rawJson.get("timestamp").toString()));
+			  Assert.assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("source_ip").toString());
+			  Assert.assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("dest_ip").toString());
+			  Assert.assertEquals(broJson.get("ip_src_port"), rawJson.get("source_port"));
+        Assert.assertEquals(broJson.get("ip_dst_port"), rawJson.get("dest_port"));
+        Assert.assertEquals(broJson.get("uid").toString(), rawJson.get("uid").toString());
+        Assert.assertEquals(broJson.get("trans_id").toString(), rawJson.get("trans_id").toString());
+        Assert.assertEquals(broJson.get("sensor").toString(), rawJson.get("sensor").toString());
+        Assert.assertEquals(broJson.get("protocol").toString(), rawJson.get("type").toString());
+        Assert.assertEquals(broJson.get("rcode").toString(), rawJson.get("rcode").toString());
+        Assert.assertEquals(broJson.get("rcode_name").toString(), rawJson.get("rcode_name").toString());
+				Assert.assertTrue(broJson.get("original_string").toString().startsWith("DNS"));
+    }
+
+	// Wrapped message: the single outer key ("http") names the protocol; id.orig_h/id.resp_h
+	// and id.orig_p/id.resp_p map to the normalized ip_src/ip_dst address and port fields.
+	@SuppressWarnings("rawtypes")
+	public void testHttpBroMessage() throws ParseException {
+		String rawMessage = "{\"http\":{\"ts\":1402307733473,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}";
+
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+
+		JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+		Assert.assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		Assert.assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		Assert.assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("id.resp_h").toString());
+		Assert.assertEquals(broJson.get("ip_src_port").toString(), rawJson.get("id.orig_p").toString());
+		Assert.assertEquals(broJson.get("ip_dst_port").toString(), rawJson.get("id.resp_p").toString());
+		Assert.assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+
+		Assert.assertEquals(broJson.get("uid").toString(), rawJson.get("uid").toString());
+		Assert.assertEquals(broJson.get("method").toString(), rawJson.get("method").toString());
+		Assert.assertEquals(broJson.get("host").toString(), rawJson.get("host").toString());
+		Assert.assertEquals(broJson.get("resp_mime_types").toString(), rawJson.get("resp_mime_types").toString());
+	}
+
+	// Same HTTP record but with a fractional-seconds timestamp (1457149494.166991); the
+	// parser is expected to convert it to epoch milliseconds ("1457149494166").
+	@SuppressWarnings("rawtypes")
+	public void testHttpDecimalBroMessage() throws ParseException {
+		String rawMessage = "{\"http\":{\"ts\":1457149494.166991,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}";
+		String expectedTimestamp = "1457149494166";
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+
+		JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+		Assert.assertEquals(broJson.get("timestamp").toString(), expectedTimestamp);
+		Assert.assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		Assert.assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("id.resp_h").toString());
+		Assert.assertEquals(broJson.get("ip_src_port").toString(), rawJson.get("id.orig_p").toString());
+		Assert.assertEquals(broJson.get("ip_dst_port").toString(), rawJson.get("id.resp_p").toString());
+		Assert.assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+
+		Assert.assertEquals(broJson.get("uid").toString(), rawJson.get("uid").toString());
+		Assert.assertEquals(broJson.get("method").toString(), rawJson.get("method").toString());
+		Assert.assertEquals(broJson.get("host").toString(), rawJson.get("host").toString());
+		Assert.assertEquals(broJson.get("resp_mime_types").toString(), rawJson.get("resp_mime_types").toString());
+	}
+
+
+	// DNS record wrapped in a "dns" key: address/port normalization plus pass-through of
+	// DNS-specific fields (qtype, trans_id).
+	@SuppressWarnings("rawtypes")
+	public void testDnsBroMessage() throws ParseException {
+		String rawMessage = "{\"dns\":{\"ts\":1402308259609,\"uid\":\"CuJT272SKaJSuqO0Ia\",\"id.orig_h\":\"10.122.196.204\",\"id.orig_p\":33976,\"id.resp_h\":\"144.254.71.184\",\"id.resp_p\":53,\"proto\":\"udp\",\"trans_id\":62418,\"query\":\"www.cisco.com\",\"qclass\":1,\"qclass_name\":\"C_INTERNET\",\"qtype\":28,\"qtype_name\":\"AAAA\",\"rcode\":0,\"rcode_name\":\"NOERROR\",\"AA\":true,\"TC\":false,\"RD\":true,\"RA\":true,\"Z\":0,\"answers\":[\"www.cisco.com.akadns.net\",\"origin-www.cisco.com\",\"2001:420:1201:2::a\"],\"TTLs\":[3600.0,289.0,14.0],\"rejected\":false}}";
+
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+
+		JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+		Assert.assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		Assert.assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		Assert.assertEquals(broJson.get("ip_dst_addr").toString(), rawJson.get("id.resp_h").toString());
+		Assert.assertEquals(broJson.get("ip_src_port").toString(), rawJson.get("id.orig_p").toString());
+		Assert.assertEquals(broJson.get("ip_dst_port").toString(), rawJson.get("id.resp_p").toString());
+		Assert.assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+
+		Assert.assertEquals(broJson.get("qtype").toString(), rawJson.get("qtype").toString());
+		Assert.assertEquals(broJson.get("trans_id").toString(), rawJson.get("trans_id").toString());
+	}
+
+	// Files record: the first entries of the tx_hosts/rx_hosts ARRAYS supply the
+	// ip_src_addr/ip_dst_addr values.
+	@SuppressWarnings("rawtypes")
+	public void testFilesBroMessage() throws ParseException {
+		String rawMessage = "{\"files\":{\"analyzers\": [\"X509\",\"MD5\",\"SHA1\"],\"conn_uids\":[\"C4tygJ3qxJBEJEBCeh\"],\"depth\": 0,\"duration\": 0.0,\"fuid\":\"FZEBC33VySG0nHSoO9\",\"is_orig\": false,\"local_orig\": false,\"md5\": \"eba37166385e3ef42464ed9752e99f1b\",\"missing_bytes\": 0,\"overflow_bytes\": 0,\"rx_hosts\": [\"10.220.15.205\"],\"seen_bytes\": 1136,\"sha1\": \"73e42686657aece354fbf685712361658f2f4357\",\"source\": \"SSL\",\"timedout\": false,\"ts\": \"1425845251334\",\"tx_hosts\": [\"68.171.237.7\"]}}";
+
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+
+		JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+		Assert.assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		Assert.assertEquals(broJson.get("ip_src_addr").toString(), ((JSONArray)rawJson.get("tx_hosts")).get(0).toString());
+		Assert.assertEquals(broJson.get("ip_dst_addr").toString(), ((JSONArray)rawJson.get("rx_hosts")).get(0).toString());
+		Assert.assertTrue(broJson.get("original_string").toString().startsWith(rawMessageMap.keySet().iterator().next().toString().toUpperCase()));
+
+		Assert.assertEquals(broJson.get("fuid").toString(), rawJson.get("fuid").toString());
+		Assert.assertEquals(broJson.get("md5").toString(), rawJson.get("md5").toString());
+		Assert.assertEquals(broJson.get("analyzers").toString(), rawJson.get("analyzers").toString());
+	}
+
+	// Non-alphanumeric characters in the wrapper key ("ht*tp") should be stripped, so
+	// original_string still begins with "HTTP".
+	@SuppressWarnings("rawtypes")
+	public void testProtocolKeyCleanedUp() throws ParseException {
+		String rawMessage = "{\"ht*tp\":{\"ts\":1402307733473,\"uid\":\"CTo78A11g7CYbbOHvj\",\"id.orig_h\":\"192.249.113.37\",\"id.orig_p\":58808,\"id.resp_h\":\"72.163.4.161\",\"id.resp_p\":80,\"trans_depth\":1,\"method\":\"GET\",\"host\":\"www.cisco.com\",\"uri\":\"/\",\"user_agent\":\"curl/7.22.0 (x86_64-pc-linux-gnu) libcurl/7.22.0 OpenSSL/1.0.1 zlib/1.2.3.4 libidn/1.23 librtmp/2.3\",\"request_body_len\":0,\"response_body_len\":25523,\"status_code\":200,\"status_msg\":\"OK\",\"tags\":[],\"resp_fuids\":[\"FJDyMC15lxUn5ngPfd\"],\"resp_mime_types\":[\"text/html\"]}}";
+
+		Map rawMessageMap = (Map) jsonParser.parse(rawMessage);
+		JSONObject rawJson = (JSONObject) rawMessageMap.get(rawMessageMap.keySet().iterator().next());
+
+		JSONObject broJson = broParser.parse(rawMessage.getBytes()).get(0);
+
+		Assert.assertEquals(broJson.get("timestamp").toString(), rawJson.get("ts").toString());
+		Assert.assertEquals(broJson.get("ip_src_addr").toString(), rawJson.get("id.orig_h").toString());
+		Assert.assertTrue(broJson.get("original_string").toString().startsWith("HTTP"));
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BroParserTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BroParserTest.java b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BroParserTest.java
new file mode 100644
index 0000000..2dd11c5
--- /dev/null
+++ b/metron-platform/metron-parsers/src/test/java/org/apache/metron/parsers/bro/BroParserTest.java
@@ -0,0 +1,163 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.parsers.bro;
+
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+
+import org.apache.metron.parsers.AbstractConfigTest;
+import org.junit.Assert;
+
+/**
+ * <ul>
+ * <li>Title: Test For BroParser</li>
+ * <li>Description: </li>
+ * <li>Created: July 8, 2014</li>
+ * </ul>
+ * @version $Revision: 1.0 $
+ */
+
+ /**
+ * <ul>
+ * <li>Title: </li>
+ * <li>Description: </li>
+ * <li>Created: Feb 20, 2015 </li>
+ * </ul>
+ * @author $Author: $
+ * @version $Revision: 1.1 $
+ */
+// Data-driven test for BasicBroParser: reads sample Bro log lines from a configured file
+// and verifies every key in the parsed output contains only "clean" characters.
+public class BroParserTest extends AbstractConfigTest {
+	
+	
+	/**
+	 * The inputStrings.
+	 */
+	private static String[] inputStrings;
+
+     /**
+     * The parser.
+     */
+    private BasicBroParser parser=null;
+	
+    /**
+     * Constructs a new <code>BroParserTest</code> instance.
+     * @throws Exception 
+     */
+    public BroParserTest() throws Exception {
+        super();
+    }	
+
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void setUpBeforeClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	public static void tearDownAfterClass() throws Exception {
+	}
+
+	/**
+	 * @throws java.lang.Exception
+	 */
+	// Loads the sample log lines from the file named by the "logFile" test-config property.
+	public void setUp() throws Exception {
+        super.setUp("org.apache.metron.parsers.bro.BroParserTest");
+        setInputStrings(super.readTestDataFromFile(this.getConfig().getString("logFile")));
+        parser = new BasicBroParser();  
+	}
+	
+	/**
+	 * @throws ParseException
+	 * Tests for Parse Method
+	 * Parses Static json String and checks if any spl chars are present in parsed string.
+	 */
+	@SuppressWarnings({ "unused", "rawtypes" })
+	public void testParse() throws ParseException {
+
+		for (String inputString : getInputStrings()) {
+			JSONObject cleanJson = parser.parse(inputString.getBytes()).get(0);
+			Assert.assertNotNull(cleanJson);
+			System.out.println(cleanJson);
+
+			// After normalization, keys may contain only letters, digits, '.', '_' and space.
+			Pattern p = Pattern.compile("[^\\._a-z0-9 ]",
+					Pattern.CASE_INSENSITIVE);
+
+			// NOTE(review): this local JSONParser shadows the field name and is never used,
+			// as is the "output" map below — both covered by the "unused" suppression.
+			JSONParser parser = new JSONParser();
+
+			Map json = (Map) cleanJson;
+			Map output = new HashMap();
+			Iterator iter = json.entrySet().iterator();
+
+			while (iter.hasNext()) {
+				Map.Entry entry = (Map.Entry) iter.next();
+				String key = (String) entry.getKey();
+
+				Matcher m = p.matcher(key);
+				boolean b = m.find();
+				// Test False
+				Assert.assertFalse(b);
+			}
+		}
+
+	}
+
+	/**
+	 * Returns Input String
+	 */
+	public static String[] getInputStrings() {
+		return inputStrings;
+	}
+
+	/**
+	 * Sets SourceFire Input String
+	 */
+	public static void setInputStrings(String[] strings) {
+		BroParserTest.inputStrings = strings;
+	}
+	
+    /**
+     * Returns the parser.
+     * @return the parser.
+     */
+    
+    public BasicBroParser getParser() {
+        return parser;
+    }
+
+
+    /**
+     * Sets the parser.
+     * @param parser the parser.
+     */
+    
+    public void setParser(BasicBroParser parser) {
+    
+        this.parser = parser;
+    }	
+}


[31/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/stix/StixExtractorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/stix/StixExtractorTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/stix/StixExtractorTest.java
new file mode 100644
index 0000000..dba57dd
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/extractor/stix/StixExtractorTest.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.extractor.stix;
+
+import com.google.common.collect.Iterables;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+// Tests the STIX extractor against an embedded IP-watchlist document, exercising
+// IPv4-only, unfiltered, and IPv6-only address-category configurations.
+public class StixExtractorTest {
+    /**
+         <!--
+         STIX IP Watchlist Example
+
+         Copyright (c) 2015, The MITRE Corporation. All rights reserved.
+         The contents of this file are subject to the terms of the STIX License located at http://stix.mitre.org/about/termsofuse.html.
+
+         This example demonstrates a simple usage of STIX to represent a list of IP address indicators (watchlist of IP addresses). Cyber operations and malware analysis centers often share a list of suspected malicious IP addresses with information about what those IPs might indicate. This STIX package represents a list of three IP addresses with a short dummy description of what they represent.
+
+         It demonstrates the use of:
+
+         * STIX Indicators
+         * CybOX within STIX
+         * The CybOX Address Object (IP)
+         * CybOX Patterns (apply_condition="ANY")
+         * Controlled vocabularies
+
+         Created by Mark Davidson
+         -->
+         <stix:STIX_Package
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns:stix="http://stix.mitre.org/stix-1"
+         xmlns:indicator="http://stix.mitre.org/Indicator-2"
+         xmlns:cybox="http://cybox.mitre.org/cybox-2"
+         xmlns:AddressObject="http://cybox.mitre.org/objects#AddressObject-2"
+         xmlns:cyboxVocabs="http://cybox.mitre.org/default_vocabularies-2"
+         xmlns:stixVocabs="http://stix.mitre.org/default_vocabularies-1"
+         xmlns:example="http://example.com/"
+         id="example:STIXPackage-33fe3b22-0201-47cf-85d0-97c02164528d"
+         timestamp="2014-05-08T09:00:00.000000Z"
+         version="1.2">
+         <stix:STIX_Header>
+         <stix:Title>Example watchlist that contains IP information.</stix:Title>
+         <stix:Package_Intent xsi:type="stixVocabs:PackageIntentVocab-1.0">Indicators - Watchlist</stix:Package_Intent>
+         </stix:STIX_Header>
+         <stix:Indicators>
+         <stix:Indicator xsi:type="indicator:IndicatorType" id="example:Indicator-33fe3b22-0201-47cf-85d0-97c02164528d" timestamp="2014-05-08T09:00:00.000000Z">
+         <indicator:Type xsi:type="stixVocabs:IndicatorTypeVocab-1.1">IP Watchlist</indicator:Type>
+         <indicator:Description>Sample IP Address Indicator for this watchlist. This contains one indicator with a set of three IP addresses in the watchlist.</indicator:Description>
+         <indicator:Observable  id="example:Observable-1c798262-a4cd-434d-a958-884d6980c459">
+         <cybox:Object id="example:Object-1980ce43-8e03-490b-863a-ea404d12242e">
+         <cybox:Properties xsi:type="AddressObject:AddressObjectType" category="ipv4-addr">
+         <AddressObject:Address_Value condition="Equals" apply_condition="ANY">10.0.0.0##comma##10.0.0.1##comma##10.0.0.2</AddressObject:Address_Value>
+         </cybox:Properties>
+         </cybox:Object>
+         </indicator:Observable>
+         </stix:Indicator>
+         </stix:Indicators>
+         </stix:STIX_Package>
+         */
+    // The STIX document is captured from the preceding javadoc via @Multiline.
+    // NOTE(review): "##comma##" is presumably a placeholder for "," that is expanded
+    // somewhere before/inside the extractor — TODO confirm against StixExtractor.
+    @Multiline
+    private static String stixDoc;
+
+    /**
+    {
+        "config" : {
+             "stix_address_categories" : "IPV_4_ADDR"
+        }
+        ,"extractor" : "STIX"
+    }
+    */
+    @Multiline
+    private static String stixConfigOnlyIPV4;
+    /**
+    {
+        "config" : {
+             "stix_address_categories" : "IPV_6_ADDR"
+        }
+        ,"extractor" : "STIX"
+    }
+    */
+    @Multiline
+    private static String stixConfigOnlyIPV6;
+    /**
+    {
+        "config" : {
+        }
+        ,"extractor" : "STIX"
+    }
+    */
+    @Multiline
+    private static String stixConfig;
+    @Test
+    public void testStixAddresses() throws Exception {
+        {
+            // IPv4-only filter: all three watchlist addresses are extracted, in order.
+            ExtractorHandler handler = ExtractorHandler.load(stixConfigOnlyIPV4);
+            Extractor extractor = handler.getExtractor();
+            Iterable<LookupKV> results = extractor.extract(stixDoc);
+
+            Assert.assertEquals(3, Iterables.size(results));
+            Assert.assertEquals("10.0.0.0", ((EnrichmentKey)(Iterables.get(results, 0).getKey())).indicator);
+            Assert.assertEquals("10.0.0.1", ((EnrichmentKey)(Iterables.get(results, 1).getKey())).indicator);
+            Assert.assertEquals("10.0.0.2", ((EnrichmentKey)(Iterables.get(results, 2).getKey())).indicator);
+        }
+        {
+
+            // No category filter: the default behavior also yields the three IPv4 indicators.
+            ExtractorHandler handler = ExtractorHandler.load(stixConfig);
+            Extractor extractor = handler.getExtractor();
+            Iterable<LookupKV> results = extractor.extract(stixDoc);
+            Assert.assertEquals(3, Iterables.size(results));
+            Assert.assertEquals("10.0.0.0", ((EnrichmentKey)(Iterables.get(results, 0).getKey())).indicator);
+            Assert.assertEquals("10.0.0.1", ((EnrichmentKey)(Iterables.get(results, 1).getKey())).indicator);
+            Assert.assertEquals("10.0.0.2", ((EnrichmentKey)(Iterables.get(results, 2).getKey())).indicator);
+        }
+        {
+
+            // IPv6-only filter: the document contains no IPv6 addresses, so nothing matches.
+            ExtractorHandler handler = ExtractorHandler.load(stixConfigOnlyIPV6);
+            Extractor extractor = handler.getExtractor();
+            Iterable<LookupKV> results = extractor.extract(stixDoc);
+            Assert.assertEquals(0, Iterables.size(results));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/HBaseEnrichmentConverterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/HBaseEnrichmentConverterTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/HBaseEnrichmentConverterTest.java
new file mode 100644
index 0000000..28b3e26
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/HBaseEnrichmentConverterTest.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.hbase;
+
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+
+// Round-trip tests for EnrichmentConverter: a fixed (EnrichmentKey, EnrichmentValue) pair
+// must survive conversion to/from HBase Put and Result, and map to the expected Get row.
+public class HBaseEnrichmentConverterTest {
+    EnrichmentKey key = new EnrichmentKey("domain", "google");
+    EnrichmentValue value = new EnrichmentValue(
+            new HashMap<String, String>() {{
+                put("foo", "bar");
+                put("grok", "baz");
+            }});
+    // Expected result of every round-trip below; relies on LookupKV/key/value equals().
+    LookupKV<EnrichmentKey, EnrichmentValue> results = new LookupKV(key, value);
+    @Test
+    public void testKeySerialization() {
+        // Key -> bytes -> key must compare equal to the original.
+        byte[] serialized = key.toBytes();
+
+        EnrichmentKey deserialized = new EnrichmentKey();
+        deserialized.fromBytes(serialized);
+        Assert.assertEquals(key, deserialized);
+    }
+
+    @Test
+    public void testPut() throws IOException {
+        // (key, value) -> Put -> LookupKV round-trip under column family "cf".
+        HbaseConverter<EnrichmentKey, EnrichmentValue> converter = new EnrichmentConverter();
+        Put put = converter.toPut("cf", key, value);
+        LookupKV<EnrichmentKey, EnrichmentValue> converted= converter.fromPut(put, "cf");
+        Assert.assertEquals(results, converted);
+    }
+    @Test
+    public void testResult() throws IOException {
+        // (key, value) -> Result -> LookupKV round-trip under column family "cf".
+        HbaseConverter<EnrichmentKey, EnrichmentValue> converter = new EnrichmentConverter();
+        Result r = converter.toResult("cf", key, value);
+        LookupKV<EnrichmentKey, EnrichmentValue> converted= converter.fromResult(r, "cf");
+        Assert.assertEquals(results, converted);
+    }
+
+    @Test
+    public void testGet() throws Exception {
+        // The Get built for a key must use the key's serialized bytes as the row.
+        HbaseConverter<EnrichmentKey, EnrichmentValue> converter = new EnrichmentConverter();
+        Get get = converter.toGet("cf", key);
+        Assert.assertArrayEquals(key.toBytes(), get.getRow());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
new file mode 100644
index 0000000..626c98e
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+// Integration test: runs the ThreatIntelBulkLoader MapReduce job over a one-row CSV file
+// against an in-process HBase mini-cluster, then verifies the loaded enrichment row.
+public class BulkLoadMapperIntegrationTest {
+  /** The test util. */
+  private HBaseTestingUtility testUtil;
+
+  /** The test table. */
+  private HTable testTable;
+  String tableName = "malicious_domains";
+  String cf = "cf";
+  Configuration config = null;
+  @Before
+  public void setup() throws Exception {
+    // Spins up the HBase mini-cluster (and MR support, per the 'true' flag) and creates the table.
+    Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
+    config = kv.getValue();
+    testUtil = kv.getKey();
+    testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
+  }
+
+  @After
+  public void teardown() throws Exception {
+    HBaseUtil.INSTANCE.teardown(testUtil);
+  }
+ /**
+         {
+            "config" : {
+                        "columns" : {
+                                "host" : 0
+                                ,"meta" : 2
+                                    }
+                       ,"indicator_column" : "host"
+                       ,"separator" : ","
+                       ,"type" : "threat"
+                       }
+            ,"extractor" : "CSV"
+         }
+         */
+  // CSV extractor config (captured via @Multiline): column 0 is "host" (the indicator),
+  // column 2 is "meta"; column 1 of the input is ignored.
+  @Multiline
+  private static String extractorConfig;
+
+  @Test
+  public void test() throws IOException, ClassNotFoundException, InterruptedException {
+
+    Assert.assertNotNull(testTable);
+    FileSystem fs = FileSystem.get(config);
+    String contents = "google.com,1,foo";
+    EnrichmentConverter converter = new EnrichmentConverter();
+    HBaseUtil.INSTANCE.writeFile(contents, new Path("input.csv"), fs);
+    Job job = ThreatIntelBulkLoader.createJob(config, "input.csv", tableName, cf, extractorConfig, 0L, new EnrichmentConverter());
+    Assert.assertTrue(job.waitForCompletion(true));
+    // Scan everything back out of the table and decode each row with the converter.
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for(Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+    }
+    // Exactly one row; the indicator column is stored both as the key and in the
+    // metadata map alongside "meta" (hence metadata size 2).
+    Assert.assertEquals(1, results.size());
+    Assert.assertEquals(results.get(0).getKey().indicator, "google.com");
+    Assert.assertEquals(results.get(0).getKey().type, "threat");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertEquals(results.get(0).getValue().getMetadata().get("meta"), "foo");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().get("host"), "google.com");
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperTest.java
new file mode 100644
index 0000000..82233cf
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperTest.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class BulkLoadMapperTest {
+    /**
+         {
+            "config" : {
+                        "columns" : {
+                                "host" : 0
+                                ,"meta" : 2
+                                    }
+                       ,"indicator_column" : "host"
+                       ,"type" : "threat"
+                       ,"separator" : ","
+                       }
+            ,"extractor" : "CSV"
+         }
+         */
+    @Multiline
+    private static String extractorConfig;
+    @Test
+    public void testMapper() throws IOException, InterruptedException {
+
+        final Map<ImmutableBytesWritable, Put> puts = new HashMap<>();
+        BulkLoadMapper mapper = new BulkLoadMapper() {
+            @Override
+            protected void write(ImmutableBytesWritable key, Put value, Context context) throws IOException, InterruptedException {
+                puts.put(key, value);
+            }
+        };
+        mapper.initialize(new Configuration() {{
+            set(BulkLoadMapper.COLUMN_FAMILY_KEY, "cf");
+            set(BulkLoadMapper.CONFIG_KEY, extractorConfig);
+            set(BulkLoadMapper.LAST_SEEN_KEY, "0");
+            set(BulkLoadMapper.CONVERTER_KEY, EnrichmentConverter.class.getName());
+        }});
+        {
+            mapper.map(null, new Text("#google.com,1,foo"), null);
+            Assert.assertTrue(puts.size() == 0);
+        }
+        {
+            mapper.map(null, new Text("google.com,1,foo"), null);
+            Assert.assertTrue(puts.size() == 1);
+            EnrichmentKey expectedKey = new EnrichmentKey() {{
+                indicator = "google.com";
+                type = "threat";
+            }};
+            EnrichmentConverter converter = new EnrichmentConverter();
+            Put put = puts.get(new ImmutableBytesWritable(expectedKey.toBytes()));
+            Assert.assertNotNull(puts);
+            LookupKV<EnrichmentKey, EnrichmentValue> results = converter.fromPut(put, "cf");
+            Assert.assertEquals(results.getKey().indicator, "google.com");
+            Assert.assertEquals(results.getValue().getMetadata().size(), 2);
+            Assert.assertEquals(results.getValue().getMetadata().get("meta"), "foo");
+            Assert.assertEquals(results.getValue().getMetadata().get("host"), "google.com");
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/HBaseUtil.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/HBaseUtil.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/HBaseUtil.java
new file mode 100644
index 0000000..c9c6424
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/HBaseUtil.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import com.google.common.base.Joiner;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+
+import java.io.*;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public enum HBaseUtil {
+    INSTANCE;
+    public Map.Entry<HBaseTestingUtility,Configuration> create(boolean startMRCluster) throws Exception {
+        Configuration config = HBaseConfiguration.create();
+        config.set("hbase.master.hostname", "localhost");
+        config.set("hbase.regionserver.hostname", "localhost");
+        HBaseTestingUtility testUtil = new HBaseTestingUtility(config);
+
+        testUtil.startMiniCluster(1);
+        if(startMRCluster) {
+            testUtil.startMiniMapReduceCluster();
+        }
+        return new AbstractMap.SimpleEntry<>(testUtil, config);
+    }
+    public void writeFile(String contents, Path filename, FileSystem fs) throws IOException {
+        FSDataOutputStream os = fs.create(filename, true);
+        PrintWriter pw = new PrintWriter(new OutputStreamWriter(os));
+        pw.print(contents);
+        pw.flush();
+        os.close();
+    }
+
+    public String readFile(FileSystem fs, Path filename) throws IOException {
+        FSDataInputStream in = fs.open(filename);
+        BufferedReader br = new BufferedReader(new InputStreamReader(in));
+        List<String> contents = new ArrayList<>();
+        for(String line = null;(line = br.readLine()) != null;) {
+            contents.add(line);
+        }
+        return Joiner.on('\n').join(contents);
+    }
+
+    public void teardown(HBaseTestingUtility testUtil) throws Exception {
+        testUtil.shutdownMiniMapReduceCluster();
+        testUtil.shutdownMiniCluster();
+        testUtil.cleanupTestDir();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
new file mode 100644
index 0000000..65befe3
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
@@ -0,0 +1,138 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.hbase.mr;
+
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.metron.dataloads.bulk.LeastRecentlyUsedPruner;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.EnrichmentLookup;
+import org.apache.metron.enrichment.lookup.LookupKey;
+import org.apache.metron.enrichment.lookup.accesstracker.BloomAccessTracker;
+import org.apache.metron.enrichment.lookup.accesstracker.PersistentAccessTracker;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
public class LeastRecentlyUsedPrunerIntegrationTest {
    /** The test util. */
    private HBaseTestingUtility testUtil;

    /** The test table. */
    private HTable testTable;
    // Table backing the persistent access tracker.
    private HTable atTable;
    // Enrichment table / column family under test.
    String tableName = "malicious_domains";
    String cf = "cf";
    // Access-tracker table / column family.
    String atTableName = "access_trackers";
    String atCF= "cf";
    Configuration config = null;

    /**
     * Starts an HBase mini cluster (with MR support, needed by the pruner job)
     * and creates the enrichment and access-tracker tables.
     */
    @Before
    public void setup() throws Exception {
        Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
        config = kv.getValue();
        testUtil = kv.getKey();
        testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
        atTable = testUtil.createTable(Bytes.toBytes(atTableName), Bytes.toBytes(atCF));
    }

    /** Tears down the mini cluster and cleans up its working directory. */
    @After
    public void teardown() throws Exception {
        HBaseUtil.INSTANCE.teardown(testUtil);
    }

    /**
     * Builds enrichment keys "key-start" .. "key-(end-1)", all of type "type".
     *
     * @param start inclusive lower bound
     * @param end   exclusive upper bound
     * @return one EnrichmentKey per index in [start, end)
     */
    public List<LookupKey> getKeys(int start, int end) {
        List<LookupKey> keys = new ArrayList<>();
        for(int i = start;i < end;++i) {
            keys.add(new EnrichmentKey("type", "key-" + i));
        }
        return keys;
    }

    /**
     * End-to-end check of the LRU pruner MR job: rows whose keys were recorded
     * by the access tracker (the "good" keys) survive pruning, while the row
     * written but never looked up (the "bad" key) is removed.
     *
     * NOTE(review): the sequence of put / exists / persist / flush calls below
     * is deliberate — persist(true) snapshots the tracker state and flush makes
     * rows visible to the MR job; do not reorder.
     */
    @Test
    public void test() throws Exception {
        // Pruning cutoff timestamp passed to the pruner job below.
        long ts = System.currentTimeMillis();
        BloomAccessTracker bat = new BloomAccessTracker("tracker1", 100, 0.03);
        PersistentAccessTracker pat = new PersistentAccessTracker(tableName, "0", atTable, atCF, bat, 0L);
        EnrichmentLookup lookup = new EnrichmentLookup(testTable, cf, pat);
        List<LookupKey> goodKeysHalf = getKeys(0, 5);
        List<LookupKey> goodKeysOtherHalf = getKeys(5, 10);
        Iterable<LookupKey> goodKeys = Iterables.concat(goodKeysHalf, goodKeysOtherHalf);
        // A single key that is written but never accessed via the tracker.
        List<LookupKey> badKey = getKeys(10, 11);
        EnrichmentConverter converter = new EnrichmentConverter();
        // Write the first half and touch each key through the lookup so the
        // access tracker sees it.
        for(LookupKey k : goodKeysHalf) {
            testTable.put(converter.toPut(cf, (EnrichmentKey) k
                                            , new EnrichmentValue(
                                                  new HashMap<String, String>() {{
                                                    put("k", "dummy");
                                                    }}
                                                  )
                                          )
                         );
            Assert.assertTrue(lookup.exists((EnrichmentKey)k, testTable, true));
        }
        // Persist the tracker state accumulated so far (first half of keys).
        pat.persist(true);
        // Write and touch the second half after the persist.
        for(LookupKey k : goodKeysOtherHalf) {
            testTable.put(converter.toPut(cf, (EnrichmentKey) k
                                            , new EnrichmentValue(new HashMap<String, String>() {{
                                                    put("k", "dummy");
                                                    }}
                                                                  )
                                         )
                         );
            Assert.assertTrue(lookup.exists((EnrichmentKey)k, testTable, true));
        }
        testUtil.flush();
        // After the persist above, the in-memory tracker only remembers the
        // second half; the first half lives in the persisted snapshot.
        Assert.assertFalse(lookup.getAccessTracker().hasSeen(goodKeysHalf.get(0)));
        for(LookupKey k : goodKeysOtherHalf) {
            Assert.assertTrue(lookup.getAccessTracker().hasSeen(k));
        }
        pat.persist(true);
        {
            // The bad key is written directly, bypassing the lookup, so the
            // tracker never sees it.
            testTable.put(converter.toPut(cf, (EnrichmentKey) badKey.get(0)
                    , new EnrichmentValue(new HashMap<String, String>() {{
                        put("k", "dummy");
                    }}
                    )
                    )
            );
        }
        testUtil.flush();
        Assert.assertFalse(lookup.getAccessTracker().hasSeen(badKey.get(0)));


        // Run the pruner: rows unseen by the tracker before ts should be removed.
        Job job = LeastRecentlyUsedPruner.createJob(config, tableName, cf, atTableName, atCF, ts);
        Assert.assertTrue(job.waitForCompletion(true));
        for(LookupKey k : goodKeys) {
            Assert.assertTrue(lookup.exists((EnrichmentKey)k, testTable, true));
        }
        for(LookupKey k : badKey) {
            Assert.assertFalse(lookup.exists((EnrichmentKey)k, testTable, true));
        }

    }

}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/MockTaxiiService.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/MockTaxiiService.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/MockTaxiiService.java
new file mode 100644
index 0000000..bc1b3b7
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/MockTaxiiService.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+
+import com.sun.net.httpserver.HttpHandler;
+import com.sun.net.httpserver.HttpServer;
+import org.apache.commons.io.FileUtils;
+import org.apache.metron.test.utils.UnitTestHelper;
+
+import javax.ws.rs.ApplicationPath;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.core.*;
+import javax.ws.rs.ext.RuntimeDelegate;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+@Path("/")
+public class MockTaxiiService {
+    static String discoveryMsg;
+    static String pollMsg;
+    static {
+        try {
+            String baseDir = UnitTestHelper.findDir("taxii-messages");
+            discoveryMsg = FileUtils.readFileToString(new File(new File(baseDir), "message.discovery"));
+            pollMsg= FileUtils.readFileToString(new File(new File(baseDir), "messages.poll"));
+        } catch (IOException e) {
+            throw new RuntimeException("Unable to read discovery message", e);
+        }
+    }
+
+
+    @POST
+    @Path("/taxii-discovery-service")
+    public Response getDiscovery() {
+        return Response.ok(discoveryMsg, MediaType.APPLICATION_XML_TYPE).header("x-taxii-content-type", "urn:taxii.mitre.org:message:xml:1.1").build();
+    }
+    @POST
+    @Path("/taxii-data")
+    public Response getData() {
+        return Response.ok(pollMsg).type(MediaType.APPLICATION_XML_TYPE).header("x-taxii-content-type", "urn:taxii.mitre.org:message:xml:1.1").build();
+    }
+
+    @ApplicationPath("rs")
+    public static class ApplicationConfig extends Application{
+        private final Set<Class<?>> classes;
+        public ApplicationConfig() {
+            HashSet<Class<?>> c = new HashSet<>();
+            c.add(MockTaxiiService.class);
+            classes = Collections.unmodifiableSet(c);
+        }
+        @Override
+        public Set<Class<?>> getClasses() {
+            return classes;
+        }
+    }
+    private static HttpServer server;
+    public static void start(int port) throws IOException {
+        // Create an HTTP server listening at port 8282
+        URI uri = UriBuilder.fromUri("http://localhost/").port(port).build();
+        server = HttpServer.create(new InetSocketAddress(uri.getPort()), 0);
+        HttpHandler handler = RuntimeDelegate.getInstance().createEndpoint(new ApplicationConfig(), HttpHandler.class);
+        server.createContext(uri.getPath(), handler);
+        discoveryMsg = discoveryMsg.replaceAll("PORT", "" + uri.getPort());
+        server.start();
+    }
+
+    public static void shutdown() {
+        if(server != null) {
+            server.stop(0);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
new file mode 100644
index 0000000..f0d9178
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.taxii;
+
+import com.google.common.base.Splitter;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.metron.dataloads.extractor.stix.StixExtractor;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.test.mock.MockHTable;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+public class TaxiiIntegrationTest {
+
+    @Before
+    public void setup() throws IOException {
+        MockTaxiiService.start(8282);
+    }
+
+    @After
+    public void teardown() {
+        MockTaxiiService.shutdown();
+        MockHTable.Provider.clear();
+    }
+
+    /**
+         {
+            "endpoint" : "http://localhost:8282/taxii-discovery-service"
+           ,"type" : "DISCOVER"
+           ,"collection" : "guest.Abuse_ch"
+           ,"table" : "threat_intel"
+           ,"columnFamily" : "cf"
+           ,"allowedIndicatorTypes" : [ "domainname:FQDN", "address:IPV_4_ADDR" ]
+         }
+    */
+    @Multiline
+    static String taxiiConnectionConfig;
+
+    @Test
+    public void testTaxii() throws Exception {
+
+        final MockHTable.Provider provider = new MockHTable.Provider();
+        final Configuration config = HBaseConfiguration.create();
+        TaxiiHandler handler = new TaxiiHandler(TaxiiConnectionConfig.load(taxiiConnectionConfig), new StixExtractor(), config ) {
+            @Override
+            protected synchronized HTableInterface createHTable(String tableInfo) throws IOException {
+                return provider.addToCache("threat_intel", "cf");
+            }
+        };
+        //UnitTestHelper.verboseLogging();
+        handler.run();
+        Set<String> maliciousDomains;
+        {
+            MockHTable table = (MockHTable) provider.getTable(config, "threat_intel");
+            maliciousDomains = getIndicators("domainname:FQDN", table.getPutLog(), "cf");
+        }
+        Assert.assertTrue(maliciousDomains.contains("www.office-112.com"));
+        Assert.assertEquals(numStringsMatch(MockTaxiiService.pollMsg, "DomainNameObj:Value condition=\"Equals\""), maliciousDomains.size());
+        Set<String> maliciousAddresses;
+        {
+            MockHTable table = (MockHTable) provider.getTable(config, "threat_intel");
+            maliciousAddresses= getIndicators("address:IPV_4_ADDR", table.getPutLog(), "cf");
+        }
+        Assert.assertTrue(maliciousAddresses.contains("94.102.53.142"));
+        Assert.assertEquals(numStringsMatch(MockTaxiiService.pollMsg, "AddressObj:Address_Value condition=\"Equal\""), maliciousAddresses.size());
+        MockHTable.Provider.clear();
+    }
+
+    private static int numStringsMatch(String xmlBundle, String text) {
+        int cnt = 0;
+        for(String line : Splitter.on("\n").split(xmlBundle)) {
+            if(line.contains(text)) {
+                cnt++;
+            }
+        }
+        return cnt;
+    }
+
+    private static Set<String> getIndicators(String indicatorType, Iterable<Put> puts, String cf) throws IOException {
+        EnrichmentConverter converter = new EnrichmentConverter();
+        Set<String> ret = new HashSet<>();
+        for(Put p : puts) {
+            LookupKV<EnrichmentKey, EnrichmentValue> kv = converter.fromPut(p, cf);
+            if (kv.getKey().type.equals(indicatorType)) {
+                ret.add(kv.getKey().indicator);
+            }
+        }
+        return ret;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/resources/log4j.properties b/metron-platform/metron-data-management/src/test/resources/log4j.properties
new file mode 100644
index 0000000..0d50388
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/resources/log4j.properties
@@ -0,0 +1,24 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+# Root logger option
+log4j.rootLogger=ERROR, stdout
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-data-management/src/test/resources/taxii-messages/message.discovery
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/resources/taxii-messages/message.discovery b/metron-platform/metron-data-management/src/test/resources/taxii-messages/message.discovery
new file mode 100644
index 0000000..2f7e788
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/resources/taxii-messages/message.discovery
@@ -0,0 +1,21 @@
+<taxii_11:Discovery_Response in_response_to="urn:uuid:695149bd-72ab-41dd-a7e0-7d0b2a73e81f" message_id="65257" xmlns:xmldsig="http://www.w3.org/2000/09/xmldsig#" xmlns:taxii_11="http://taxii.mitre.org/messages/taxii_xml_binding-1.1">
+    <taxii_11:Service_Instance service_type="DISCOVERY" available="true" service_version="urn:taxii.mitre.org:services:1.1">
+        <taxii_11:Protocol_Binding>urn:taxii.mitre.org:protocol:https:1.0</taxii_11:Protocol_Binding>
+        <taxii_11:Address>http://localhost:PORT/taxii-data</taxii_11:Address>
+        <taxii_11:Message_Binding>urn:taxii.mitre.org:message:xml:1.1</taxii_11:Message_Binding>
+        <taxii_11:Message></taxii_11:Message>
+    </taxii_11:Service_Instance>
+    <taxii_11:Service_Instance service_type="COLLECTION_MANAGEMENT" available="true" service_version="urn:taxii.mitre.org:services:1.1">
+        <taxii_11:Protocol_Binding>urn:taxii.mitre.org:protocol:https:1.0</taxii_11:Protocol_Binding>
+        <taxii_11:Address>http://localhost:PORT/taxii-collections</taxii_11:Address>
+        <taxii_11:Message_Binding>urn:taxii.mitre.org:message:xml:1.1</taxii_11:Message_Binding>
+        <taxii_11:Message></taxii_11:Message>
+    </taxii_11:Service_Instance>
+    <taxii_11:Service_Instance service_type="POLL" available="true" service_version="urn:taxii.mitre.org:services:1.1">
+        <taxii_11:Protocol_Binding>urn:taxii.mitre.org:protocol:https:1.0</taxii_11:Protocol_Binding>
+        <taxii_11:Address>http://localhost:PORT/taxii-data</taxii_11:Address>
+        <taxii_11:Message_Binding>urn:taxii.mitre.org:message:xml:1.1</taxii_11:Message_Binding>
+        <taxii_11:Message></taxii_11:Message>
+    </taxii_11:Service_Instance>
+</taxii_11:Discovery_Response>
+



[10/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackCollector.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackCollector.java b/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackCollector.java
new file mode 100644
index 0000000..485da5a
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackCollector.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package storm.kafka;
+
+import backtype.storm.spout.ISpoutOutputCollector;
+import backtype.storm.spout.SpoutOutputCollector;
+
+import java.io.Serializable;
+import java.util.List;
+
+public class CallbackCollector extends SpoutOutputCollector implements Serializable {
+  static final long serialVersionUID = 0xDEADBEEFL;
+  Callback _callback;
+  SpoutOutputCollector _delegate;
+  EmitContext _context;
+  public CallbackCollector(Callback callback, SpoutOutputCollector collector, EmitContext context) {
+    super(collector);
+    this._callback = callback;
+    this._delegate = collector;
+    this._context = context;
+  }
+
+
+  /**
+   * Emits a new tuple to the specified output stream with the given message ID.
+   * When Storm detects that this tuple has been fully processed, or has failed
+   * to be fully processed, the spout will receive an ack or fail callback respectively
+   * with the messageId as long as the messageId was not null. If the messageId was null,
+   * Storm will not track the tuple and no callback will be received. The emitted values must be
+   * immutable.
+   *
+   * @param streamId
+   * @param tuple
+   * @param messageId
+   * @return the list of task ids that this tuple was sent to
+   */
+  @Override
+  public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId)
+            .with(EmitContext.Type.STREAM_ID, streamId)
+    );
+    return _delegate.emit(streamId, t, messageId);
+  }
+
+  /**
+   * Emits a new tuple to the default output stream with the given message ID.
+   * When Storm detects that this tuple has been fully processed, or has failed
+   * to be fully processed, the spout will receive an ack or fail callback respectively
+   * with the messageId as long as the messageId was not null. If the messageId was null,
+   * Storm will not track the tuple and no callback will be received. The emitted values must be
+   * immutable.
+   *
+   * @param tuple
+   * @param messageId
+   * @return the list of task ids that this tuple was sent to
+   */
+  @Override
+  public List<Integer> emit(List<Object> tuple, Object messageId) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId));
+    return super.emit(t, messageId);
+  }
+
+  /**
+   * Emits a tuple to the default output stream with a null message id. Storm will
+   * not track this message so ack and fail will never be called for this tuple. The
+   * emitted values must be immutable.
+   *
+   * @param tuple
+   */
+  @Override
+  public List<Integer> emit(List<Object> tuple) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext());
+    return super.emit(t);
+  }
+
+  /**
+   * Emits a tuple to the specified output stream with a null message id. Storm will
+   * not track this message so ack and fail will never be called for this tuple. The
+   * emitted values must be immutable.
+   *
+   * @param streamId
+   * @param tuple
+   */
+  @Override
+  public List<Integer> emit(String streamId, List<Object> tuple) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId));
+    return super.emit(streamId, t);
+  }
+
+  /**
+   * Emits a tuple to the specified task on the specified output stream. This output
+   * stream must have been declared as a direct stream, and the specified task must
+   * use a direct grouping on this stream to receive the message. The emitted values must be
+   * immutable.
+   *
+   * @param taskId
+   * @param streamId
+   * @param tuple
+   * @param messageId
+   */
+  @Override
+  public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId)
+            .with(EmitContext.Type.MESSAGE_ID, messageId)
+            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
+    );
+    super.emitDirect(taskId, streamId, t, messageId);
+  }
+
+  /**
+   * Emits a tuple to the specified task on the default output stream. This output
+   * stream must have been declared as a direct stream, and the specified task must
+   * use a direct grouping on this stream to receive the message. The emitted values must be
+   * immutable.
+   *
+   * @param taskId
+   * @param tuple
+   * @param messageId
+   */
+  @Override
+  public void emitDirect(int taskId, List<Object> tuple, Object messageId) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.MESSAGE_ID, messageId)
+            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
+    );
+    super.emitDirect(taskId, t, messageId);
+  }
+
+  /**
+   * Emits a tuple to the specified task on the specified output stream. This output
+   * stream must have been declared as a direct stream, and the specified task must
+   * use a direct grouping on this stream to receive the message. The emitted values must be
+   * immutable.
+   * <p/>
+   * <p> Because no message id is specified, Storm will not track this message
+   * so ack and fail will never be called for this tuple.</p>
+   *
+   * @param taskId
+   * @param streamId
+   * @param tuple
+   */
+  @Override
+  public void emitDirect(int taskId, String streamId, List<Object> tuple) {
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.STREAM_ID, streamId)
+            .with(EmitContext.Type.TASK_ID, new Integer(taskId))
+    );
+    super.emitDirect(taskId, streamId, t);
+  }
+
+  /**
+   * Emits a tuple to the specified task on the default output stream. This output
+   * stream must have been declared as a direct stream, and the specified task must
+   * use a direct grouping on this stream to receive the message. The emitted values must be
+   * immutable.
+   * <p/>
+   * <p> Because no message id is specified, Storm will not track this message
+   * so ack and fail will never be called for this tuple.</p>
+   *
+   * @param taskId
+   * @param tuple
+   */
+  @Override
+  public void emitDirect(int taskId, List<Object> tuple) {
+
+    List<Object> t = _callback.apply(tuple, _context.cloneContext().with(EmitContext.Type.TASK_ID, new Integer(taskId)));
+    super.emitDirect(taskId, t);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackKafkaSpout.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackKafkaSpout.java b/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackKafkaSpout.java
new file mode 100644
index 0000000..431bdf9
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/storm/kafka/CallbackKafkaSpout.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package storm.kafka;
+
+import backtype.storm.Config;
+import backtype.storm.metric.api.IMetric;
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import storm.kafka.*;
+
+import java.util.*;
+
+public class CallbackKafkaSpout extends KafkaSpout {
+  static final long serialVersionUID = 0xDEADBEEFL;
+  Class<? extends Callback> callbackClazz;
+  Callback _callback;
+  EmitContext _context;
+  public CallbackKafkaSpout(SpoutConfig spoutConfig, String callbackClass) {
+    this(spoutConfig, toCallbackClass(callbackClass));
+  }
+
+  public CallbackKafkaSpout(SpoutConfig spoutConf, Class<? extends Callback> callback) {
+    super(spoutConf);
+    callbackClazz = callback;
+  }
+
+  public void initialize() {
+    _callback = createCallback(callbackClazz);
+    _context = new EmitContext().with(EmitContext.Type.SPOUT_CONFIG, _spoutConfig)
+            .with(EmitContext.Type.UUID, _uuid);
+    _callback.initialize(_context);
+  }
+
+
+  private static Class<? extends Callback> toCallbackClass(String callbackClass)  {
+    try{
+      return (Class<? extends Callback>) Callback.class.forName(callbackClass);
+    }
+    catch (ClassNotFoundException e) {
+      throw new RuntimeException(callbackClass + " not found", e);
+    }
+  }
+
+  protected Callback createCallback(Class<? extends Callback> callbackClass)  {
+    try {
+      return callbackClass.newInstance();
+    } catch (InstantiationException e) {
+      throw new RuntimeException("Unable to instantiate callback", e);
+    } catch (IllegalAccessException e) {
+      throw new RuntimeException("Illegal access", e);
+    }
+  }
+
+  @Override
+  public void open(Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
+    if(_callback == null) {
+      initialize();
+    }
+    super.open( conf, context
+            , new CallbackCollector(_callback, collector
+                    ,_context.cloneContext().with(EmitContext.Type.OPEN_CONFIG, conf)
+                    .with(EmitContext.Type.TOPOLOGY_CONTEXT, context)
+            )
+    );
+  }
+
+  @Override
+  public void close() {
+    super.close();
+    if(_callback != null) {
+      try {
+        _callback.close();
+      } catch (Exception e) {
+        throw new IllegalStateException("Unable to close callback", e);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/main/java/storm/kafka/EmitContext.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/main/java/storm/kafka/EmitContext.java b/metron-platform/metron-pcap/src/main/java/storm/kafka/EmitContext.java
new file mode 100644
index 0000000..1f9ef59
--- /dev/null
+++ b/metron-platform/metron-pcap/src/main/java/storm/kafka/EmitContext.java
@@ -0,0 +1,146 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package storm.kafka;
+
+import backtype.storm.task.TopologyContext;
+
+import java.io.Serializable;
+import java.util.EnumMap;
+import java.util.Map;
+
+public class EmitContext implements Cloneable,Serializable {
+  static final long serialVersionUID = 0xDEADBEEFL;
+
+  public enum Type{
+    MESSAGE_ID(PartitionManager.KafkaMessageId.class)
+    ,STREAM_ID(String.class)
+    ,TASK_ID(Integer.class)
+    ,UUID(String.class)
+    ,SPOUT_CONFIG(SpoutConfig.class)
+    ,OPEN_CONFIG(Map.class)
+    ,TOPOLOGY_CONTEXT(TopologyContext.class)
+    ;
+    Class<?> clazz;
+    Type(Class<?> clazz) {
+      this.clazz=  clazz;
+    }
+
+    public Class<?> clazz() {
+      return clazz;
+    }
+  }
+  public EmitContext() {
+    this(new EnumMap<>(Type.class));
+  }
+  public EmitContext(EnumMap<Type, Object> context) {
+    _context = context;
+  }
+  private EnumMap<Type, Object> _context;
+
+  public <T> EmitContext with(Type t, T o ) {
+    _context.put(t, t.clazz().cast(o));
+    return this;
+  }
+  public <T> void add(Type t, T o ) {
+    with(t, o);
+  }
+
+  public <T> T get(Type t) {
+    Object o = _context.get(t);
+    if(o == null) {
+      return null;
+    }
+    else {
+      return (T) o;
+    }
+  }
+
+  public EmitContext cloneContext() {
+    try {
+      return (EmitContext)this.clone();
+    } catch (CloneNotSupportedException e) {
+      throw new RuntimeException("Unable to clone emit context.", e);
+    }
+  }
+
+  /**
+   * Creates and returns a copy of this object.  The precise meaning
+   * of "copy" may depend on the class of the object. The general
+   * intent is that, for any object {@code x}, the expression:
+   * <blockquote>
+   * <pre>
+   * x.clone() != x</pre></blockquote>
+   * will be true, and that the expression:
+   * <blockquote>
+   * <pre>
+   * x.clone().getClass() == x.getClass()</pre></blockquote>
+   * will be {@code true}, but these are not absolute requirements.
+   * While it is typically the case that:
+   * <blockquote>
+   * <pre>
+   * x.clone().equals(x)</pre></blockquote>
+   * will be {@code true}, this is not an absolute requirement.
+   *
+   * By convention, the returned object should be obtained by calling
+   * {@code super.clone}.  If a class and all of its superclasses (except
+   * {@code Object}) obey this convention, it will be the case that
+   * {@code x.clone().getClass() == x.getClass()}.
+   *
+   * By convention, the object returned by this method should be independent
+   * of this object (which is being cloned).  To achieve this independence,
+   * it may be necessary to modify one or more fields of the object returned
+   * by {@code super.clone} before returning it.  Typically, this means
+   * copying any mutable objects that comprise the internal "deep structure"
+   * of the object being cloned and replacing the references to these
+   * objects with references to the copies.  If a class contains only
+   * primitive fields or references to immutable objects, then it is usually
+   * the case that no fields in the object returned by {@code super.clone}
+   * need to be modified.
+   *
+   * The method {@code clone} for class {@code Object} performs a
+   * specific cloning operation. First, if the class of this object does
+   * not implement the interface {@code Cloneable}, then a
+   * {@code CloneNotSupportedException} is thrown. Note that all arrays
+   * are considered to implement the interface {@code Cloneable} and that
+   * the return type of the {@code clone} method of an array type {@code T[]}
+   * is {@code T[]} where T is any reference or primitive type.
+   * Otherwise, this method creates a new instance of the class of this
+   * object and initializes all its fields with exactly the contents of
+   * the corresponding fields of this object, as if by assignment; the
+   * contents of the fields are not themselves cloned. Thus, this method
+   * performs a "shallow copy" of this object, not a "deep copy" operation.
+   *
+   * The class {@code Object} does not itself implement the interface
+   * {@code Cloneable}, so calling the {@code clone} method on an object
+   * whose class is {@code Object} will result in throwing an
+   * exception at run time.
+   *
+   * @return a clone of this instance.
+   * @throws CloneNotSupportedException if the object's class does not
+   *                                    support the {@code Cloneable} interface. Subclasses
+   *                                    that override the {@code clone} method can also
+   *                                    throw this exception to indicate that an instance cannot
+   *                                    be cloned.
+   * @see Cloneable
+   */
+  @Override
+  protected Object clone() throws CloneNotSupportedException {
+    EmitContext context = new EmitContext(_context.clone());
+    return context;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-pcap/src/test/java/org/apache/metron/pcap/utils/PcapUtilsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap/src/test/java/org/apache/metron/pcap/utils/PcapUtilsTest.java b/metron-platform/metron-pcap/src/test/java/org/apache/metron/pcap/utils/PcapUtilsTest.java
new file mode 100644
index 0000000..39fa5fd
--- /dev/null
+++ b/metron-platform/metron-pcap/src/test/java/org/apache/metron/pcap/utils/PcapUtilsTest.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.pcap.utils;
+
+import junit.framework.Assert;
+import org.junit.Test;
+
+public class PcapUtilsTest {
+
+  @Test
+  public void testConvertHexToIpv4Ip() {
+    String hex = "c0a88a9e";
+    String ipAddress = PcapUtils.convertHexToIpv4Ip(hex);
+    Assert.assertEquals("192.168.138.158", ipAddress);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/pom.xml b/metron-platform/metron-solr/pom.xml
new file mode 100644
index 0000000..f6b87c5
--- /dev/null
+++ b/metron-platform/metron-solr/pom.xml
@@ -0,0 +1,245 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-solr</artifactId>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_hbase_guava_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-enrichment</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.solr</groupId>
+            <artifactId>solr-solrj</artifactId>
+            <version>${global_solr_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.solr</groupId>
+            <artifactId>solr-test-framework</artifactId>
+            <version>${global_solr_version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>${global_hadoop_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.9.2</artifactId>
+            <version>${global_kafka_version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-integration-test</artifactId>
+            <version>${project.parent.version}</version>
+            <scope>test</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <groupId>org.apache.httpcomponents</groupId>
+                    <artifactId>httpclient</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>${global_mockito_version}</version>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <!-- Separates the unit tests from the integration tests. -->
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.12.4</version>
+                <configuration>
+                    <!-- Skip the default running of this plug-in (or everything is run twice...see below) -->
+                    <argLine>-Xmx2048m -XX:MaxPermSize=256m</argLine>
+                    <skip>true</skip>
+                    <!-- Show 100% of the lines from the stack trace (doesn't work) -->
+                    <trimStackTrace>false</trimStackTrace>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>unit-tests</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <!-- Never skip running the tests when the test phase is invoked -->
+                            <skip>false</skip>
+                            <includes>
+                                <!-- Include unit tests within integration-test phase. -->
+                                <include>**/*Test.java</include>
+                            </includes>
+                            <excludes>
+                                <!-- Exclude integration tests within (unit) test phase. -->
+                                <exclude>**/*IntegrationTest.java</exclude>
+                            </excludes>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>integration-tests</id>
+                        <phase>integration-test</phase>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                        <configuration>
+                            <!-- Never skip running the tests when the integration-test phase is invoked -->
+                            <skip>false</skip>
+                            <includes>
+                                <!-- Include integration tests within integration-test phase. -->
+                                <include>**/*IntegrationTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>2.3</version>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <artifactSet>
+                                <excludes>
+                                    <exclude>storm:storm-core:*</exclude>
+                                    <exclude>storm:storm-lib:*</exclude>
+                                    <exclude>org.slf4j.impl*</exclude>
+                                    <exclude>org.slf4j:slf4j-log4j*</exclude>
+                                </excludes>
+                            </artifactSet>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                                    <resource>.yaml</resource>
+                                </transformer>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass></mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/assembly/assembly.xml b/metron-platform/metron-solr/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..bacaae3
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/assembly/assembly.xml
@@ -0,0 +1,52 @@
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/config</directory>
+      <outputDirectory>/config</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/src/main/scripts</directory>
+      <outputDirectory>/scripts</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0644</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+    <fileSet>
+      <directory>${project.basedir}/target</directory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+      <outputDirectory>/lib</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/config/solr.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/config/solr.properties b/metron-platform/metron-solr/src/main/config/solr.properties
new file mode 100644
index 0000000..cdfe25a
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/config/solr.properties
@@ -0,0 +1,109 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+##### Kafka #####
+
+kafka.zk=node1:2181
+kafka.broker=node1:6667
+spout.kafka.topic.asa=asa
+spout.kafka.topic.bro=bro
+spout.kafka.topic.fireeye=fireeye
+spout.kafka.topic.ise=ise
+spout.kafka.topic.lancope=lancope
+spout.kafka.topic.paloalto=paloalto
+spout.kafka.topic.pcap=pcap
+spout.kafka.topic.snort=snort
+spout.kafka.topic.yaf=yaf
+
+##### Indexing #####
+writer.class.name=org.apache.metron.solr.writer.SolrWriter
+
+##### ElasticSearch #####
+
+es.ip=10.22.0.214
+es.port=9300
+es.clustername=elasticsearch
+
+##### MySQL #####
+
+mysql.ip=10.22.0.214
+mysql.port=3306
+mysql.username=root
+mysql.password=hadoop123
+
+##### Metrics #####
+
+#reporters
+org.apache.metron.metrics.reporter.graphite=true
+org.apache.metron.metrics.reporter.console=false
+org.apache.metron.metrics.reporter.jmx=false
+
+#Graphite Addresses
+
+org.apache.metron.metrics.graphite.address=localhost
+org.apache.metron.metrics.graphite.port=2023
+
+#TelemetryParserBolt
+org.apache.metron.metrics.TelemetryParserBolt.acks=true
+org.apache.metron.metrics.TelemetryParserBolt.emits=true
+org.apache.metron.metrics.TelemetryParserBolt.fails=true
+
+
+#GenericEnrichmentBolt
+org.apache.metron.metrics.GenericEnrichmentBolt.acks=true
+org.apache.metron.metrics.GenericEnrichmentBolt.emits=true
+org.apache.metron.metrics.GenericEnrichmentBolt.fails=true
+
+
+#TelemetryIndexingBolt
+org.apache.metron.metrics.TelemetryIndexingBolt.acks=true
+org.apache.metron.metrics.TelemetryIndexingBolt.emits=true
+org.apache.metron.metrics.TelemetryIndexingBolt.fails=true
+
+##### Host Enrichment #####
+
+org.apache.metron.enrichment.host.known_hosts=[{"ip":"10.1.128.236", "local":"YES", "type":"webserver", "asset_value" : "important"},\
+{"ip":"10.1.128.237", "local":"UNKNOWN", "type":"unknown", "asset_value" : "important"},\
+{"ip":"10.60.10.254", "local":"YES", "type":"printer", "asset_value" : "important"}]
+
+##### HDFS #####
+
+bolt.hdfs.batch.size=5000
+bolt.hdfs.field.delimiter=|
+bolt.hdfs.file.rotation.size.in.mb=5
+bolt.hdfs.file.system.url=hdfs://iot01.cloud.hortonworks.com:8020
+bolt.hdfs.wip.file.path=/paloalto/wip
+bolt.hdfs.finished.file.path=/paloalto/rotated
+bolt.hdfs.compression.codec.class=org.apache.hadoop.io.compress.SnappyCodec
+index.hdfs.output=/tmp/metron/enriched
+
+##### HBase #####
+bolt.hbase.table.name=pcap
+bolt.hbase.table.fields=t:value
+bolt.hbase.table.key.tuple.field.name=key
+bolt.hbase.table.timestamp.tuple.field.name=timestamp
+bolt.hbase.enable.batching=false
+bolt.hbase.write.buffer.size.in.bytes=2000000
+bolt.hbase.durability=SKIP_WAL
+bolt.hbase.partitioner.region.info.refresh.interval.mins=60
+
+##### Threat Intel #####
+
+threat.intel.tracker.table=
+threat.intel.tracker.cf=
+threat.intel.ip.table=
+threat.intel.ip.cf=

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/SolrConstants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/SolrConstants.java b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/SolrConstants.java
new file mode 100644
index 0000000..d5dc7a0
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/SolrConstants.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr;
+
/**
 * String constants for the Solr Collections Admin API parameters and response
 * keys used by the Metron Solr writer ({@code action}, {@code name}, shard and
 * replication settings, and the {@code /admin/collections} request path).
 */
public final class SolrConstants {

  public static final String REQUEST_ACTION = "action";
  public static final String REQUEST_NAME = "name";
  public static final String REQUEST_NUM_SHARDS = "numShards";
  public static final String REQUEST_REPLICATION_FACTOR = "replicationFactor";
  public static final String REQUEST_COLLECTION_CONFIG_NAME = "collection.configName";
  public static final String REQUEST_COLLECTIONS_PATH = "/admin/collections";
  public static final String RESPONSE_COLLECTIONS = "collections";

  // Constants holder; never instantiated.
  private SolrConstants() {
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/MetronSolrClient.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/MetronSolrClient.java b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/MetronSolrClient.java
new file mode 100644
index 0000000..d3ef36f
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/MetronSolrClient.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.writer;
+
import org.apache.metron.solr.SolrConstants;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
+
+public class MetronSolrClient extends CloudSolrClient {
+
+  private static final Logger LOG = LoggerFactory
+          .getLogger(MetronSolrClient.class);
+
+
+  public MetronSolrClient(String zkHost) {
+    super(zkHost);
+  }
+
+  public void createCollection(String name, int numShards, int replicationFactor) throws IOException, SolrServerException {
+    if (!listCollections().contains(name)) {
+      request(getCreateCollectionsRequest(name, numShards, replicationFactor));
+    }
+  }
+
+  public QueryRequest getCreateCollectionsRequest(String name, int numShards, int replicationFactor) {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set(SolrConstants.REQUEST_ACTION, CollectionParams.CollectionAction.CREATE.name());
+    params.set(SolrConstants.REQUEST_NAME, name);
+    params.set(SolrConstants.REQUEST_NUM_SHARDS, numShards);
+    params.set(SolrConstants.REQUEST_REPLICATION_FACTOR, replicationFactor);
+    params.set(SolrConstants.REQUEST_COLLECTION_CONFIG_NAME, name);
+    QueryRequest request = new QueryRequest(params);
+    request.setPath(SolrConstants.REQUEST_COLLECTIONS_PATH);
+    return request;
+  }
+
+  @SuppressWarnings("unchecked")
+  public List<String> listCollections() throws IOException, SolrServerException {
+    NamedList<Object> response = request(getListCollectionsRequest(), null);
+    return (List<String>) response.get(SolrConstants.RESPONSE_COLLECTIONS);
+  }
+
+  public QueryRequest getListCollectionsRequest() {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.set(SolrConstants.REQUEST_ACTION, CollectionParams.CollectionAction.LIST.name());
+    QueryRequest request = new QueryRequest(params);
+    request.setPath(SolrConstants.REQUEST_COLLECTIONS_PATH);
+    return request;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/SolrWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/SolrWriter.java b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/SolrWriter.java
new file mode 100644
index 0000000..45d5615
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/java/org/apache/metron/solr/writer/SolrWriter.java
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.interfaces.BulkMessageWriter;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.common.SolrInputDocument;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+
+public class SolrWriter implements BulkMessageWriter<JSONObject>, Serializable {
+
+  public static final String DEFAULT_COLLECTION = "metron";
+
+  private static final Logger LOG = LoggerFactory
+          .getLogger(SolrWriter.class);
+
+  private boolean shouldCommit = false;
+  private MetronSolrClient solr;
+
+  public SolrWriter withShouldCommit(boolean shouldCommit) {
+    this.shouldCommit = shouldCommit;
+    return this;
+  }
+
+  public SolrWriter withMetronSolrClient(MetronSolrClient solr) {
+    this.solr = solr;
+    return this;
+  }
+
+  @Override
+  public void init(Map stormConf, Configurations configurations) throws IOException, SolrServerException {
+    Map<String, Object> globalConfiguration = configurations.getGlobalConfig();
+    if(solr == null) solr = new MetronSolrClient((String) globalConfiguration.get("solr.zookeeper"));
+    String collection = getCollection(configurations);
+    solr.createCollection(collection, (Integer) globalConfiguration.get("solr.numShards"), (Integer) globalConfiguration.get("solr.replicationFactor"));
+    solr.setDefaultCollection(collection);
+  }
+
+  @Override
+  public void write(String sourceType, Configurations configurations, List<Tuple> tuples, List<JSONObject> messages) throws Exception {
+    for(JSONObject message: messages) {
+      SolrInputDocument document = new SolrInputDocument();
+      document.addField("id", getIdValue(message));
+      document.addField("sensorType", sourceType);
+      for(Object key: message.keySet()) {
+        Object value = message.get(key);
+        document.addField(getFieldName(key, value), value);
+      }
+      UpdateResponse response = solr.add(document);
+    }
+    if (shouldCommit) {
+      solr.commit(getCollection(configurations));
+    }
+  }
+
+  protected String getCollection(Configurations configurations) {
+    String collection = (String) configurations.getGlobalConfig().get("solr.collection");
+    return collection != null ? collection : DEFAULT_COLLECTION;
+  }
+
+  private int getIdValue(JSONObject message) {
+    return message.toJSONString().hashCode();
+  }
+
+  protected String getFieldName(Object key, Object value) {
+    String field;
+    if (value instanceof Integer) {
+      field = key + "_i";
+    } else if (value instanceof Long) {
+      field = key + "_l";
+    } else if (value instanceof Float) {
+      field = key + "_f";
+    } else if (value instanceof Double) {
+      field = key + "_d";
+    } else {
+      field = key + "_s";
+    }
+    return field;
+  }
+
+  @Override
+  public void close() throws Exception {
+    solr.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/main/scripts/start_solr_topology.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/main/scripts/start_solr_topology.sh b/metron-platform/metron-solr/src/main/scripts/start_solr_topology.sh
new file mode 100755
index 0000000..1c984a7
--- /dev/null
+++ b/metron-platform/metron-solr/src/main/scripts/start_solr_topology.sh
@@ -0,0 +1,22 @@
#!/bin/bash
# 
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
# 
#     http://www.apache.org/licenses/LICENSE-2.0
# 
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Submits the Metron Solr enrichment topology to a remote Storm cluster via
# Flux, filtering the YAML topology definition through solr.properties.
METRON_VERSION=0.1BETA
METRON_HOME="/usr/metron/$METRON_VERSION"
TOPOLOGY_JAR="Metron-Solr-$METRON_VERSION.jar"
# Paths are quoted so the launch survives whitespace in METRON_HOME.
storm jar "$METRON_HOME/lib/$TOPOLOGY_JAR" org.apache.storm.flux.Flux --remote "$METRON_HOME/config/enrichment/remote.yaml" --filter "$METRON_HOME/config/solr.properties"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/SolrEnrichmentIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/SolrEnrichmentIntegrationTest.java b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/SolrEnrichmentIntegrationTest.java
new file mode 100644
index 0000000..4dfdad5
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/SolrEnrichmentIntegrationTest.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.integration;
+
+import com.google.common.base.Function;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.integration.EnrichmentIntegrationTest;
+import org.apache.metron.integration.ComponentRunner;
+import org.apache.metron.integration.InMemoryComponent;
+import org.apache.metron.integration.Processor;
+import org.apache.metron.integration.ReadinessState;
+import org.apache.metron.solr.integration.components.SolrComponent;
+import org.apache.metron.integration.utils.SampleUtil;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+import org.apache.metron.common.utils.JSONUtils;
+
+import javax.annotation.Nullable;
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
/**
 * Runs the shared {@code EnrichmentIntegrationTest} suite against an in-memory
 * Solr cluster: spins up a {@code SolrComponent}, points the topology and the
 * ZooKeeper-held global config at it, then polls Solr until the indexed
 * document count matches both the input messages and the HDFS output.
 */
public class SolrEnrichmentIntegrationTest extends EnrichmentIntegrationTest {

  private String collection = "metron";
  // NOTE(review): never assigned or read in this class — appears to be unused.
  private String solrZookeeperUrl;

  @Override
  public InMemoryComponent getSearchComponent(final Properties topologyProperties) throws Exception {
    // Build the in-memory Solr cluster; once it is up, record its ZooKeeper
    // address in the topology properties and in the global config stored in
    // the test ZooKeeper so SolrWriter.init() can find the cluster.
    SolrComponent solrComponent = new SolrComponent.Builder()
            .addCollection(collection, "../metron-solr/src/test/resources/solr/conf")
            .withPostStartCallback(new Function<SolrComponent, Void>() {
              @Nullable
              @Override
              public Void apply(@Nullable SolrComponent solrComponent) {
                topologyProperties.setProperty("solr.zk", solrComponent.getZookeeperUrl());
                try {
                  // "kafka.zk" is the test ZooKeeper that holds Metron configs.
                  String testZookeeperUrl = topologyProperties.getProperty("kafka.zk");
                  Configurations configurations = SampleUtil.getSampleConfigs();
                  Map<String, Object> globalConfig = configurations.getGlobalConfig();
                  globalConfig.put("solr.zookeeper", solrComponent.getZookeeperUrl());
                  ConfigurationsUtils.writeGlobalConfigToZookeeper(JSONUtils.INSTANCE.toJSON(globalConfig), testZookeeperUrl);
                } catch (Exception e) {
                  // Best-effort setup; a failure here surfaces later as NOT_READY.
                  e.printStackTrace();
                }
                return null;
              }
            })
            .build();
    return solrComponent;
  }

  @Override
  public Processor<List<Map<String, Object>>> getProcessor(final List<byte[]> inputMessages) {
    // Polled by the ComponentRunner: READY once Solr holds at least as many
    // docs as there were input messages AND the same count as the HDFS output.
    return new Processor<List<Map<String, Object>>>() {
      List<Map<String, Object>> docs = null;
      public ReadinessState process(ComponentRunner runner) {
        SolrComponent solrComponent = runner.getComponent("search", SolrComponent.class);
        if (solrComponent.hasCollection(collection)) {
          List<Map<String, Object>> docsFromDisk;
          try {
            docs = solrComponent.getAllIndexedDocs(collection);
            docsFromDisk = EnrichmentIntegrationTest.readDocsFromDisk(hdfsDir);
            System.out.println(docs.size() + " vs " + inputMessages.size() + " vs " + docsFromDisk.size());
          } catch (IOException e) {
            throw new IllegalStateException("Unable to retrieve indexed documents.", e);
          }
          if (docs.size() < inputMessages.size() || docs.size() != docsFromDisk.size()) {
            return ReadinessState.NOT_READY;
          } else {
            return ReadinessState.READY;
          }
        } else {
          return ReadinessState.NOT_READY;
        }
      }

      public List<Map<String, Object>> getResult() {
        return docs;
      }
    };
  }

  @Override
  public void setAdditionalProperties(Properties topologyProperties) {
    // Route the topology's indexing bolt to the Solr writer.
    topologyProperties.setProperty("writer.class.name", "org.apache.metron.solr.writer.SolrWriter");
  }

  @Override
  public String cleanField(String field) {
    // Strip the Solr dynamic-field type suffix (_d/_f/_i/_l/_s) added by
    // SolrWriter so fields compare equal to the raw message fields.
    return field.replaceFirst("_[dfils]$", "");
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/components/SolrComponent.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/components/SolrComponent.java b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/components/SolrComponent.java
new file mode 100644
index 0000000..3c852a4
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/integration/components/SolrComponent.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.integration.components;
+
+import com.google.common.base.Function;
+import org.apache.metron.integration.InMemoryComponent;
+import org.apache.metron.integration.UnableToStartException;
+import org.apache.metron.solr.writer.MetronSolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettyConfig;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.common.SolrDocument;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class SolrComponent implements InMemoryComponent {
+
+  public static class Builder {
+    private int port = 8983;
+    private String solrXmlPath = "../metron-solr/src/test/resources/solr/solr.xml";
+    private Map<String, String> collections = new HashMap<>();
+    private Function<SolrComponent, Void> postStartCallback;
+
+    public Builder withPort(int port) {
+      this.port = port;
+      return this;
+    }
+
+    public Builder withSolrXmlPath(String solrXmlPath) {
+      this.solrXmlPath = solrXmlPath;
+      return this;
+    }
+
+    public Builder addCollection(String name, String configPath) {
+      collections.put(name, configPath);
+      return this;
+    }
+
+    public Builder withPostStartCallback(Function<SolrComponent, Void> f) {
+      postStartCallback = f;
+      return this;
+    }
+
+    public SolrComponent build() throws Exception {
+      if (collections.isEmpty()) throw new Exception("Must add at least 1 collection");
+      return new SolrComponent(port, solrXmlPath, collections, postStartCallback);
+    }
+  }
+
+  private int port;
+  private String solrXmlPath;
+  private Map<String, String> collections;
+  private MiniSolrCloudCluster miniSolrCloudCluster;
+  private Function<SolrComponent, Void> postStartCallback;
+
+  private SolrComponent(int port, String solrXmlPath, Map<String, String> collections, Function<SolrComponent, Void> postStartCallback) throws Exception {
+    this.port = port;
+    this.solrXmlPath = solrXmlPath;
+    this.collections = collections;
+    this.postStartCallback = postStartCallback;
+  }
+
+  @Override
+  public void start() throws UnableToStartException {
+    try {
+      File baseDir = Files.createTempDirectory("solrcomponent").toFile();
+      baseDir.deleteOnExit();
+      miniSolrCloudCluster = new MiniSolrCloudCluster(1, baseDir, new File(solrXmlPath), JettyConfig.builder().setPort(port).build());
+      for(String name: collections.keySet()) {
+        String configPath = collections.get(name);
+        miniSolrCloudCluster.uploadConfigDir(new File(configPath), name);
+      }
+      miniSolrCloudCluster.createCollection("metron", 1, 1, "metron", new HashMap<String, String>());
+      if (postStartCallback != null) postStartCallback.apply(this);
+    } catch(Exception e) {
+      throw new UnableToStartException(e.getMessage(), e);
+    }
+  }
+
+  @Override
+  public void stop() {
+    try {
+      miniSolrCloudCluster.shutdown();
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  public MetronSolrClient getSolrClient() {
+    return new MetronSolrClient(getZookeeperUrl());
+  }
+
+  public MiniSolrCloudCluster getMiniSolrCloudCluster() {
+    return this.miniSolrCloudCluster;
+  }
+
+  public String getZookeeperUrl() {
+    return miniSolrCloudCluster.getZkServer().getZkAddress();
+  }
+
+  public boolean hasCollection(String collection) {
+    MetronSolrClient solr = getSolrClient();
+    boolean collectionFound = false;
+    try {
+      collectionFound = solr.listCollections().contains(collection);
+    } catch(Exception e) {
+      e.printStackTrace();
+    }
+    return collectionFound;
+  }
+
+  public List<Map<String, Object>> getAllIndexedDocs(String collection) {
+    List<Map<String, Object>> docs = new ArrayList<>();
+    CloudSolrClient solr = miniSolrCloudCluster.getSolrClient();
+    solr.setDefaultCollection(collection);
+    SolrQuery parameters = new SolrQuery();
+    parameters.set("q", "*:*");
+    try {
+      solr.commit();
+      QueryResponse response = solr.query(parameters);
+      for (SolrDocument solrDocument : response.getResults()) {
+        docs.add(solrDocument);
+      }
+    } catch (SolrServerException | IOException e) {
+      e.printStackTrace();
+    }
+    return docs;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/MetronSolrClientTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/MetronSolrClientTest.java b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/MetronSolrClientTest.java
new file mode 100644
index 0000000..da27594
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/MetronSolrClientTest.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.writer;
+
+import org.apache.metron.solr.writer.MetronSolrClient;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.util.NamedList;
+import org.hamcrest.Description;
+import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mockito;
+
+import java.util.ArrayList;
+
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
/**
 * Unit test for {@link MetronSolrClient#createCollection}: verifies that a
 * CREATE request is only issued when the LIST request does not already report
 * the collection. Uses a Mockito spy with stubbed {@code request(...)} calls;
 * the matcher identifies Collections API requests by their "action" parameter.
 */
public class MetronSolrClientTest {

  // Matches a QueryRequest whose "action" param equals the expected
  // CollectionAction name (LIST or CREATE).
  class CollectionRequestMatcher extends ArgumentMatcher<QueryRequest> {

    private String name;

    public CollectionRequestMatcher(String name) {
      this.name = name;
    }

    @Override
    public boolean matches(Object o) {
      QueryRequest queryRequest = (QueryRequest) o;
      return name.equals(queryRequest.getParams().get("action"));
    }

    @Override
    public void describeTo(Description description) {
      description.appendText(name);
    }
  }

  @Test
  public void testClient() throws Exception {

    final String collection = "metron";
    String zookeeperUrl = "zookeeperUrl";
    MetronSolrClient metronSolrClient = Mockito.spy(new MetronSolrClient(zookeeperUrl));

    // Case 1: LIST already contains the collection -> no CREATE issued.
    Mockito.doReturn(new NamedList<Object>() {{
      add("collections", new ArrayList<String>() {{
        add(collection);
      }});
    }}).when(metronSolrClient).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.LIST.name())), (String) isNull());
    metronSolrClient.createCollection(collection, 1, 1);
    verify(metronSolrClient, times(1)).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.LIST.name())), (String) isNull());
    verify(metronSolrClient, times(0)).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.CREATE.name())), (String) isNull());

    // Case 2: LIST is empty -> exactly one CREATE issued (fresh spy so the
    // verify counts start from zero).
    metronSolrClient = Mockito.spy(new MetronSolrClient(zookeeperUrl));
    Mockito.doReturn(new NamedList<Object>() {{
      add("collections", new ArrayList<String>());
    }}).when(metronSolrClient).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.LIST.name())), (String) isNull());
    Mockito.doReturn(new NamedList<>()).when(metronSolrClient).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.CREATE.name())), (String) isNull());
    metronSolrClient.createCollection(collection, 1, 1);
    verify(metronSolrClient, times(1)).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.LIST.name())), (String) isNull());
    verify(metronSolrClient, times(1)).request(argThat(new CollectionRequestMatcher(CollectionParams.CollectionAction.CREATE.name())), (String) isNull());
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/SolrWriterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/SolrWriterTest.java b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/SolrWriterTest.java
new file mode 100644
index 0000000..24c8eab
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/java/org/apache/metron/solr/writer/SolrWriterTest.java
@@ -0,0 +1,139 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.solr.writer;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.integration.utils.SampleUtil;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.common.SolrInputDocument;
+import org.hamcrest.Description;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mockito;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.mockito.Mockito.argThat;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+
+/**
+ * Unit tests for SolrWriter: verifies that init() creates and selects the
+ * Solr collection (using built-in defaults or solr.* values from the global
+ * config) and that write() converts JSON messages into SolrInputDocuments,
+ * committing only when shouldCommit is enabled. All Solr interaction goes
+ * through a mocked MetronSolrClient, so no live Solr instance is needed.
+ */
+public class SolrWriterTest {
+
+  // Matches a Solr collection-admin QueryRequest by its "action" parameter
+  // (e.g. LIST or CREATE), so verify() can distinguish admin calls.
+  class CollectionRequestMatcher extends ArgumentMatcher<QueryRequest> {
+
+    private String name;
+
+    public CollectionRequestMatcher(String name) {
+      this.name = name;
+    }
+
+    @Override
+    public boolean matches(Object o) {
+      QueryRequest queryRequest = (QueryRequest) o;
+      return name.equals(queryRequest.getParams().get("action"));
+    }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText(name);
+    }
+  }
+
+  // Matches a SolrInputDocument by the four fields SolrWriter is expected to
+  // populate: id, sensorType, intField_i and doubleField_d.
+  class SolrInputDocumentMatcher extends ArgumentMatcher<SolrInputDocument> {
+
+    private int expectedId;
+    private String expectedSourceType;
+    private int expectedInt;
+    private double expectedDouble;
+
+    public SolrInputDocumentMatcher(int expectedId, String expectedSourceType, int expectedInt, double expectedDouble) {
+      this.expectedId = expectedId;
+      this.expectedSourceType = expectedSourceType;
+      this.expectedInt = expectedInt;
+      this.expectedDouble = expectedDouble;
+    }
+
+    @Override
+    public boolean matches(Object o) {
+      SolrInputDocument solrInputDocument = (SolrInputDocument) o;
+      int actualId = (Integer) solrInputDocument.get("id").getValue();
+      String actualName = (String) solrInputDocument.get("sensorType").getValue();
+      int actualInt = (Integer) solrInputDocument.get("intField_i").getValue();
+      double actualDouble = (Double) solrInputDocument.get("doubleField_d").getValue();
+      // NOTE(review): exact == on doubles is acceptable here only because the
+      // expected values (100.0, 200.0) round-trip through JSON unchanged.
+      return expectedId == actualId && expectedSourceType.equals(actualName) && expectedInt == actualInt && expectedDouble == actualDouble;
+    }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText(String.format("fields: [id=%d, doubleField_d=%f, name=%s, intField_i=%d]", expectedId, expectedDouble, expectedSourceType, expectedInt));
+    }
+
+  }
+
+  @Test
+  public void testWriter() throws Exception {
+    Configurations configurations = SampleUtil.getSampleConfigs();
+    JSONObject message1 = new JSONObject();
+    message1.put("intField", 100);
+    message1.put("doubleField", 100.0);
+    JSONObject message2 = new JSONObject();
+    message2.put("intField", 200);
+    message2.put("doubleField", 200.0);
+    List<JSONObject> messages = new ArrayList<>();
+    messages.add(message1);
+    messages.add(message2);
+
+    // Phase 1: with no solr.* overrides in the global config, init() should
+    // create the default "metron" collection with 1 shard / 1 replica and
+    // select it as the default collection.
+    String collection = "metron";
+    MetronSolrClient solr = Mockito.mock(MetronSolrClient.class);
+    SolrWriter writer = new SolrWriter().withMetronSolrClient(solr);
+    writer.init(null, configurations);
+    verify(solr, times(1)).createCollection(collection, 1, 1);
+    verify(solr, times(1)).setDefaultCollection(collection);
+
+    // Phase 2: collection name, shard count and replication factor supplied
+    // through the global config should be honored by init().
+    collection = "metron2";
+    int numShards = 4;
+    int replicationFactor = 2;
+    Map<String, Object> globalConfig = configurations.getGlobalConfig();
+    globalConfig.put("solr.collection", collection);
+    globalConfig.put("solr.numShards", numShards);
+    globalConfig.put("solr.replicationFactor", replicationFactor);
+    configurations.updateGlobalConfig(globalConfig);
+    writer = new SolrWriter().withMetronSolrClient(solr);
+    writer.init(null, configurations);
+    verify(solr, times(1)).createCollection(collection, numShards, replicationFactor);
+    verify(solr, times(1)).setDefaultCollection(collection);
+
+    // Phase 3: write() should add one document per message (the document id
+    // is the hash code of the message's JSON string) and must NOT commit by
+    // default.
+    writer.write("test", configurations, new ArrayList<Tuple>(), messages);
+    verify(solr, times(1)).add(argThat(new SolrInputDocumentMatcher(message1.toJSONString().hashCode(), "test", 100, 100.0)));
+    verify(solr, times(1)).add(argThat(new SolrInputDocumentMatcher(message2.toJSONString().hashCode(), "test", 200, 200.0)));
+    verify(solr, times(0)).commit(collection);
+
+    // Phase 4: with shouldCommit(true) the same adds happen again (cumulative
+    // counts of 2) plus exactly one commit on the configured collection.
+    writer = new SolrWriter().withMetronSolrClient(solr).withShouldCommit(true);
+    writer.init(null, configurations);
+    writer.write("test", configurations, new ArrayList<Tuple>(), messages);
+    verify(solr, times(2)).add(argThat(new SolrInputDocumentMatcher(message1.toJSONString().hashCode(), "test", 100, 100.0)));
+    verify(solr, times(2)).add(argThat(new SolrInputDocumentMatcher(message2.toJSONString().hashCode(), "test", 200, 200.0)));
+    verify(solr, times(1)).commit(collection);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/log4j.properties b/metron-platform/metron-solr/src/test/resources/log4j.properties
new file mode 100644
index 0000000..0d50388
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/log4j.properties
@@ -0,0 +1,24 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+# Root logger option
+log4j.rootLogger=ERROR, stdout
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/log4j2.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/log4j2.xml b/metron-platform/metron-solr/src/test/resources/log4j2.xml
new file mode 100755
index 0000000..68d5eac
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/log4j2.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<configuration monitorInterval="60">
+  <Appenders>
+    <Console name="Console" target="SYSTEM_OUT">
+     <PatternLayout pattern="%-4r [%t] %-5p %c{1.} - %msg%n"/>
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="error">
+      <AppenderRef ref="Console"/>
+    </Root>
+  </Loggers>
+</configuration>
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/_rest_managed.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/_rest_managed.json b/metron-platform/metron-solr/src/test/resources/solr/conf/_rest_managed.json
new file mode 100644
index 0000000..6a4aec3
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/_rest_managed.json
@@ -0,0 +1 @@
+{"initArgs":{},"managedList":[]}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/currency.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/currency.xml b/metron-platform/metron-solr/src/test/resources/solr/conf/currency.xml
new file mode 100644
index 0000000..3a9c58a
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/currency.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" ?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Example exchange rates file for CurrencyField type named "currency" in example schema -->
+
+<currencyConfig version="1.0">
+  <rates>
+    <!-- Updated from http://www.exchangerate.com/ at 2011-09-27 -->
+    <rate from="USD" to="ARS" rate="4.333871" comment="ARGENTINA Peso" />
+    <rate from="USD" to="AUD" rate="1.025768" comment="AUSTRALIA Dollar" />
+    <rate from="USD" to="EUR" rate="0.743676" comment="European Euro" />
+    <rate from="USD" to="BRL" rate="1.881093" comment="BRAZIL Real" />
+    <rate from="USD" to="CAD" rate="1.030815" comment="CANADA Dollar" />
+    <rate from="USD" to="CLP" rate="519.0996" comment="CHILE Peso" />
+    <rate from="USD" to="CNY" rate="6.387310" comment="CHINA Yuan" />
+    <rate from="USD" to="CZK" rate="18.47134" comment="CZECH REP. Koruna" />
+    <rate from="USD" to="DKK" rate="5.515436" comment="DENMARK Krone" />
+    <rate from="USD" to="HKD" rate="7.801922" comment="HONG KONG Dollar" />
+    <rate from="USD" to="HUF" rate="215.6169" comment="HUNGARY Forint" />
+    <rate from="USD" to="ISK" rate="118.1280" comment="ICELAND Krona" />
+    <rate from="USD" to="INR" rate="49.49088" comment="INDIA Rupee" />
+    <rate from="USD" to="XDR" rate="0.641358" comment="INTNL MON. FUND SDR" />
+    <rate from="USD" to="ILS" rate="3.709739" comment="ISRAEL Sheqel" />
+    <rate from="USD" to="JPY" rate="76.32419" comment="JAPAN Yen" />
+    <rate from="USD" to="KRW" rate="1169.173" comment="KOREA (SOUTH) Won" />
+    <rate from="USD" to="KWD" rate="0.275142" comment="KUWAIT Dinar" />
+    <rate from="USD" to="MXN" rate="13.85895" comment="MEXICO Peso" />
+    <rate from="USD" to="NZD" rate="1.285159" comment="NEW ZEALAND Dollar" />
+    <rate from="USD" to="NOK" rate="5.859035" comment="NORWAY Krone" />
+    <rate from="USD" to="PKR" rate="87.57007" comment="PAKISTAN Rupee" />
+    <rate from="USD" to="PEN" rate="2.730683" comment="PERU Sol" />
+    <rate from="USD" to="PHP" rate="43.62039" comment="PHILIPPINES Peso" />
+    <rate from="USD" to="PLN" rate="3.310139" comment="POLAND Zloty" />
+    <rate from="USD" to="RON" rate="3.100932" comment="ROMANIA Leu" />
+    <rate from="USD" to="RUB" rate="32.14663" comment="RUSSIA Ruble" />
+    <rate from="USD" to="SAR" rate="3.750465" comment="SAUDI ARABIA Riyal" />
+    <rate from="USD" to="SGD" rate="1.299352" comment="SINGAPORE Dollar" />
+    <rate from="USD" to="ZAR" rate="8.329761" comment="SOUTH AFRICA Rand" />
+    <rate from="USD" to="SEK" rate="6.883442" comment="SWEDEN Krona" />
+    <rate from="USD" to="CHF" rate="0.906035" comment="SWITZERLAND Franc" />
+    <rate from="USD" to="TWD" rate="30.40283" comment="TAIWAN Dollar" />
+    <rate from="USD" to="THB" rate="30.89487" comment="THAILAND Baht" />
+    <rate from="USD" to="AED" rate="3.672955" comment="U.A.E. Dirham" />
+    <rate from="USD" to="UAH" rate="7.988582" comment="UKRAINE Hryvnia" />
+    <rate from="USD" to="GBP" rate="0.647910" comment="UNITED KINGDOM Pound" />
+    
+    <!-- Cross-rates for some common currencies -->
+    <rate from="EUR" to="GBP" rate="0.869914" />  
+    <rate from="EUR" to="NOK" rate="7.800095" />  
+    <rate from="GBP" to="NOK" rate="8.966508" />  
+  </rates>
+</currencyConfig>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/lang/stopwords_en.txt
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/lang/stopwords_en.txt b/metron-platform/metron-solr/src/test/resources/solr/conf/lang/stopwords_en.txt
new file mode 100644
index 0000000..2c164c0
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/lang/stopwords_en.txt
@@ -0,0 +1,54 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# a couple of test stopwords to test that the words are really being
+# configured from this file:
+stopworda
+stopwordb
+
+# Standard english stop words taken from Lucene's StopAnalyzer
+a
+an
+and
+are
+as
+at
+be
+but
+by
+for
+if
+in
+into
+is
+it
+no
+not
+of
+on
+or
+such
+that
+the
+their
+then
+there
+these
+they
+this
+to
+was
+will
+with

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-solr/src/test/resources/solr/conf/protwords.txt
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/src/test/resources/solr/conf/protwords.txt b/metron-platform/metron-solr/src/test/resources/solr/conf/protwords.txt
new file mode 100644
index 0000000..1dfc0ab
--- /dev/null
+++ b/metron-platform/metron-solr/src/test/resources/solr/conf/protwords.txt
@@ -0,0 +1,21 @@
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#-----------------------------------------------------------------------
+# Use a protected word file to protect against the stemmer reducing two
+# unrelated words to the same base word.
+
+# Some non-words that normally won't be encountered,
+# just to test that they won't be stemmed.
+dontstems
+zwhacky
+


[24/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/main/scripts/latency_summarizer.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/scripts/latency_summarizer.sh b/metron-platform/metron-enrichment/src/main/scripts/latency_summarizer.sh
new file mode 100755
index 0000000..d424ba1
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/main/scripts/latency_summarizer.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Launcher for the LatencySummarizer CLI: sets up the JVM environment and
+# runs org.apache.metron.enrichment.cli.LatencySummarizer from the Metron
+# topologies jar, forwarding all command-line arguments.
+
+# Source HBase defaults if the Bigtop defaults directory provides them.
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+# NOTE(review): version and jar name are hard-coded; keep them in sync with
+# the project build when the Metron version changes.
+export METRON_VERSION=0.1BETA
+export METRON_HOME=/usr/metron/$METRON_VERSION
+export TOPOLOGIES_JAR=Metron-Topologies-$METRON_VERSION.jar
+java -cp $METRON_HOME/lib/$TOPOLOGIES_JAR org.apache.metron.enrichment.cli.LatencySummarizer "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
new file mode 100644
index 0000000..ec90c49
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
@@ -0,0 +1,94 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.geo;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit test for GeoAdapter. The JDBC Statement is mocked so that the
+ * IPTOLOCID lookup and the follow-up location query return canned rows;
+ * enrich() is then expected to produce exactly the JSON held in
+ * expectedMessageString.
+ */
+public class GeoAdapterTest {
+
+
+  private String ip = "72.163.4.161";
+
+
+  // The Javadoc below is NOT documentation: the @Multiline processor turns
+  // its body into the value of expectedMessageString. Do not edit casually.
+  /**
+   * {
+   * "locID":"1",
+   * "country":"test country",
+   * "city":"test city",
+   * "postalCode":"test zip",
+   * "latitude":"test latitude",
+   * "longitude":"test longitude",
+   * "dmaCode":"test dma",
+   * "location_point":"test longitude,test latitude"
+   * }
+   */
+  @Multiline
+  private String expectedMessageString;
+
+  private JSONObject expectedMessage;
+
+  // NOTE(review): "statetment" is a typo for "statement"; rename in a
+  // follow-up change (it is a local field name only, so behavior is
+  // unaffected).
+  @Mock
+  Statement statetment;
+  @Mock
+  ResultSet resultSet, resultSet1;
+
+
+  @Before
+  public void setup() throws Exception {
+    JSONParser jsonParser = new JSONParser();
+    expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
+    MockitoAnnotations.initMocks(this);
+    // First query resolves the CacheKey (note: the full CacheKey toString is
+    // embedded in the SQL) to locID "1"; the second query returns the
+    // location row for that locID.
+    when(statetment.executeQuery("select IPTOLOCID(\"CacheKey{field='dummy', value='72.163.4.161'}\") as ANS")).thenReturn(resultSet);
+    when(statetment.executeQuery("select * from location where locID = 1")).thenReturn(resultSet1);
+    // next() yields exactly one row from each result set.
+    when(resultSet.next()).thenReturn(Boolean.TRUE, Boolean.FALSE);
+    when(resultSet.getString("ANS")).thenReturn("1");
+    when(resultSet1.next()).thenReturn(Boolean.TRUE, Boolean.FALSE);
+    when(resultSet1.getString("locID")).thenReturn("1");
+    when(resultSet1.getString("country")).thenReturn("test country");
+    when(resultSet1.getString("city")).thenReturn("test city");
+    when(resultSet1.getString("postalCode")).thenReturn("test zip");
+    when(resultSet1.getString("latitude")).thenReturn("test latitude");
+    when(resultSet1.getString("longitude")).thenReturn("test longitude");
+    when(resultSet1.getString("dmaCode")).thenReturn("test dma");
+  }
+
+
+  @Test
+  public void testEnrich() throws Exception {
+    GeoAdapter geo = new GeoAdapter();
+    geo.setStatement(statetment);
+    JSONObject actualMessage = geo.enrich(new CacheKey("dummy", ip, null));
+    Assert.assertNotNull(actualMessage.get("locID"));
+    Assert.assertEquals(expectedMessage, actualMessage);
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapterTest.java
new file mode 100644
index 0000000..c448f1e
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromJSONListAdapterTest.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.host;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+/**
+ * Unit test for HostFromJSONListAdapter: a known-hosts list supplied as a
+ * JSON array is loaded by the adapter, and enrich() is expected to return
+ * the host's attributes flattened under "known_info." keys for a known IP,
+ * and an empty JSON object for an unknown IP.
+ */
+public class HostFromJSONListAdapterTest {
+
+
+  // The Javadoc below is consumed by @Multiline as the field's value; it is
+  // the known-hosts fixture, not documentation.
+  /**
+   * [
+   * {"ip":"10.1.128.236", "local":"YES", "type":"webserver", "asset_value" : "important"},
+   * {"ip":"10.1.128.237", "local":"UNKNOWN", "type":"unknown", "asset_value" : "important"},
+   * {"ip":"10.60.10.254", "local":"YES", "type":"printer", "asset_value" : "important"},
+   * {"ip":"10.0.2.15", "local":"YES", "type":"printer", "asset_value" : "important"}
+   * ]
+   */
+  @Multiline
+  private String expectedKnownHostsString;
+
+  // @Multiline fixture: the expected enrichment for ip 10.0.2.15, with host
+  // attributes flattened under the "known_info." prefix.
+  /**
+   * {
+   * "known_info.local":"YES",
+   * "known_info.type":"printer",
+   * "known_info.asset_value" : "important"
+   * }
+   */
+  @Multiline
+  private String expectedMessageString;
+
+  private JSONObject expectedMessage;
+  private String ip = "10.0.2.15";
+  private String ip1 = "10.0.22.22";
+
+
+  @Before
+  public void parseJSON() throws ParseException {
+    JSONParser jsonParser = new JSONParser();
+    expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
+  }
+
+  @Test
+  public void testEnrich() throws Exception {
+    HostFromJSONListAdapter hja = new HostFromJSONListAdapter(expectedKnownHostsString);
+    // Known IP: the adapter returns the flattened known_info.* attributes.
+    JSONObject actualMessage = hja.enrich(new CacheKey("dummy", ip, null));
+    Assert.assertNotNull(actualMessage);
+    Assert.assertEquals(expectedMessage, actualMessage);
+    // Unknown IP: the adapter returns an empty JSON object, not null.
+    actualMessage = hja.enrich(new CacheKey("dummy", ip1, null));
+    JSONObject emptyJson = new JSONObject();
+    Assert.assertEquals(emptyJson, actualMessage);
+  }
+
+
+  @Test
+  public void testInitializeAdapter() throws Exception {
+    HostFromJSONListAdapter hja = new HostFromJSONListAdapter(expectedKnownHostsString);
+    Assert.assertTrue(hja.initializeAdapter());
+  }
+
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapterTest.java
new file mode 100644
index 0000000..ea5cabe
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/host/HostFromPropertiesFileAdapterTest.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.host;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.json.simple.JSONArray;
+import org.json.simple.JSONObject;
+import org.json.simple.JSONValue;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+
+
+/**
+ * Unit test for HostFromPropertiesFileAdapter: a map of ip -> host attributes
+ * is built from a JSON fixture; enrich() should return the attributes nested
+ * under a "known_info" key for a known IP and an empty object otherwise, and
+ * initializeAdapter() should succeed only when the map is non-empty.
+ */
+public class HostFromPropertiesFileAdapterTest {
+
+    // @Multiline fixture (the Javadoc body IS the field value): the list of
+    // known hosts keyed by "ip".
+    /**
+     * [
+     * {"ip":"10.1.128.236", "local":"YES", "type":"webserver", "asset_value" : "important"},
+     * {"ip":"10.1.128.237", "local":"UNKNOWN", "type":"unknown", "asset_value" : "important"},
+     * {"ip":"10.60.10.254", "local":"YES", "type":"printer", "asset_value" : "important"},
+     * {"ip":"10.0.2.15", "local":"YES", "type":"printer", "asset_value" : "important"}
+     * ]
+     */
+    @Multiline
+    private String expectedKnownHostsString;
+
+    // @Multiline fixture: expected enrichment for ip 10.0.2.15 — here the
+    // attributes are NESTED under "known_info", unlike the flattened form
+    // produced by HostFromJSONListAdapter.
+    /**
+     * {
+     * "known_info":
+     * {"asset_value":"important",
+     * "type":"printer","local":"YES"
+     * }
+     * }
+     */
+    @Multiline
+    private String expectedMessageString;
+
+    private JSONObject expectedMessage;
+    private String ip = "10.0.2.15";
+    private String ip1 = "10.0.22.22";
+
+    @Before
+    public void parseJSON() throws ParseException {
+        JSONParser jsonParser = new JSONParser();
+        expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
+    }
+
+    @Test
+    public void testEnrich() throws Exception {
+        // Build the ip -> attributes map the adapter expects: the "ip" entry
+        // is removed from each object and becomes the map key.
+        Map<String, JSONObject> mapKnownHosts = new HashMap<>();
+        JSONArray jsonArray = (JSONArray) JSONValue.parse(expectedKnownHostsString);
+        Iterator jsonArrayIterator = jsonArray.iterator();
+        while(jsonArrayIterator.hasNext()) {
+            JSONObject jsonObject = (JSONObject) jsonArrayIterator.next();
+            String host = (String) jsonObject.remove("ip");
+            mapKnownHosts.put(host, jsonObject);
+        }
+        HostFromPropertiesFileAdapter hfa = new HostFromPropertiesFileAdapter(mapKnownHosts);
+        // Known IP: attributes returned nested under "known_info".
+        JSONObject actualMessage = hfa.enrich(new CacheKey("dummy", ip, null));
+        Assert.assertNotNull(actualMessage);
+        Assert.assertEquals(expectedMessage, actualMessage);
+        // Unknown IP: an empty JSON object, not null.
+        actualMessage = hfa.enrich(new CacheKey("dummy", ip1, null));
+        JSONObject emptyJson = new JSONObject();
+        Assert.assertEquals(emptyJson, actualMessage);
+    }
+
+
+    @Test
+    public void testInitializeAdapter() throws Exception {
+        // An empty known-hosts map must fail initialization...
+        Map<String, JSONObject> mapKnownHosts = new HashMap<>();
+        HostFromPropertiesFileAdapter hfa = new HostFromPropertiesFileAdapter(mapKnownHosts);
+        Assert.assertFalse(hfa.initializeAdapter());
+        // ...while a populated map must succeed.
+        JSONArray jsonArray = (JSONArray) JSONValue.parse(expectedKnownHostsString);
+        Iterator jsonArrayIterator = jsonArray.iterator();
+        while(jsonArrayIterator.hasNext()) {
+            JSONObject jsonObject = (JSONObject) jsonArrayIterator.next();
+            String host = (String) jsonObject.remove("ip");
+            mapKnownHosts.put(host, jsonObject);
+        }
+        hfa = new HostFromPropertiesFileAdapter(mapKnownHosts);
+        Assert.assertTrue(hfa.initializeAdapter());
+    }
+
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfigTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfigTest.java
new file mode 100644
index 0000000..2ae2ff0
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/jdbc/MySqlConfigTest.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.jdbc;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MySqlConfigTest {
+
+  private String sampleURL = "jdbc:mysql://10.22.0.214:3306/GEO?user=root&password=hadoop123";
+  private MySqlConfig conn;
+
+  @Before
+  public void setupJdbc() {
+    conn = new MySqlConfig();
+    conn.setHost("10.22.0.214");
+    conn.setPort(3306);
+    conn.setTable("GEO");
+    conn.setUsername("root");
+    conn.setPassword("hadoop123");
+  }
+
+  @Test
+  public void testGetJdbcUrl() throws Exception {
+    Assert.assertEquals(sampleURL, conn.getJdbcUrl());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapterTest.java
new file mode 100644
index 0000000..1c79f12
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseAdapterTest.java
@@ -0,0 +1,117 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.simplehbase;
+
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.EnrichmentLookup;
+import org.apache.metron.enrichment.converter.EnrichmentHelper;
+import org.apache.metron.test.mock.MockHTable;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.accesstracker.BloomAccessTracker;
+import org.apache.metron.enrichment.lookup.accesstracker.PersistentAccessTracker;
+import org.apache.metron.common.utils.JSONUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+public class SimpleHBaseAdapterTest {
+
+  private String cf = "cf";
+  private String atTableName = "tracker";
+  private final String hbaseTableName = "enrichments";
+  private EnrichmentLookup lookup;
+  private static final String PLAYFUL_CLASSIFICATION_TYPE = "playful_classification";
+  private static final Map<String, String> PLAYFUL_ENRICHMENT = new HashMap<String, String>() {{
+    put("orientation", "north");
+  }};
+
+  /**
+   * {
+   * "10.0.2.3.orientation":"north"
+   * }
+   */
+  @Multiline
+  private String expectedMessageString;
+
+  /**
+   * {
+   * "index": "bro",
+   * "batchSize": 5,
+   * "enrichmentFieldMap": {
+   * "geo": ["ip_dst_addr", "ip_src_addr"],
+   * "host": ["host"]
+   * },
+   * "fieldToEnrichmentTypeMap": {
+   * "ip_dst_addr" : [ "10.0.2.3" ],
+   * "ip_src_addr" : [ "10.3.30.120" ]
+   * }
+   * }
+   */
+  @Multiline
+  private String sourceConfigStr;
+
+  private JSONObject expectedMessage;
+
+  @Before
+  public void setup() throws Exception {
+    final MockHTable trackerTable = (MockHTable) MockHTable.Provider.addToCache(atTableName, cf);
+    final MockHTable hbaseTable = (MockHTable) MockHTable.Provider.addToCache(hbaseTableName, cf);
+    EnrichmentHelper.INSTANCE.load(hbaseTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {{
+      add(new LookupKV<>(new EnrichmentKey("10.0.2.3", "10.0.2.3")
+                      , new EnrichmentValue(PLAYFUL_ENRICHMENT)
+              )
+      );
+    }});
+    BloomAccessTracker bat = new BloomAccessTracker(hbaseTableName, 100, 0.03);
+    PersistentAccessTracker pat = new PersistentAccessTracker(hbaseTableName, "0", trackerTable, cf, bat, 0L);
+    lookup = new EnrichmentLookup(hbaseTable, cf, pat);
+    JSONParser jsonParser = new JSONParser();
+    expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
+  }
+
+  @Test
+  public void testEnrich() throws Exception {
+    SimpleHBaseAdapter sha = new SimpleHBaseAdapter();
+    sha.lookup = lookup;
+    SensorEnrichmentConfig broSc = JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+    JSONObject actualMessage = sha.enrich(new CacheKey("test", "test", broSc));
+    Assert.assertEquals(actualMessage, new JSONObject());
+    actualMessage = sha.enrich(new CacheKey("ip_dst_addr", "10.0.2.3", broSc));
+    Assert.assertNotNull(actualMessage);
+    Assert.assertEquals(expectedMessage, actualMessage);
+  }
+
+  @Test(expected = Exception.class)
+  public void testInitializeAdapter() {
+    SimpleHBaseConfig config = new SimpleHBaseConfig();
+    SimpleHBaseAdapter sha = new SimpleHBaseAdapter(config);
+    sha.initializeAdapter();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfigTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfigTest.java
new file mode 100644
index 0000000..832a939
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/simplehbase/SimpleHBaseConfigTest.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.simplehbase;
+
+import org.apache.metron.hbase.HTableProvider;
+import org.apache.metron.hbase.TableProvider;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SimpleHBaseConfigTest {
+
+
+    private String cf ="cf";
+    private String table = "threatintel";
+    private TableProvider provider;
+
+    @Test
+    public void test(){
+        SimpleHBaseConfig shc = new SimpleHBaseConfig();
+        shc.withHBaseCF(cf);
+        shc.withHBaseTable(table);
+        provider = new HTableProvider();
+        Assert.assertEquals(cf, shc.getHBaseCF());
+        Assert.assertEquals(table, shc.getHBaseTable());
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapterTest.java
new file mode 100644
index 0000000..62c8b43
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelAdapterTest.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.threatintel;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.enrichment.bolt.CacheKey;
+import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.EnrichmentLookup;
+import org.apache.metron.enrichment.converter.EnrichmentHelper;
+import org.apache.metron.test.mock.MockHTable;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.enrichment.lookup.accesstracker.BloomAccessTracker;
+import org.apache.metron.enrichment.lookup.accesstracker.PersistentAccessTracker;
+import org.apache.metron.common.utils.JSONUtils;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+
+public class ThreatIntelAdapterTest {
+
+  public static class ExceptionProvider implements TableProvider {
+
+    public ExceptionProvider() {};
+
+    @Override
+    public HTableInterface getTable(Configuration config, String tableName) throws IOException {
+      throw new IOException();
+    }
+  }
+
+  private String cf = "cf";
+  private String atTableName = "tracker";
+  private static final String MALICIOUS_IP_TYPE = "malicious_ip";
+  private final String threatIntelTableName = "threat_intel";
+  private EnrichmentLookup lookup;
+
+  /**
+   * {
+   * "10.0.2.3":"alert"
+   * }
+   */
+  @Multiline
+  private String expectedMessageString;
+
+  /**
+   * {
+   * "index": "bro",
+   * "batchSize": 5,
+   * "enrichmentFieldMap": {
+   * "geo": ["ip_dst_addr", "ip_src_addr"],
+   * "host": ["host"]
+   * },
+   * "threatIntelFieldMap": {
+   * "hbaseThreatIntel": ["ip_dst_addr", "ip_src_addr"]
+   * },
+   * "fieldToThreatIntelTypeMap": {
+   * "ip_dst_addr" : [ "10.0.2.3" ],
+   * "ip_src_addr" : [ "malicious_ip" ]
+   * }
+   * }
+   */
+  @Multiline
+  private static String sourceConfigStr;
+
+  private JSONObject expectedMessage;
+
+  @Before
+  public void setup() throws Exception {
+
+    final MockHTable trackerTable = (MockHTable) MockHTable.Provider.addToCache(atTableName, cf);
+    final MockHTable threatIntelTable = (MockHTable) MockHTable.Provider.addToCache(threatIntelTableName, cf);
+    EnrichmentHelper.INSTANCE.load(threatIntelTable, cf, new ArrayList<LookupKV<EnrichmentKey, EnrichmentValue>>() {{
+      add(new LookupKV<>(new EnrichmentKey("10.0.2.3", "10.0.2.3"), new EnrichmentValue(new HashMap<String, String>())));
+    }});
+
+    BloomAccessTracker bat = new BloomAccessTracker(threatIntelTableName, 100, 0.03);
+    PersistentAccessTracker pat = new PersistentAccessTracker(threatIntelTableName, "0", trackerTable, cf, bat, 0L);
+    lookup = new EnrichmentLookup(threatIntelTable, cf, pat);
+    JSONParser jsonParser = new JSONParser();
+    expectedMessage = (JSONObject) jsonParser.parse(expectedMessageString);
+  }
+
+
+  @Test
+  public void testEnrich() throws Exception {
+    ThreatIntelAdapter tia = new ThreatIntelAdapter();
+    tia.lookup = lookup;
+    SensorEnrichmentConfig broSc = JSONUtils.INSTANCE.load(sourceConfigStr, SensorEnrichmentConfig.class);
+    JSONObject actualMessage = tia.enrich(new CacheKey("ip_dst_addr", "10.0.2.3", broSc));
+    Assert.assertNotNull(actualMessage);
+    Assert.assertEquals(expectedMessage, actualMessage);
+  }
+
+  @Test(expected = IllegalStateException.class)
+  public void testInitializeAdapter() {
+
+    String cf = "cf";
+    String table = "threatintel";
+    String trackCf = "cf";
+    String trackTable = "Track";
+    double falsePositive = 0.03;
+    int expectedInsertion = 1;
+    long millionseconds = (long) 0.1;
+
+    ThreatIntelConfig config = new ThreatIntelConfig();
+    config.withHBaseCF(cf);
+    config.withHBaseTable(table);
+    config.withExpectedInsertions(expectedInsertion);
+    config.withFalsePositiveRate(falsePositive);
+    config.withMillisecondsBetweenPersists(millionseconds);
+    config.withTrackerHBaseCF(trackCf);
+    config.withTrackerHBaseTable(trackTable);
+    config.withProviderImpl(ExceptionProvider.class.getName());
+
+    ThreatIntelAdapter tia = new ThreatIntelAdapter(config);
+    tia.initializeAdapter();
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfigTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfigTest.java
new file mode 100644
index 0000000..58fd803
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/threatintel/ThreatIntelConfigTest.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.adapters.threatintel;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class ThreatIntelConfigTest {
+
+  private String cf = "cf";
+  private String table = "threatintel";
+  private String trackCf = "cf";
+  private String trackTable = "Track";
+  private double falsePositive = 0.03;
+  private int expectedInsertion = 1;
+  private long millionseconds = (long) 0.1;
+
+  @Test
+  public void test() {
+    ThreatIntelConfig tic = new ThreatIntelConfig();
+    tic.withHBaseCF(cf);
+    tic.withHBaseTable(table);
+    tic.withExpectedInsertions(expectedInsertion);
+    tic.withFalsePositiveRate(falsePositive);
+    tic.withMillisecondsBetweenPersists(millionseconds);
+    tic.withTrackerHBaseCF(trackCf);
+    tic.withTrackerHBaseTable(trackTable);
+
+    Assert.assertEquals(cf, tic.getHBaseCF());
+    Assert.assertEquals(table, tic.getHBaseTable());
+    Assert.assertEquals(trackCf, tic.getTrackerHBaseCF());
+    Assert.assertEquals(trackTable, tic.getTrackerHBaseTable());
+    Assert.assertEquals(expectedInsertion, tic.getExpectedInsertions());
+    Assert.assertEquals(millionseconds, tic.getMillisecondsBetweenPersists());
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBoltTest.java
new file mode 100644
index 0000000..bcf4d04
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/BulkMessageWriterBoltTest.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.common.Constants;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.interfaces.BulkMessageWriter;
+import org.hamcrest.Description;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Matchers;
+import org.mockito.Mock;
+
+import java.io.FileInputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+public class BulkMessageWriterBoltTest extends BaseEnrichmentBoltTest {
+
+  protected class MessageListMatcher extends ArgumentMatcher<List<JSONObject>> {
+
+    private List<JSONObject> expectedMessageList;
+
+    public MessageListMatcher(List<JSONObject> expectedMessageList) {
+      this.expectedMessageList = expectedMessageList;
+    }
+
+    @Override
+    public boolean matches(Object o) {
+      List<JSONObject> actualMessageList = (List<JSONObject>) o;
+      for(JSONObject message: actualMessageList) removeTimingFields(message);
+      return expectedMessageList.equals(actualMessageList);
+    }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText(String.format("[%s]", expectedMessageList));
+    }
+
+  }
+
+  /**
+   * {
+   * "field": "value",
+   * "source.type": "yaf"
+   * }
+   */
+  @Multiline
+  private String sampleMessageString;
+
+  private JSONObject sampleMessage;
+  private List<JSONObject> messageList;
+  private List<Tuple> tupleList;
+
+  @Before
+  public void parseMessages() throws ParseException {
+    JSONParser parser = new JSONParser();
+    sampleMessage = (JSONObject) parser.parse(sampleMessageString);
+    sampleMessage.put("field", "value1");
+    messageList = new ArrayList<>();
+    messageList.add(((JSONObject) sampleMessage.clone()));
+    sampleMessage.put("field", "value2");
+    messageList.add(((JSONObject) sampleMessage.clone()));
+    sampleMessage.put("field", "value3");
+    messageList.add(((JSONObject) sampleMessage.clone()));
+    sampleMessage.put("field", "value4");
+    messageList.add(((JSONObject) sampleMessage.clone()));
+    sampleMessage.put("field", "value5");
+    messageList.add(((JSONObject) sampleMessage.clone()));
+  }
+
+  @Mock
+  private BulkMessageWriter<JSONObject> bulkMessageWriter;
+
+  @Test
+  public void test() throws Exception {
+    BulkMessageWriterBolt bulkMessageWriterBolt = new BulkMessageWriterBolt("zookeeperUrl").withBulkMessageWriter(bulkMessageWriter);
+    bulkMessageWriterBolt.setCuratorFramework(client);
+    bulkMessageWriterBolt.setTreeCache(cache);
+    bulkMessageWriterBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    bulkMessageWriterBolt.declareOutputFields(declarer);
+    verify(declarer, times(1)).declareStream(eq("error"), argThat(new FieldsMatcher("message")));
+    Map stormConf = new HashMap();
+    doThrow(new Exception()).when(bulkMessageWriter).init(eq(stormConf), any(Configurations.class));
+    try {
+      bulkMessageWriterBolt.prepare(stormConf, topologyContext, outputCollector);
+      fail("A runtime exception should be thrown when bulkMessageWriter.init throws an exception");
+    } catch(RuntimeException e) {}
+    reset(bulkMessageWriter);
+    bulkMessageWriterBolt.prepare(stormConf, topologyContext, outputCollector);
+    verify(bulkMessageWriter, times(1)).init(eq(stormConf), any(Configurations.class));
+    tupleList = new ArrayList<>();
+    for(int i = 0; i < 4; i++) {
+      when(tuple.getValueByField("message")).thenReturn(messageList.get(i));
+      tupleList.add(tuple);
+      bulkMessageWriterBolt.execute(tuple);
+      verify(bulkMessageWriter, times(0)).write(eq(sensorType), any(Configurations.class), eq(tupleList), eq(messageList));
+    }
+    when(tuple.getValueByField("message")).thenReturn(messageList.get(4));
+    tupleList.add(tuple);
+    bulkMessageWriterBolt.execute(tuple);
+    verify(bulkMessageWriter, times(1)).write(eq(sensorType), any(Configurations.class), eq(tupleList), argThat(new MessageListMatcher(messageList)));
+    verify(outputCollector, times(5)).ack(tuple);
+    reset(outputCollector);
+    doThrow(new Exception()).when(bulkMessageWriter).write(eq(sensorType), any(Configurations.class), Matchers.anyListOf(Tuple.class), Matchers.anyListOf(JSONObject.class));
+    when(tuple.getValueByField("message")).thenReturn(messageList.get(0));
+    for(int i = 0; i < 5; i++) {
+      bulkMessageWriterBolt.execute(tuple);
+    }
+    verify(outputCollector, times(0)).ack(tuple);
+    verify(outputCollector, times(5)).fail(tuple);
+    verify(outputCollector, times(1)).emit(eq(Constants.ERROR_STREAM), any(Values.class));
+    verify(outputCollector, times(1)).reportError(any(Throwable.class));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBoltTest.java
new file mode 100644
index 0000000..f760e5a
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentJoinBoltTest.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+public class EnrichmentJoinBoltTest extends BaseEnrichmentBoltTest {
+
+  /**
+   * {
+   * "enrichedField": "enrichedValue",
+   * "emptyEnrichedField": ""
+   * }
+   */
+  @Multiline
+  private String enrichedMessageString;
+
+  /**
+   * {
+   * "ip_src_addr": "ip1",
+   * "ip_dst_addr": "ip2",
+   * "source.type": "yaf",
+   * "enrichedField": "enrichedValue"
+   * }
+   */
+  @Multiline
+  private String expectedJoinedMessageString;
+
+  private JSONObject enrichedMessage;
+  private JSONObject expectedJoinedMessage;
+
+  @Before
+  public void parseMessages() throws ParseException {
+    JSONParser parser = new JSONParser();
+    enrichedMessage = (JSONObject) parser.parse(enrichedMessageString);
+    expectedJoinedMessage = (JSONObject) parser.parse(expectedJoinedMessageString);
+  }
+
+  @Test
+  public void test() throws IOException {
+    EnrichmentJoinBolt enrichmentJoinBolt = new EnrichmentJoinBolt("zookeeperUrl");
+    enrichmentJoinBolt.setCuratorFramework(client);
+    enrichmentJoinBolt.setTreeCache(cache);
+    enrichmentJoinBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    enrichmentJoinBolt.withMaxCacheSize(100);
+    enrichmentJoinBolt.withMaxTimeRetain(10000);
+    enrichmentJoinBolt.prepare(new HashMap<>(), topologyContext, outputCollector);
+    Set<String> actualStreamIds = enrichmentJoinBolt.getStreamIds(sampleMessage);
+    streamIds.add("message");
+    Assert.assertEquals(streamIds, actualStreamIds);
+    Map<String, JSONObject> streamMessageMap = new HashMap<>();
+    streamMessageMap.put("message", sampleMessage);
+    streamMessageMap.put("enriched", enrichedMessage);
+    JSONObject joinedMessage = enrichmentJoinBolt.joinMessages(streamMessageMap);
+    removeTimingFields(joinedMessage);
+    Assert.assertEquals(expectedJoinedMessage, joinedMessage);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBoltTest.java
new file mode 100644
index 0000000..67b12b9
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/EnrichmentSplitterBoltTest.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import com.google.common.collect.ImmutableSet;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.apache.metron.enrichment.configuration.Enrichment;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.ParseException;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.mockito.Mockito.when;
+
+
+public class EnrichmentSplitterBoltTest extends BaseEnrichmentBoltTest {
+
+  @Test
+  public void test() throws ParseException, IOException {
+    final Enrichment geo = new Enrichment();
+    geo.setType("geo");
+    final Enrichment host = new Enrichment();
+    host.setType("host");
+    final Enrichment hbaseEnrichment = new Enrichment();
+    hbaseEnrichment.setType("hbaseEnrichment");
+    List<Enrichment> enrichments = new ArrayList<Enrichment>() {{
+      add(geo);
+      add(host);
+      add(hbaseEnrichment);
+    }};
+
+    EnrichmentSplitterBolt enrichmentSplitterBolt = new EnrichmentSplitterBolt("zookeeperUrl").withEnrichments(enrichments);
+    enrichmentSplitterBolt.setCuratorFramework(client);
+    enrichmentSplitterBolt.setTreeCache(cache);
+    enrichmentSplitterBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    enrichmentSplitterBolt.prepare(new HashMap<>(), topologyContext, outputCollector);
+
+    String key = enrichmentSplitterBolt.getKey(tuple, sampleMessage);
+    Assert.assertTrue(key != null && key.length() == 36);
+    String someKey = "someKey";
+    when(tuple.getStringByField("key")).thenReturn(someKey);
+    key = enrichmentSplitterBolt.getKey(tuple, sampleMessage);
+    Assert.assertEquals(someKey, key);
+    when(tuple.getBinary(0)).thenReturn(sampleMessageString.getBytes());
+    JSONObject generatedMessage = enrichmentSplitterBolt.generateMessage(tuple);
+    removeTimingFields(generatedMessage);
+    Assert.assertEquals(sampleMessage, generatedMessage);
+    String messageFieldName = "messageFieldName";
+    enrichmentSplitterBolt.withMessageFieldName(messageFieldName);
+    when(tuple.getValueByField(messageFieldName)).thenReturn(sampleMessage);
+    generatedMessage = enrichmentSplitterBolt.generateMessage(tuple);
+    Assert.assertEquals(sampleMessage, generatedMessage);
+    Set<String> actualStreamIds = enrichmentSplitterBolt.getStreamIds();
+    Assert.assertEquals(streamIds, actualStreamIds);
+
+    Map<String, JSONObject> actualSplitMessages = enrichmentSplitterBolt.splitMessage(sampleMessage);
+    Assert.assertEquals(3, actualSplitMessages.size());
+    Assert.assertEquals(geoMessage, actualSplitMessages.get("geo"));
+    Assert.assertEquals(hostMessage, actualSplitMessages.get("host"));
+    Assert.assertEquals(hbaseEnrichmentMessage, actualSplitMessages.get("hbaseEnrichment"));
+
+
+  }
+
+  public void removeTimingFields(JSONObject message) {
+    ImmutableSet keys = ImmutableSet.copyOf(message.keySet());
+    for(Object key: keys) {
+      if (key.toString().contains("splitter.begin.ts")) {
+        message.remove(key);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBoltTest.java
new file mode 100644
index 0000000..d5a90fb
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/GenericEnrichmentBoltTest.java
@@ -0,0 +1,196 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.tuple.Values;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.TestConstants;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.apache.metron.enrichment.configuration.Enrichment;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.enrichment.interfaces.EnrichmentAdapter;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+import org.hamcrest.Description;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.ArgumentMatcher;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.reset;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit test for GenericEnrichmentBolt. Verifies that prepare() fails fast
+ * until maxCacheSize, maxTimeRetain and an enrichment adapter are configured,
+ * that output streams are declared correctly, that a null key routes the
+ * tuple to the "error" stream, and that adapter enrichment results are
+ * prefixed with the source field name and emitted on the enrichment stream.
+ */
+public class GenericEnrichmentBoltTest extends BaseEnrichmentBoltTest {
+
+  /**
+   * Mockito matcher for an emitted (key, message) Values pair: compares the
+   * key and JSON message against expected values after removing the
+   * non-deterministic splitter timing fields.
+   */
+  protected class EnrichedMessageMatcher extends ArgumentMatcher<Values> {
+
+    private String expectedKey;
+    private JSONObject expectedMessage;
+
+    public EnrichedMessageMatcher(String expectedKey, JSONObject expectedMessage) {
+      this.expectedKey = expectedKey;
+      this.expectedMessage = expectedMessage;
+    }
+
+    @Override
+    public boolean matches(Object o) {
+      Values values = (Values) o;
+      String actualKey = (String) values.get(0);
+      JSONObject actualMessage = (JSONObject) values.get(1);
+      // Timing fields vary per run; drop them before the equality check.
+      removeTimingFields(actualMessage);
+      return expectedKey.equals(actualKey) && expectedMessage.equals(actualMessage);
+    }
+
+    @Override
+    public void describeTo(Description description) {
+      description.appendText(String.format("[%s]", expectedMessage));
+    }
+
+  }
+
+  // NOTE: the javadoc bodies below are consumed by the @Multiline annotation
+  // processor as the fields' string values — they are data, not documentation.
+  /**
+   {
+   "field1": "value1",
+   "field2": "value2",
+   "source.type": "yaf"
+   }
+   */
+  @Multiline
+  private String originalMessageString;
+
+  /**
+   {
+   "enrichedField1": "enrichedValue1"
+   }
+   */
+  @Multiline
+  private String enrichedField1String;
+
+  /**
+   {
+   "enrichedField2": "enrichedValue2"
+   }
+   */
+  @Multiline
+  private String enrichedField2String;
+
+  /**
+   {
+   "field1.enrichedField1": "enrichedValue1",
+   "field2.enrichedField2": "enrichedValue2",
+   "source.type": "yaf"
+   }
+   */
+  @Multiline
+  private String enrichedMessageString;
+
+  private JSONObject originalMessage;
+  private JSONObject enrichedField1;
+  private JSONObject enrichedField2;
+  private JSONObject enrichedMessage;
+
+  /** Parses the @Multiline JSON fixtures into JSONObjects before each test. */
+  @Before
+  public void parseMessages() throws ParseException {
+    JSONParser parser = new JSONParser();
+    originalMessage = (JSONObject) parser.parse(originalMessageString);
+    enrichedField1 = (JSONObject) parser.parse(enrichedField1String);
+    enrichedField2 = (JSONObject) parser.parse(enrichedField2String);
+    enrichedMessage = (JSONObject) parser.parse(enrichedMessageString);
+  }
+
+  @Mock
+  public EnrichmentAdapter<CacheKey> enrichmentAdapter;
+
+  @Before
+  public void initMocks() {
+    MockitoAnnotations.initMocks(this);
+  }
+
+  @Test
+  public void test() throws IOException {
+    String key = "someKey";
+    String enrichmentType = "enrichmentType";
+    Enrichment<EnrichmentAdapter<CacheKey>> testEnrichment = new Enrichment<>();
+    testEnrichment.setType(enrichmentType);
+    testEnrichment.setAdapter(enrichmentAdapter);
+    GenericEnrichmentBolt genericEnrichmentBolt = new GenericEnrichmentBolt("zookeeperUrl");
+    genericEnrichmentBolt.setCuratorFramework(client);
+    genericEnrichmentBolt.setTreeCache(cache);
+    genericEnrichmentBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    // prepare() must reject a bolt that is missing any required property.
+    try {
+      genericEnrichmentBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("Should fail if a maxCacheSize property is not set");
+    } catch(IllegalStateException e) {}
+    genericEnrichmentBolt.withMaxCacheSize(100);
+    try {
+      genericEnrichmentBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("Should fail if a maxTimeRetain property is not set");
+    } catch(IllegalStateException e) {}
+    genericEnrichmentBolt.withMaxTimeRetain(10000);
+    try {
+      genericEnrichmentBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("Should fail if an adapter is not set");
+    } catch(IllegalStateException e) {}
+    genericEnrichmentBolt.withEnrichment(testEnrichment);
+    // With all properties set, prepare() should initialize the adapter once.
+    when(enrichmentAdapter.initializeAdapter()).thenReturn(true);
+    genericEnrichmentBolt.prepare(new HashMap(), topologyContext, outputCollector);
+    verify(enrichmentAdapter, times(1)).initializeAdapter();
+    when(enrichmentAdapter.initializeAdapter()).thenReturn(false);
+    try {
+      genericEnrichmentBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("An exception should be thrown if enrichment adapter initialization fails");
+    } catch(IllegalStateException e) {}
+    // One stream per enrichment type plus a dedicated error stream.
+    genericEnrichmentBolt.declareOutputFields(declarer);
+    verify(declarer, times(1)).declareStream(eq(enrichmentType), argThat(new FieldsMatcher("key", "message")));
+    verify(declarer, times(1)).declareStream(eq("error"), argThat(new FieldsMatcher("message")));
+    // A tuple without a key is routed to the error stream.
+    when(tuple.getStringByField("key")).thenReturn(null);
+    genericEnrichmentBolt.execute(tuple);
+    verify(outputCollector, times(1)).emit(eq("error"), any(Values.class));
+    // With no adapter results, the emitted message is empty.
+    when(tuple.getStringByField("key")).thenReturn(key);
+    when(tuple.getValueByField("message")).thenReturn(originalMessage);
+    genericEnrichmentBolt.execute(tuple);
+    verify(outputCollector, times(1)).emit(eq(enrichmentType), argThat(new EnrichedMessageMatcher(key, new JSONObject())));
+    reset(enrichmentAdapter);
+
+    // Adapter results for field1/field2 should appear in the output message
+    // under "<field>.<enrichedKey>" names, and each lookup should be logged.
+    SensorEnrichmentConfig sensorEnrichmentConfig = SensorEnrichmentConfig.
+            fromBytes(ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH).get(sensorType));
+    CacheKey cacheKey1 = new CacheKey("field1", "value1", sensorEnrichmentConfig);
+    CacheKey cacheKey2 = new CacheKey("field2", "value2", sensorEnrichmentConfig);
+    when(enrichmentAdapter.enrich(cacheKey1)).thenReturn(enrichedField1);
+    when(enrichmentAdapter.enrich(cacheKey2)).thenReturn(enrichedField2);
+    genericEnrichmentBolt.execute(tuple);
+    verify(enrichmentAdapter, times(1)).logAccess(cacheKey1);
+    verify(enrichmentAdapter, times(1)).logAccess(cacheKey2);
+    verify(outputCollector, times(1)).emit(eq(enrichmentType), argThat(new EnrichedMessageMatcher(key, enrichedMessage)));
+
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/JoinBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/JoinBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/JoinBoltTest.java
new file mode 100644
index 0000000..90ec6ad
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/JoinBoltTest.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.TopologyContext;
+import backtype.storm.tuple.Values;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit test for JoinBolt. Uses a stand-alone subclass with fixed stream ids
+ * and a canned join result to verify that the bolt buffers messages until a
+ * tuple has arrived on every expected stream, and only then emits the joined
+ * message and acks the tuple.
+ */
+public class JoinBoltTest extends BaseEnrichmentBoltTest {
+
+  /**
+   * Minimal concrete JoinBolt: expects the inherited streamIds and always
+   * joins to the canned joinedMessage fixture.
+   */
+  public class StandAloneJoinBolt extends JoinBolt<JSONObject> {
+
+    public StandAloneJoinBolt(String zookeeperUrl) {
+      super(zookeeperUrl);
+    }
+
+    @Override
+    public void prepare(Map map, TopologyContext topologyContext) {
+
+    }
+
+    @Override
+    public Set<String> getStreamIds(JSONObject value) {
+      return streamIds;
+    }
+
+    @Override
+    public JSONObject joinMessages(Map<String, JSONObject> streamMessageMap) {
+      return joinedMessage;
+    }
+  }
+
+  // Javadoc below is the @Multiline string value of the field — do not edit.
+  /**
+   {
+   "joinField": "joinValue"
+   }
+   */
+  @Multiline
+  private String joinedMessageString;
+
+  private JSONObject joinedMessage;
+
+  /** Parses the joined-message JSON fixture before each test. */
+  @Before
+  public void parseMessages() {
+    JSONParser parser = new JSONParser();
+    try {
+      joinedMessage = (JSONObject) parser.parse(joinedMessageString);
+    } catch (ParseException e) {
+      e.printStackTrace();
+    }
+  }
+
+  @Test
+  public void test() {
+    StandAloneJoinBolt joinBolt = new StandAloneJoinBolt("zookeeperUrl");
+    joinBolt.setCuratorFramework(client);
+    joinBolt.setTreeCache(cache);
+    // prepare() must reject a bolt missing required cache properties.
+    try {
+      joinBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("Should fail if a maxCacheSize property is not set");
+    } catch(IllegalStateException e) {}
+    joinBolt.withMaxCacheSize(100);
+    try {
+      joinBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      fail("Should fail if a maxTimeRetain property is not set");
+    } catch(IllegalStateException e) {}
+    joinBolt.withMaxTimeRetain(10000);
+    joinBolt.prepare(new HashMap(), topologyContext, outputCollector);
+    joinBolt.declareOutputFields(declarer);
+    verify(declarer, times(1)).declareStream(eq("message"), argThat(new FieldsMatcher("key", "message")));
+    // First two streams arrive: nothing may be emitted or acked yet.
+    when(tuple.getValueByField("key")).thenReturn(key);
+    when(tuple.getSourceStreamId()).thenReturn("geo");
+    when(tuple.getValueByField("message")).thenReturn(geoMessage);
+    joinBolt.execute(tuple);
+    verify(outputCollector, times(0)).emit(eq("message"), any(tuple.getClass()), any(Values.class));
+    verify(outputCollector, times(0)).ack(tuple);
+    when(tuple.getSourceStreamId()).thenReturn("host");
+    when(tuple.getValueByField("message")).thenReturn(hostMessage);
+    joinBolt.execute(tuple);
+    verify(outputCollector, times(0)).emit(eq("message"), any(tuple.getClass()), any(Values.class));
+    verify(outputCollector, times(0)).ack(tuple);
+    // Last expected stream completes the join: emit the joined message and ack.
+    when(tuple.getSourceStreamId()).thenReturn("hbaseEnrichment");
+    when(tuple.getValueByField("message")).thenReturn(hbaseEnrichmentMessage);
+    joinBolt.execute(tuple);
+    verify(outputCollector, times(1)).emit(eq("message"), any(tuple.getClass()), eq(new Values(key, joinedMessage)));
+    verify(outputCollector, times(1)).ack(tuple);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/SplitBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/SplitBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/SplitBoltTest.java
new file mode 100644
index 0000000..7166b1b
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/SplitBoltTest.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import junit.framework.Assert;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.apache.metron.common.configuration.Configurations;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.argThat;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doCallRealMethod;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+/**
+ * Unit test for SplitBolt. Uses a stand-alone subclass (with splitMessage
+ * stubbed via Mockito spy) to verify that execute() emits the original
+ * message on the "message" stream, each split part on its own stream, acks
+ * the tuple, and rejects a split that maps a stream to null.
+ */
+public class SplitBoltTest extends BaseEnrichmentBoltTest {
+
+  /**
+   * Minimal concrete SplitBolt wired to the inherited test fixtures; the
+   * splitMessage result is supplied per-test via doReturn on a spy.
+   */
+  public class StandAloneSplitBolt extends SplitBolt<JSONObject> {
+
+    public StandAloneSplitBolt(String zookeeperUrl) {
+      super(zookeeperUrl);
+    }
+
+
+    @Override
+    public void prepare(Map map, TopologyContext topologyContext) {
+
+    }
+
+    @Override
+    public Set<String> getStreamIds() {
+      return streamIds;
+    }
+
+    @Override
+    public String getKey(Tuple tuple, JSONObject message) {
+      return key;
+    }
+
+    @Override
+    public JSONObject generateMessage(Tuple tuple) {
+      return sampleMessage;
+    }
+
+    @Override
+    public Map<String, JSONObject> splitMessage(JSONObject message) {
+      return null;
+    }
+
+    @Override
+    public void declareOther(OutputFieldsDeclarer declarer) {
+
+    }
+
+    @Override
+    public void emitOther(Tuple tuple, JSONObject message) {
+
+    }
+  }
+
+  @Test
+  public void test() {
+    StandAloneSplitBolt splitBolt = spy(new StandAloneSplitBolt("zookeeperUrl"));
+    splitBolt.setCuratorFramework(client);
+    splitBolt.setTreeCache(cache);
+    doCallRealMethod().when(splitBolt).reloadCallback(anyString(), any(Configurations.Type.class));
+    splitBolt.prepare(new HashMap(), topologyContext, outputCollector);
+    // One stream per split target, plus the pass-through "message" stream
+    // and the "error" stream.
+    splitBolt.declareOutputFields(declarer);
+    verify(declarer, times(1)).declareStream(eq("message"), argThat(new FieldsMatcher("key", "message")));
+    for(String streamId: streamIds) {
+      verify(declarer, times(1)).declareStream(eq(streamId), argThat(new FieldsMatcher("key", "message")));
+    }
+    verify(declarer, times(1)).declareStream(eq("error"), argThat(new FieldsMatcher("message")));
+
+    // Happy path: each split part goes to its stream and the tuple is acked.
+    JSONObject sampleMessage = splitBolt.generateMessage(tuple);
+    Map<String, JSONObject> streamMessageMap = new HashMap<>();
+    streamMessageMap.put("geo", geoMessage);
+    streamMessageMap.put("host", hostMessage);
+    streamMessageMap.put("hbaseEnrichment", hbaseEnrichmentMessage);
+    doReturn(streamMessageMap).when(splitBolt).splitMessage(sampleMessage);
+    splitBolt.execute(tuple);
+    verify(outputCollector, times(1)).emit(eq("message"), any(tuple.getClass()), eq(new Values(key, sampleMessage)));
+    verify(outputCollector, times(1)).emit(eq("geo"), eq(new Values(key, geoMessage)));
+    verify(outputCollector, times(1)).emit(eq("host"), eq(new Values(key, hostMessage)));
+    verify(outputCollector, times(1)).emit(eq("hbaseEnrichment"), eq(new Values(key, hbaseEnrichmentMessage)));
+    verify(outputCollector, times(1)).ack(tuple);
+    // A null value for any stream is a programming error and must be rejected.
+    streamMessageMap = new HashMap<>();
+    streamMessageMap.put("host", null);
+    doReturn(streamMessageMap).when(splitBolt).splitMessage(sampleMessage);
+    try {
+      splitBolt.execute(tuple);
+      Assert.fail("An exception should be thrown when splitMessage produces a null value for a stream");
+    }catch (IllegalArgumentException e) {}
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
new file mode 100644
index 0000000..306c3e1
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import junit.framework.Assert;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.json.simple.JSONObject;
+import org.json.simple.parser.JSONParser;
+import org.json.simple.parser.ParseException;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Unit test for ThreatIntelJoinBolt. Verifies field-map lookup by sensor
+ * type and that joinMessages() flags a message as an alert ("is_alert" =
+ * "true") only when it carries a real threat-intel hit — timing fields
+ * (keys ending in ".ts") must not trigger the alert flag.
+ */
+public class ThreatIntelJoinBoltTest extends BaseEnrichmentBoltTest {
+
+  // The javadoc bodies below are @Multiline string values — data, not docs.
+  /**
+   {
+   "field1": "value1",
+   "enrichedField1": "enrichedValue1",
+   "source.type": "yaf"
+   }
+   */
+  @Multiline
+  private String messageString;
+
+  /**
+   {
+   "field1": "value1",
+   "enrichedField1": "enrichedValue1",
+   "source.type": "yaf",
+   "threatintels.field.end.ts": "timing"
+   }
+   */
+  @Multiline
+  private String messageWithTimingString;
+
+  /**
+   {
+   "field1": "value1",
+   "enrichedField1": "enrichedValue1",
+   "source.type": "yaf",
+   "threatintels.field": "threatIntelValue"
+   }
+   */
+  @Multiline
+  private String alertMessageString;
+
+  private JSONObject message;
+  private JSONObject messageWithTiming;
+  private JSONObject alertMessage;
+
+  /** Parses the three @Multiline JSON fixtures before each test. */
+  @Before
+  public void parseMessages() throws ParseException {
+    JSONParser parser = new JSONParser();
+    message = (JSONObject) parser.parse(messageString);
+    messageWithTiming = (JSONObject) parser.parse(messageWithTimingString);
+    alertMessage = (JSONObject) parser.parse(alertMessageString);
+  }
+
+  @Test
+  public void test() throws IOException {
+    ThreatIntelJoinBolt threatIntelJoinBolt = new ThreatIntelJoinBolt("zookeeperUrl");
+    threatIntelJoinBolt.setCuratorFramework(client);
+    threatIntelJoinBolt.setTreeCache(cache);
+    threatIntelJoinBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    threatIntelJoinBolt.withMaxCacheSize(100);
+    threatIntelJoinBolt.withMaxTimeRetain(10000);
+    threatIntelJoinBolt.prepare(new HashMap<>(), topologyContext, outputCollector);
+    // Unknown sensor types have no field map; the configured one does.
+    Map<String, List<String>> fieldMap = threatIntelJoinBolt.getFieldMap("incorrectSourceType");
+    Assert.assertNull(fieldMap);
+    fieldMap = threatIntelJoinBolt.getFieldMap(sensorType);
+    Assert.assertTrue(fieldMap.containsKey("hbaseThreatIntel"));
+    Map<String, JSONObject> streamMessageMap = new HashMap<>();
+    // No threat-intel keys: no alert flag.
+    streamMessageMap.put("message", message);
+    JSONObject joinedMessage = threatIntelJoinBolt.joinMessages(streamMessageMap);
+    Assert.assertFalse(joinedMessage.containsKey("is_alert"));
+    // A timing key ("...end.ts") alone must not be treated as a hit.
+    streamMessageMap.put("message", messageWithTiming);
+    joinedMessage = threatIntelJoinBolt.joinMessages(streamMessageMap);
+    Assert.assertFalse(joinedMessage.containsKey("is_alert"));
+    // A genuine threat-intel value sets is_alert to "true".
+    streamMessageMap.put("message", alertMessage);
+    joinedMessage = threatIntelJoinBolt.joinMessages(streamMessageMap);
+    Assert.assertTrue(joinedMessage.containsKey("is_alert") && "true".equals(joinedMessage.get("is_alert")));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBoltTest.java
new file mode 100644
index 0000000..77ed32f
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelSplitterBoltTest.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.bolt;
+
+import junit.framework.Assert;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.junit.Test;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Unit test for ThreatIntelSplitterBolt: verifies the threat-intel field map
+ * is loaded for the configured sensor type and that getKeyName() builds the
+ * expected "threatintels.<type>.<field>" output key.
+ */
+public class ThreatIntelSplitterBoltTest extends BaseEnrichmentBoltTest {
+
+  @Test
+  public void test() throws IOException {
+    String threatIntelType = "hbaseThreatIntel";
+    ThreatIntelSplitterBolt threatIntelSplitterBolt = new ThreatIntelSplitterBolt("zookeeperUrl");
+    threatIntelSplitterBolt.setCuratorFramework(client);
+    threatIntelSplitterBolt.setTreeCache(cache);
+    threatIntelSplitterBolt.getConfigurations().updateSensorEnrichmentConfig(sensorType, new FileInputStream(sampleSensorEnrichmentConfigPath));
+    threatIntelSplitterBolt.prepare(new HashMap<>(), topologyContext, outputCollector);
+    Map<String, List<String>> fieldMap = threatIntelSplitterBolt.getFieldMap(sensorType);
+    Assert.assertTrue(fieldMap.containsKey(threatIntelType));
+    // Key names are namespaced as threatintels.<enrichment type>.<field>.
+    String fieldName = threatIntelSplitterBolt.getKeyName(threatIntelType, "field");
+    Assert.assertEquals("threatintels.hbaseThreatIntel.field", fieldName);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/converter/EnrichmentConverterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/converter/EnrichmentConverterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/converter/EnrichmentConverterTest.java
new file mode 100644
index 0000000..20ec64c
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/converter/EnrichmentConverterTest.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.converter;
+
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.HashMap;
+
+/**
+ * Round-trip serialization tests for the HBase enrichment converter:
+ * EnrichmentKey via toBytes()/fromBytes(), and (key, value) pairs via
+ * toPut()/fromPut() against a column family.
+ */
+public class EnrichmentConverterTest {
+  /** Key serialized to bytes and back must compare equal to the original. */
+  @Test
+  public void testKeyConversion() {
+    EnrichmentKey k1 = new EnrichmentKey("type", "indicator1");
+    byte[] serialized = k1.toBytes();
+    EnrichmentKey k2 = new EnrichmentKey();
+    k2.fromBytes(serialized);
+    Assert.assertEquals(k1, k2);
+  }
+
+  /** (key, value) written as an HBase Put must read back unchanged. */
+  @Test
+  public void testValueConversion() throws IOException {
+    EnrichmentConverter converter = new EnrichmentConverter();
+    EnrichmentKey k1 = new EnrichmentKey("type", "indicator");
+    EnrichmentValue v1 = new EnrichmentValue(new HashMap<String, String>() {{
+      put("k1", "v1");
+      put("k2", "v2");
+    }});
+    Put serialized = converter.toPut("cf", k1, v1);
+    LookupKV<EnrichmentKey, EnrichmentValue> kv = converter.fromPut(serialized,"cf");
+    Assert.assertEquals(k1, kv.getKey());
+    Assert.assertEquals(v1, kv.getValue());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractorTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractorTest.java
new file mode 100644
index 0000000..b856e9a
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/tldextractor/BasicTldExtractorTest.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.enrichment.tldextractor;
+
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+
+ /**
+ * Unit test for {@link org.apache.metron.enrichment.tldextractor.BasicTldExtractor}:
+ * checks second-level-domain extraction (extract2LD) and top-level-domain
+ * extraction (extractTLD) against a known hostname.
+ */
+public class BasicTldExtractorTest {
+
+    // Extractor under test; created fresh for each test in setUp().
+    private BasicTldExtractor tldExtractor=null;
+
+     @Before
+    public void setUp() throws Exception {
+        //super.setUp("org.apache.metron.enrichment.tldextractor.BasicTldExtractorTest");
+        this.tldExtractor=new BasicTldExtractor();
+    }
+
+     @After
+    public void tearDown() throws Exception {
+        //super.tearDown();
+    }
+
+    /**
+     * Test method for {@link org.apache.metron.enrichment.tldextractor.BasicTldExtractor#extract2LD(java.lang.String)}.
+     */
+    @Test
+    public void testExtract2LD() {
+        String result = this.tldExtractor.extract2LD("cisco.com");
+        Assert.assertEquals(result, "cisco.com");
+    }
+
+    /**
+     * Test method for {@link org.apache.metron.enrichment.tldextractor.BasicTldExtractor#extractTLD(java.lang.String)}.
+     */
+    @Test
+    public void testExtractTLD() 
+    {
+        String result = this.tldExtractor.extractTLD("cisco.com");
+        Assert.assertEquals(result, ".com");
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/CIFHbaseAdapterTest.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/CIFHbaseAdapterTest.properties b/metron-platform/metron-enrichment/src/test/resources/CIFHbaseAdapterTest.properties
new file mode 100644
index 0000000..9495d27
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/resources/CIFHbaseAdapterTest.properties
@@ -0,0 +1,27 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+kafka.zk.port=2181
+kafka.zk.list=zkpr1
+kafka.zk=zkpr1:2181
+
+#CIF Enrichment
+bolt.enrichment.cif.tablename=cif_table
+bolt.enrichment.cif.host=tld
+bolt.enrichment.cif.email=email
+bolt.enrichment.cif.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.cif.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.cif.enrichment_tag=cif

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/GeoMysqlAdapterTest.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/GeoMysqlAdapterTest.properties b/metron-platform/metron-enrichment/src/test/resources/GeoMysqlAdapterTest.properties
new file mode 100644
index 0000000..ef7126f
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/resources/GeoMysqlAdapterTest.properties
@@ -0,0 +1,27 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+mysql.ip=172.30.9.120
+mysql.port=3306
+mysql.username=test
+mysql.password=123123
+
+#GeoEnrichment
+bolt.enrichment.geo.enrichment_tag=geo
+bolt.enrichment.geo.adapter.table=GEO
+bolt.enrichment.geo.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.geo.MAX_TIME_RETAIN_MINUTES=10
+bolt.enrichment.geo.source=ip_src_addr,ip_dst_addr

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/TestSchemas/CIFHbaseSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/TestSchemas/CIFHbaseSchema.json b/metron-platform/metron-enrichment/src/test/resources/TestSchemas/CIFHbaseSchema.json
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/TestSchemas/GeoMySqlSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/TestSchemas/GeoMySqlSchema.json b/metron-platform/metron-enrichment/src/test/resources/TestSchemas/GeoMySqlSchema.json
new file mode 100644
index 0000000..c4f2a82
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/resources/TestSchemas/GeoMySqlSchema.json
@@ -0,0 +1,42 @@
+{
+"title": "GeoMySql Schema",
+"type": "object",
+"properties": {
+
+         "city"    : {
+					   "type": "string"
+				  },
+		 "country" : {
+						"type": "string"
+					},
+		 "dmaCode" :
+		 			 {
+						"type": "string"
+					},
+	     "geoHash" : 
+	     			{
+						"type": "string"
+					},
+		 "latitude" : 
+		 			{
+						"type": "string"
+				   },
+		 "locID" : 
+		 			{
+					   "type": "string"
+				   },
+		 "location_point" : 
+		 			{
+					   "type": "string"
+				    },
+		 "longitude" : 
+		 			{
+						"type": "string"
+					},
+		 "postalCode" : 
+		 			{
+						"type": "string"
+					}
+   },
+   "required": ["city", "country", "dmaCode","latitude","locID","location_point","postalCode"]
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/TestSchemas/WhoisHbaseSchema.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/TestSchemas/WhoisHbaseSchema.json b/metron-platform/metron-enrichment/src/test/resources/TestSchemas/WhoisHbaseSchema.json
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-enrichment/src/test/resources/WhoisHbaseAdapterTest.properties
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/resources/WhoisHbaseAdapterTest.properties b/metron-platform/metron-enrichment/src/test/resources/WhoisHbaseAdapterTest.properties
new file mode 100644
index 0000000..a579fa3
--- /dev/null
+++ b/metron-platform/metron-enrichment/src/test/resources/WhoisHbaseAdapterTest.properties
@@ -0,0 +1,28 @@
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+
+kafka.zk.port=2181
+kafka.zk.list=zkpr1
+kafka.zk=zkpr1:2181
+
+#WhoisEnrichment
+
+bolt.enrichment.whois.hbase.table.name=whois
+bolt.enrichment.whois.enrichment_tag=whois
+bolt.enrichment.whois.source=tld
+bolt.enrichment.whois.MAX_CACHE_SIZE_OBJECTS_NUM=10000
+bolt.enrichment.whois.MAX_TIME_RETAIN_MINUTES=10



[37/51] [partial] incubator-metron git commit: METRON-113 Project Reorganization (merrimanr) closes apache/incubator-metron#88

Posted by rm...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/pom.xml b/metron-platform/metron-common/pom.xml
new file mode 100644
index 0000000..8050418
--- /dev/null
+++ b/metron-platform/metron-common/pom.xml
@@ -0,0 +1,300 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 
+  Licensed to the Apache Software 
+	Foundation (ASF) under one or more contributor license agreements. See the 
+	NOTICE file distributed with this work for additional information regarding 
+	copyright ownership. The ASF licenses this file to You under the Apache License, 
+	Version 2.0 (the "License"); you may not use this file except in compliance 
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 
+	Unless required by applicable law or agreed to in writing, software distributed 
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES 
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
+  the specific language governing permissions and limitations under the License. 
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.metron</groupId>
+        <artifactId>metron-platform</artifactId>
+        <version>0.1BETA</version>
+    </parent>
+    <artifactId>metron-common</artifactId>
+    <name>metron-common</name>
+    <description>Components common to all enrichments</description>
+    <properties>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <commons.config.version>1.10</commons.config.version>
+    </properties>
+    <repositories>
+        <repository>
+            <id>Metron-Kraken-Repo</id>
+            <name>Metron Kraken Repository</name>
+            <url>https://raw.github.com/opensoc/kraken/mvn-repo</url>
+        </repository>
+    </repositories>
+    <dependencies>
+        <dependency>
+            <groupId>com.opencsv</groupId>
+            <artifactId>opencsv</artifactId>
+            <version>${global_opencsv_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.googlecode.json-simple</groupId>
+            <artifactId>json-simple</artifactId>
+            <version>${global_json_simple_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-core</artifactId>
+            <version>${global_storm_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <artifactId>servlet-api</artifactId>
+                    <groupId>javax.servlet</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>log4j-over-slf4j</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>kafka_2.9.2</artifactId>
+            <version>${global_kafka_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.sun.jmx</groupId>
+                    <artifactId>jmxri</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.sun.jdmk</groupId>
+                    <artifactId>jmxtools</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.jms</groupId>
+                    <artifactId>jms</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.codahale.metrics</groupId>
+            <artifactId>metrics-core</artifactId>
+            <version>${global_metrics_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>com.codahale.metrics</groupId>
+            <artifactId>metrics-graphite</artifactId>
+            <version>${global_metrics_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-configuration</groupId>
+            <artifactId>commons-configuration</artifactId>
+            <version>${commons.config.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.krakenapps</groupId>
+            <artifactId>kraken-pcap</artifactId>
+            <version>${global_pcap_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>slf4j-api</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>slf4j-simple</artifactId>
+                    <groupId>org.slf4j</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>${global_guava_version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+            <version>${global_hbase_version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-client</artifactId>
+            <version>${global_hbase_version}</version>
+            <scope>provided</scope>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>log4j</groupId>
+                    <artifactId>log4j</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>com.google.guava</groupId>
+                    <artifactId>guava</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.github.fge</groupId>
+            <artifactId>json-schema-validator</artifactId>
+            <version>${global_json_schema_validator_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-recipes</artifactId>
+            <version>2.7.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>flux-core</artifactId>
+            <version>${global_flux_version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.storm</groupId>
+            <artifactId>storm-kafka</artifactId>
+            <version>${global_storm_version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>org.apache.curator</artifactId>
+                    <groupId>curator-client</groupId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-test</artifactId>
+            <version>2.7.1</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <version>${global_mockito_version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>nl.jqno.equalsverifier</groupId>
+            <artifactId>equalsverifier</artifactId>
+            <version>2.0.2</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.metron</groupId>
+            <artifactId>metron-test-utilities</artifactId>
+            <version>0.1BETA</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <reporting>
+        <plugins>
+            <!-- Normally, dependency report takes time, skip it -->
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-project-info-reports-plugin</artifactId>
+                <version>2.7</version>
+
+                <configuration>
+                    <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>emma-maven-plugin</artifactId>
+                <version>1.0-alpha-3</version>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-pmd-plugin</artifactId>
+                <configuration>
+                    <targetJdk>1.7</targetJdk>
+                </configuration>
+            </plugin>
+        </plugins>
+    </reporting>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <version>3.1</version>
+                <configuration>
+                    <source>1.7</source>
+                    <compilerArgument>-Xlint:unchecked</compilerArgument>
+                    <target>1.7</target>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>1.4</version>
+                <configuration>
+                    <createDependencyReducedPom>true</createDependencyReducedPom>
+                    <artifactSet>
+                        <excludes>
+                            <exclude>*slf4j*</exclude>
+                        </excludes>
+                    </artifactSet>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <relocations>
+                                <relocation>
+                                    <pattern>com.google.common</pattern>
+                                    <shadedPattern>org.apache.metron.guava</shadedPattern>
+                                </relocation>
+                            </relocations>
+                            <transformers>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.DontIncludeResourceTransformer">
+                                    <resource>.yaml</resource>
+                                </transformer>
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+                                <transformer
+                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass></mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <configuration>
+                    <descriptor>src/main/assembly/assembly.xml</descriptor>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>make-assembly</id> <!-- this is used for inheritance merges -->
+                        <phase>package</phase> <!-- bind to the packaging phase -->
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/resources</directory>
+            </resource>
+        </resources>
+    </build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/assembly/assembly.xml b/metron-platform/metron-common/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..305f0a6
--- /dev/null
+++ b/metron-platform/metron-common/src/main/assembly/assembly.xml
@@ -0,0 +1,33 @@
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<assembly>
+  <id>archive</id>
+  <formats>
+    <format>tar.gz</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>${project.basedir}/src/main/scripts</directory>
+      <outputDirectory>/scripts</outputDirectory>
+      <useDefaultExcludes>true</useDefaultExcludes>
+      <excludes>
+        <exclude>**/*.formatted</exclude>
+        <exclude>**/*.filtered</exclude>
+      </excludes>
+      <fileMode>0755</fileMode>
+      <lineEnding>unix</lineEnding>
+    </fileSet>
+  </fileSets>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/Constants.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/Constants.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/Constants.java
new file mode 100644
index 0000000..3418e9c
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/Constants.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common;
+
+public class Constants {
+
+  public static final String GLOBAL_CONFIG_NAME = "global";
+  public static final String SENSORS_CONFIG_NAME = "sensors";
+  public static final String ZOOKEEPER_ROOT = "/metron";
+  public static final String ZOOKEEPER_TOPOLOGY_ROOT = ZOOKEEPER_ROOT + "/topology";
+  public static final String ZOOKEEPER_GLOBAL_ROOT = ZOOKEEPER_TOPOLOGY_ROOT + "/" + GLOBAL_CONFIG_NAME;
+  public static final String ZOOKEEPER_SENSOR_ROOT = ZOOKEEPER_TOPOLOGY_ROOT + "/" + SENSORS_CONFIG_NAME;
+  public static final long DEFAULT_CONFIGURED_BOLT_TIMEOUT = 5000;
+  public static final String SENSOR_TYPE = "source.type";
+  public static final String ENRICHMENT_TOPIC = "enrichments";
+  public static final String ERROR_STREAM = "error";
+  public static final String SIMPLE_HBASE_ENRICHMENT = "hbaseEnrichment";
+  public static final String SIMPLE_HBASE_THREAT_INTEL = "hbaseThreatIntel";
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/bolt/ConfiguredBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/bolt/ConfiguredBolt.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/bolt/ConfiguredBolt.java
new file mode 100644
index 0000000..aa654fb
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/bolt/ConfiguredBolt.java
@@ -0,0 +1,124 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.bolt;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.base.BaseRichBolt;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.framework.recipes.cache.TreeCache;
+import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
+import org.apache.curator.framework.recipes.cache.TreeCacheListener;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.log4j.Logger;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+
+import java.io.IOException;
+import java.util.Map;
+
+public abstract class ConfiguredBolt extends BaseRichBolt {
+
+  private static final Logger LOG = Logger.getLogger(ConfiguredBolt.class);
+
+  private String zookeeperUrl;
+
+  protected final Configurations configurations = new Configurations();
+  protected CuratorFramework client;
+  protected TreeCache cache;
+
+  public ConfiguredBolt(String zookeeperUrl) {
+    this.zookeeperUrl = zookeeperUrl;
+  }
+
+  public Configurations getConfigurations() {
+    return configurations;
+  }
+
+  public void setCuratorFramework(CuratorFramework client) {
+    this.client = client;
+  }
+
+  public void setTreeCache(TreeCache cache) {
+    this.cache = cache;
+  }
+
+  public void reloadCallback(String name, Configurations.Type type) {
+  }
+
+  @Override
+  public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+    try {
+      if (client == null) {
+        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+        client = CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
+      }
+      client.start();
+      if (cache == null) {
+        cache = new TreeCache(client, Constants.ZOOKEEPER_TOPOLOGY_ROOT);
+        TreeCacheListener listener = new TreeCacheListener() {
+          @Override
+          public void childEvent(CuratorFramework client, TreeCacheEvent event) throws Exception {
+            if (event.getType().equals(TreeCacheEvent.Type.NODE_ADDED) || event.getType().equals(TreeCacheEvent.Type.NODE_UPDATED)) {
+              String path = event.getData().getPath();
+              byte[] data = event.getData().getData();
+              updateConfig(path, data);
+            }
+          }
+        };
+        cache.getListenable().addListener(listener);
+        try {
+          ConfigurationsUtils.updateConfigsFromZookeeper(configurations, client);
+        } catch (Exception e) {
+          LOG.warn("Unable to load configs from zookeeper, but the cache should load lazily...");
+        }
+      }
+      cache.start();
+    } catch (Exception e) {
+      LOG.error(e.getMessage(), e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  public void updateConfig(String path, byte[] data) throws IOException {
+    if (data.length != 0) {
+      String name = path.substring(path.lastIndexOf("/") + 1);
+      Configurations.Type type;
+      if (path.startsWith(Constants.ZOOKEEPER_SENSOR_ROOT)) {
+        configurations.updateSensorEnrichmentConfig(name, data);
+        type = Configurations.Type.SENSOR;
+      } else if (Constants.ZOOKEEPER_GLOBAL_ROOT.equals(path)) {
+        configurations.updateGlobalConfig(data);
+        type = Configurations.Type.GLOBAL;
+      } else {
+        configurations.updateConfig(name, data);
+        type = Configurations.Type.OTHER;
+      }
+      reloadCallback(name, type);
+    }
+  }
+
+  @Override
+  public void cleanup() {
+    cache.close();
+    client.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/cli/ConfigurationsUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/cli/ConfigurationsUtils.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/cli/ConfigurationsUtils.java
new file mode 100644
index 0000000..27f4c2a
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/cli/ConfigurationsUtils.java
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.cli;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.common.utils.JSONUtils;
+import org.apache.zookeeper.KeeperException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ConfigurationsUtils {
+
+  public static CuratorFramework getClient(String zookeeperUrl) {
+    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+    return CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
+  }
+
+  public static void writeGlobalConfigToZookeeper(Map<String, Object> globalConfig, String zookeeperUrl) throws Exception {
+    writeGlobalConfigToZookeeper(JSONUtils.INSTANCE.toJSON(globalConfig), zookeeperUrl);
+  }
+
+  public static void writeGlobalConfigToZookeeper(byte[] globalConfig, String zookeeperUrl) throws Exception {
+    CuratorFramework client = getClient(zookeeperUrl);
+    client.start();
+    try {
+      writeGlobalConfigToZookeeper(globalConfig, client);
+    }
+    finally {
+      client.close();
+    }
+  }
+
+  public static void writeGlobalConfigToZookeeper(byte[] globalConfig, CuratorFramework client) throws Exception {
+    writeToZookeeper(Constants.ZOOKEEPER_GLOBAL_ROOT, globalConfig, client);
+  }
+
+  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, SensorEnrichmentConfig sensorEnrichmentConfig, String zookeeperUrl) throws Exception {
+    writeSensorEnrichmentConfigToZookeeper(sensorType, JSONUtils.INSTANCE.toJSON(sensorEnrichmentConfig), zookeeperUrl);
+  }
+
+  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, byte[] configData, String zookeeperUrl) throws Exception {
+    CuratorFramework client = getClient(zookeeperUrl);
+    client.start();
+    try {
+      writeSensorEnrichmentConfigToZookeeper(sensorType, configData, client);
+    }
+    finally {
+      client.close();
+    }
+  }
+
+  public static void writeSensorEnrichmentConfigToZookeeper(String sensorType, byte[] configData, CuratorFramework client) throws Exception {
+    writeToZookeeper(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + sensorType, configData, client);
+  }
+
+  public static void writeConfigToZookeeper(String name, Map<String, Object> config, String zookeeperUrl) throws Exception {
+    writeConfigToZookeeper(name, JSONUtils.INSTANCE.toJSON(config), zookeeperUrl);
+  }
+
+  public static void writeConfigToZookeeper(String name, byte[] config, String zookeeperUrl) throws Exception {
+    CuratorFramework client = getClient(zookeeperUrl);
+    client.start();
+    try {
+      writeToZookeeper(Constants.ZOOKEEPER_TOPOLOGY_ROOT + "/" + name, config, client);
+    }
+    finally {
+      client.close();
+    }
+  }
+
+  public static void writeToZookeeper(String path, byte[] configData, CuratorFramework client) throws Exception {
+    try {
+      client.setData().forPath(path, configData);
+    } catch (KeeperException.NoNodeException e) {
+      client.create().creatingParentsIfNeeded().forPath(path, configData);
+    }
+  }
+
+  public static void updateConfigsFromZookeeper(Configurations configurations, CuratorFramework client) throws Exception {
+    configurations.updateGlobalConfig(readGlobalConfigBytesFromZookeeper(client));
+    List<String> sensorTypes = client.getChildren().forPath(Constants.ZOOKEEPER_SENSOR_ROOT);
+    for(String sensorType: sensorTypes) {
+      configurations.updateSensorEnrichmentConfig(sensorType, readSensorEnrichmentConfigBytesFromZookeeper(sensorType, client));
+    }
+  }
+
+  public static byte[] readGlobalConfigBytesFromZookeeper(CuratorFramework client) throws Exception {
+    return readFromZookeeper(Constants.ZOOKEEPER_GLOBAL_ROOT, client);
+  }
+
+  public static byte[] readSensorEnrichmentConfigBytesFromZookeeper(String sensorType, CuratorFramework client) throws Exception {
+    return readFromZookeeper(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + sensorType, client);
+  }
+
+  public static byte[] readConfigBytesFromZookeeper(String name, CuratorFramework client) throws Exception {
+    return readFromZookeeper(Constants.ZOOKEEPER_TOPOLOGY_ROOT + "/" + name, client);
+  }
+
+  public static byte[] readFromZookeeper(String path, CuratorFramework client) throws Exception {
+    return client.getData().forPath(path);
+  }
+
+  public static void uploadConfigsToZookeeper(String rootFilePath, String zookeeperUrl) throws Exception {
+    ConfigurationsUtils.writeGlobalConfigToZookeeper(readGlobalConfigFromFile(rootFilePath), zookeeperUrl);
+    Map<String, byte[]> sensorEnrichmentConfigs = readSensorEnrichmentConfigsFromFile(rootFilePath);
+    for(String sensorType: sensorEnrichmentConfigs.keySet()) {
+      ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensorType, sensorEnrichmentConfigs.get(sensorType), zookeeperUrl);
+    }
+  }
+
+  public static byte[] readGlobalConfigFromFile(String rootFilePath) throws IOException {
+    return Files.readAllBytes(Paths.get(rootFilePath, Constants.GLOBAL_CONFIG_NAME + ".json"));
+  }
+
+  public static Map<String, byte[]> readSensorEnrichmentConfigsFromFile(String rootPath) throws IOException {
+    Map<String, byte[]> sensorEnrichmentConfigs = new HashMap<>();
+    for(File file: new File(rootPath, Constants.SENSORS_CONFIG_NAME).listFiles()) {
+      sensorEnrichmentConfigs.put(FilenameUtils.removeExtension(file.getName()), Files.readAllBytes(file.toPath()));
+    }
+    return sensorEnrichmentConfigs;
+  }
+
+  public static void dumpConfigs(String zookeeperUrl) throws Exception {
+    CuratorFramework client = getClient(zookeeperUrl);
+    client.start();
+    //Output global configs
+    {
+      System.out.println("Global config");
+      byte[] globalConfigData = client.getData().forPath(Constants.ZOOKEEPER_GLOBAL_ROOT);
+      System.out.println(new String(globalConfigData));
+    }
+    //Output sensor specific configs
+    {
+      List<String> children = client.getChildren().forPath(Constants.ZOOKEEPER_SENSOR_ROOT);
+      for (String child : children) {
+        byte[] data = client.getData().forPath(Constants.ZOOKEEPER_SENSOR_ROOT + "/" + child);
+        System.out.println("Config for source " + child);
+        System.out.println(new String(data));
+        System.out.println();
+      }
+    }
+    client.close();
+  }
+
+  public static void main(String[] args) {
+
+    Options options = new Options();
+    {
+      Option o = new Option("h", "help", false, "This screen");
+      o.setRequired(false);
+      options.addOption(o);
+    }
+    {
+      Option o = new Option("p", "config_files", true, "Path to the source config files.  Must be named like \"$source\".json");
+      o.setArgName("DIR_NAME");
+      o.setRequired(false);
+      options.addOption(o);
+    }
+    {
+      Option o = new Option("z", "zk", true, "Zookeeper Quroum URL (zk1:2181,zk2:2181,...");
+      o.setArgName("ZK_QUORUM");
+      o.setRequired(true);
+      options.addOption(o);
+    }
+
+    try {
+      CommandLineParser parser = new PosixParser();
+      CommandLine cmd = null;
+      try {
+        cmd = parser.parse(options, args);
+      } catch (ParseException pe) {
+        pe.printStackTrace();
+        final HelpFormatter usageFormatter = new HelpFormatter();
+        usageFormatter.printHelp("ConfigurationsUtils", null, options, null, true);
+        System.exit(-1);
+      }
+      if (cmd.hasOption("h")) {
+        final HelpFormatter usageFormatter = new HelpFormatter();
+        usageFormatter.printHelp("ConfigurationsUtils", null, options, null, true);
+        System.exit(0);
+      }
+
+      String zkQuorum = cmd.getOptionValue("z");
+      if (cmd.hasOption("p")) {
+        String sourcePath = cmd.getOptionValue("p");
+        uploadConfigsToZookeeper(sourcePath, zkQuorum);
+      }
+
+      ConfigurationsUtils.dumpConfigs(zkQuorum);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      System.exit(-1);
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configuration.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configuration.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configuration.java
new file mode 100644
index 0000000..1ccf47b
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configuration.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+
+import java.nio.file.Path;
+import java.util.Map;
+
+public class Configuration extends Configurations {
+
+    protected CuratorFramework curatorFramework = null;
+    private Path configFileRoot;
+
+    public Configuration(CuratorFramework curatorFramework){
+
+        this.curatorFramework = curatorFramework;
+
+    }
+
+
+    public Configuration(Path configFileRoot){
+
+        this.configFileRoot = configFileRoot;
+    }
+
+    public void update() throws Exception {
+
+        if( null != curatorFramework ) {
+
+            ConfigurationsUtils.updateConfigsFromZookeeper(this, this.curatorFramework);
+
+        } else {
+
+            updateGlobalConfig(ConfigurationsUtils.readGlobalConfigFromFile(configFileRoot.toAbsolutePath().toString()));
+            Map<String, byte[]> sensorEnrichmentConfigs = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(configFileRoot.toAbsolutePath().toString());
+            for(String sensorType: sensorEnrichmentConfigs.keySet()) {
+                updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfigs.get(sensorType));
+            }
+
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configurations.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configurations.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configurations.java
new file mode 100644
index 0000000..6aaa2b4
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/Configurations.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.log4j.Logger;
+import org.apache.metron.common.utils.JSONUtils;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Serializable;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+public class Configurations implements Serializable {
+
+  private static final Logger LOG = Logger.getLogger(Configurations.class);
+
+  public enum Type {
+    GLOBAL, SENSOR, OTHER
+  }
+
+  public static final String GLOBAL_CONFIG_NAME = "global";
+
+  private ConcurrentMap<String, Object> configurations = new ConcurrentHashMap<>();
+
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> getGlobalConfig() {
+    return (Map<String, Object>) configurations.get(GLOBAL_CONFIG_NAME);
+  }
+
+  public void updateGlobalConfig(byte[] data) throws IOException {
+    updateGlobalConfig(new ByteArrayInputStream(data));
+  }
+
+  public void updateGlobalConfig(InputStream io) throws IOException {
+    Map<String, Object> globalConfig = JSONUtils.INSTANCE.load(io, new TypeReference<Map<String, Object>>() {
+    });
+    updateGlobalConfig(globalConfig);
+  }
+
+  public void updateGlobalConfig(Map<String, Object> globalConfig) {
+    configurations.put(GLOBAL_CONFIG_NAME, globalConfig);
+  }
+
+  public SensorEnrichmentConfig getSensorEnrichmentConfig(String sensorType) {
+    return (SensorEnrichmentConfig) configurations.get(sensorType);
+  }
+
+  public void updateSensorEnrichmentConfig(String sensorType, byte[] data) throws IOException {
+    updateSensorEnrichmentConfig(sensorType, new ByteArrayInputStream(data));
+  }
+
+  public void updateSensorEnrichmentConfig(String sensorType, InputStream io) throws IOException {
+    SensorEnrichmentConfig sensorEnrichmentConfig = JSONUtils.INSTANCE.load(io, SensorEnrichmentConfig.class);
+    updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfig);
+  }
+
+  public void updateSensorEnrichmentConfig(String sensorType, SensorEnrichmentConfig sensorEnrichmentConfig) {
+    configurations.put(sensorType, sensorEnrichmentConfig);
+  }
+
+  @SuppressWarnings("unchecked")
+  public Map<String, Object> getConfig(String name) {
+    return (Map<String, Object>) configurations.get(name);
+  }
+
+  public void updateConfig(String name, byte[] data) throws IOException {
+    if (data == null) throw new IllegalStateException("config data cannot be null");
+    Map<String, Object> config = JSONUtils.INSTANCE.load(new ByteArrayInputStream(data), new TypeReference<Map<String, Object>>() {});
+    updateConfig(name, config);
+  }
+
+  public void updateConfig(String name, Map<String, Object> config) {
+    configurations.put(name, config);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+    Configurations that = (Configurations) o;
+    return configurations.equals(that.configurations);
+  }
+
+  @Override
+  public int hashCode() {
+    return configurations.hashCode();
+  }
+
+  @Override
+  public String toString() {
+    return configurations.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/EnrichmentConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/EnrichmentConfig.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/EnrichmentConfig.java
new file mode 100644
index 0000000..2ead81e
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/EnrichmentConfig.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.common.configuration;
+
+import com.google.common.base.Joiner;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.metron.common.Constants;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+
+public class EnrichmentConfig {
+  public static enum Type {
+     THREAT_INTEL
+    ,ENRICHMENT
+  }
+
+  protected static final Logger _LOG = LoggerFactory.getLogger(EnrichmentConfig.class);
+  public static class FieldList {
+    Type type;
+    Map<String, List<String>> fieldToEnrichmentTypes;
+
+    public Type getType() {
+      return type;
+    }
+
+    public void setType(Type type) {
+      this.type = type;
+    }
+
+    public Map<String, List<String>> getFieldToEnrichmentTypes() {
+      return fieldToEnrichmentTypes;
+    }
+
+    public void setFieldToEnrichmentTypes(Map<String, List<String>> fieldToEnrichmentTypes) {
+      this.fieldToEnrichmentTypes = fieldToEnrichmentTypes;
+    }
+  }
+  public String zkQuorum;
+  public Map<String, FieldList> sensorToFieldList;
+
+  public String getZkQuorum() {
+    return zkQuorum;
+  }
+
+  public void setZkQuorum(String zkQuorum) {
+    this.zkQuorum = zkQuorum;
+  }
+
+  public Map<String, FieldList> getSensorToFieldList() {
+    return sensorToFieldList;
+  }
+
+  public void setSensorToFieldList(Map<String, FieldList> sensorToFieldList) {
+    this.sensorToFieldList = sensorToFieldList;
+  }
+
+  public void updateSensorConfigs( ) throws Exception {
+    CuratorFramework client = ConfigurationsUtils.getClient(getZkQuorum());
+    try {
+      client.start();
+      updateSensorConfigs(new ZKSourceConfigHandler(client), sensorToFieldList);
+    }
+    finally {
+      client.close();
+    }
+  }
+
+  public static interface SourceConfigHandler {
+    SensorEnrichmentConfig readConfig(String sensor) throws Exception;
+    void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception;
+  }
+
+  public static class ZKSourceConfigHandler implements SourceConfigHandler {
+    CuratorFramework client;
+    public ZKSourceConfigHandler(CuratorFramework client) {
+      this.client = client;
+    }
+    @Override
+    public SensorEnrichmentConfig readConfig(String sensor) throws Exception {
+      return SensorEnrichmentConfig.fromBytes(ConfigurationsUtils.readSensorEnrichmentConfigBytesFromZookeeper(sensor, client));
+    }
+
+    @Override
+    public void persistConfig(String sensor, SensorEnrichmentConfig config) throws Exception {
+      ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensor, config.toJSON().getBytes(), client);
+    }
+  }
+
+  public static void updateSensorConfigs( SourceConfigHandler scHandler
+                                        , Map<String, FieldList> sensorToFieldList
+                                        ) throws Exception
+  {
+    Map<String, SensorEnrichmentConfig> sourceConfigsChanged = new HashMap<>();
+    for (Map.Entry<String, FieldList> kv : sensorToFieldList.entrySet()) {
+      SensorEnrichmentConfig config = sourceConfigsChanged.get(kv.getKey());
+      if(config == null) {
+        config = scHandler.readConfig(kv.getKey());
+        if(_LOG.isDebugEnabled()) {
+          _LOG.debug(config.toJSON());
+        }
+      }
+      Map<String, List<String> > fieldMap = null;
+      Map<String, List<String> > fieldToTypeMap = null;
+      List<String> fieldList = null;
+      if(kv.getValue().type == Type.THREAT_INTEL) {
+        fieldMap = config.getThreatIntelFieldMap();
+        if(fieldMap!= null) {
+          fieldList = fieldMap.get(Constants.SIMPLE_HBASE_THREAT_INTEL);
+        }
+        if(fieldList == null) {
+          fieldList = new ArrayList<>();
+          fieldMap.put(Constants.SIMPLE_HBASE_THREAT_INTEL, fieldList);
+        }
+        fieldToTypeMap = config.getFieldToThreatIntelTypeMap();
+        if(fieldToTypeMap == null) {
+          fieldToTypeMap = new HashMap<>();
+          config.setFieldToThreatIntelTypeMap(fieldToTypeMap);
+        }
+      }
+      else if(kv.getValue().type == Type.ENRICHMENT) {
+        fieldMap = config.getEnrichmentFieldMap();
+        if(fieldMap!= null) {
+          fieldList = fieldMap.get(Constants.SIMPLE_HBASE_ENRICHMENT);
+        }
+        if(fieldList == null) {
+          fieldList = new ArrayList<>();
+          fieldMap.put(Constants.SIMPLE_HBASE_ENRICHMENT, fieldList);
+        }
+        fieldToTypeMap = config.getFieldToEnrichmentTypeMap();
+        if(fieldToTypeMap == null) {
+          fieldToTypeMap = new HashMap<>();
+          config.setFieldToEnrichmentTypeMap(fieldToTypeMap);
+        }
+      }
+      if(fieldToTypeMap == null  || fieldMap == null) {
+        _LOG.debug("fieldToTypeMap is null or fieldMap is null, so skipping");
+        continue;
+      }
+      //Add the additional fields to the field list associated with the hbase adapter
+      {
+        HashSet<String> fieldSet = new HashSet<>(fieldList);
+        List<String> additionalFields = new ArrayList<>();
+        for (String field : kv.getValue().getFieldToEnrichmentTypes().keySet()) {
+          if (!fieldSet.contains(field)) {
+            additionalFields.add(field);
+          }
+        }
+        //adding only the ones that we don't already have to the field list
+        if (additionalFields.size() > 0) {
+          _LOG.debug("Adding additional fields: " + Joiner.on(',').join(additionalFields));
+          fieldList.addAll(additionalFields);
+          sourceConfigsChanged.put(kv.getKey(), config);
+        }
+      }
+      //Add the additional enrichment types to the mapping between the fields
+      {
+        for(Map.Entry<String, List<String>> fieldToType : kv.getValue().getFieldToEnrichmentTypes().entrySet()) {
+          String field = fieldToType.getKey();
+          final HashSet<String> types = new HashSet<>(fieldToType.getValue());
+          int sizeBefore = 0;
+          if(fieldToTypeMap.containsKey(field)) {
+            List<String> typeList = fieldToTypeMap.get(field);
+            sizeBefore = new HashSet<>(typeList).size();
+            types.addAll(typeList);
+          }
+          int sizeAfter = types.size();
+          boolean changed = sizeBefore != sizeAfter;
+          if(changed) {
+            fieldToTypeMap.put(field, new ArrayList<String>() {{
+                addAll(types);
+              }});
+            sourceConfigsChanged.put(kv.getKey(), config);
+          }
+        }
+      }
+    }
+    for(Map.Entry<String, SensorEnrichmentConfig> kv : sourceConfigsChanged.entrySet()) {
+      scHandler.persistConfig(kv.getKey(), kv.getValue());
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/SensorEnrichmentConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/SensorEnrichmentConfig.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/SensorEnrichmentConfig.java
new file mode 100644
index 0000000..bc30327
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/SensorEnrichmentConfig.java
@@ -0,0 +1,129 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.metron.common.utils.JSONUtils;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class SensorEnrichmentConfig {
+
+  private String index;
+  private Map<String, List<String>> enrichmentFieldMap;
+  private Map<String, List<String>> threatIntelFieldMap;
+  private Map<String, List<String>> fieldToEnrichmentTypeMap = new HashMap<>();
+  private Map<String, List<String>> fieldToThreatIntelTypeMap = new HashMap<>();
+  private int batchSize;
+
+  public String getIndex() {
+    return index;
+  }
+
+  public void setIndex(String index) {
+    this.index = index;
+  }
+
+  public Map<String, List<String>> getEnrichmentFieldMap() {
+    return enrichmentFieldMap;
+  }
+
+  public void setEnrichmentFieldMap(Map<String, List<String>> enrichmentFieldMap) {
+    this.enrichmentFieldMap = enrichmentFieldMap;
+  }
+
+  public Map<String, List<String>> getThreatIntelFieldMap() {
+    return threatIntelFieldMap;
+  }
+
+  public void setThreatIntelFieldMap(Map<String, List<String>> threatIntelFieldMap) {
+    this.threatIntelFieldMap = threatIntelFieldMap;
+  }
+
+  public Map<String, List<String>> getFieldToEnrichmentTypeMap() {
+    return fieldToEnrichmentTypeMap;
+  }
+
+  public Map<String, List<String>> getFieldToThreatIntelTypeMap() {
+    return fieldToThreatIntelTypeMap;
+  }
+  public void setFieldToEnrichmentTypeMap(Map<String, List<String>> fieldToEnrichmentTypeMap) {
+    this.fieldToEnrichmentTypeMap = fieldToEnrichmentTypeMap;
+  }
+
+  public void setFieldToThreatIntelTypeMap(Map<String, List<String>> fieldToThreatIntelTypeMap) {
+    this.fieldToThreatIntelTypeMap= fieldToThreatIntelTypeMap;
+  }
+  public int getBatchSize() {
+    return batchSize;
+  }
+
+  public void setBatchSize(int batchSize) {
+    this.batchSize = batchSize;
+  }
+
+  public static SensorEnrichmentConfig fromBytes(byte[] config) throws IOException {
+    return JSONUtils.INSTANCE.load(new String(config), SensorEnrichmentConfig.class);
+  }
+
+  public String toJSON() throws JsonProcessingException {
+    return JSONUtils.INSTANCE.toJSON(this, true);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    SensorEnrichmentConfig that = (SensorEnrichmentConfig) o;
+
+    if (getBatchSize() != that.getBatchSize()) return false;
+    if (getIndex() != null ? !getIndex().equals(that.getIndex()) : that.getIndex() != null) return false;
+    if (getEnrichmentFieldMap() != null ? !getEnrichmentFieldMap().equals(that.getEnrichmentFieldMap()) : that.getEnrichmentFieldMap() != null)
+      return false;
+    if (getThreatIntelFieldMap() != null ? !getThreatIntelFieldMap().equals(that.getThreatIntelFieldMap()) : that.getThreatIntelFieldMap() != null)
+      return false;
+    if (getFieldToEnrichmentTypeMap() != null ? !getFieldToEnrichmentTypeMap().equals(that.getFieldToEnrichmentTypeMap()) : that.getFieldToEnrichmentTypeMap() != null)
+      return false;
+    return getFieldToThreatIntelTypeMap() != null ? getFieldToThreatIntelTypeMap().equals(that.getFieldToThreatIntelTypeMap()) : that.getFieldToThreatIntelTypeMap() == null;
+
+  }
+
+  @Override
+  public String toString() {
+    return "{index=" + index + ", batchSize=" + batchSize +
+            ", enrichmentFieldMap=" + enrichmentFieldMap +
+            ", threatIntelFieldMap" + threatIntelFieldMap +
+            ", fieldToEnrichmentTypeMap=" + fieldToEnrichmentTypeMap +
+            ", fieldToThreatIntelTypeMap=" + fieldToThreatIntelTypeMap + "}";
+  }
+
+  @Override
+  public int hashCode() {
+    int result = getIndex() != null ? getIndex().hashCode() : 0;
+    result = 31 * result + (getEnrichmentFieldMap() != null ? getEnrichmentFieldMap().hashCode() : 0);
+    result = 31 * result + (getThreatIntelFieldMap() != null ? getThreatIntelFieldMap().hashCode() : 0);
+    result = 31 * result + (getFieldToEnrichmentTypeMap() != null ? getFieldToEnrichmentTypeMap().hashCode() : 0);
+    result = 31 * result + (getFieldToThreatIntelTypeMap() != null ? getFieldToThreatIntelTypeMap().hashCode() : 0);
+    result = 31 * result + getBatchSize();
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/BulkMessageWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/BulkMessageWriter.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/BulkMessageWriter.java
new file mode 100644
index 0000000..6fb3d78
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/BulkMessageWriter.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.interfaces;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Contract for writers that persist messages in batches. Implementations
+ * receive the originating Storm tuples alongside the parsed messages —
+ * presumably so callers can ack/fail per tuple; confirm against the bolt
+ * that drives this interface. Cleanup is inherited from AutoCloseable.
+ *
+ * @param <T> the type of message being written
+ */
+public interface BulkMessageWriter<T> extends AutoCloseable {
+
+  // NOTE(review): stormConf is a raw Map (Storm hands the topology config
+  // over untyped); tightening it to Map<String, Object> would break
+  // existing implementers, so it is documented rather than changed.
+  void init(Map stormConf, Configurations configuration) throws Exception;
+  // Writes one batch of messages for the given sensor type; tuples and
+  // messages are parallel lists supplied by the caller.
+  void write(String sensorType, Configurations configurations, List<Tuple> tuples, List<T> messages) throws Exception;
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/MessageWriter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/MessageWriter.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/MessageWriter.java
new file mode 100644
index 0000000..a90a8cb
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/interfaces/MessageWriter.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.interfaces;
+
+import backtype.storm.tuple.Tuple;
+import org.apache.metron.common.configuration.Configurations;
+
+/**
+ * Contract for writers that persist a single message at a time (the
+ * non-batching counterpart of BulkMessageWriter). Cleanup is inherited
+ * from AutoCloseable.
+ *
+ * @param <T> the type of message being written
+ */
+public interface MessageWriter<T> extends AutoCloseable {
+
+  // Setup hook; takes no configuration, presumably invoked once before any
+  // write — confirm against the bolt that drives this interface.
+  void init();
+  // Writes one message for the given sensor type, with its source tuple.
+  void write(String sensorType, Configurations configurations, Tuple tuple, T message) throws Exception;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ErrorUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ErrorUtils.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ErrorUtils.java
new file mode 100644
index 0000000..6e139c8
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ErrorUtils.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.tuple.Values;
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.metron.common.Constants;
+import org.json.simple.JSONObject;
+
+public class ErrorUtils {
+
+	@SuppressWarnings("unchecked")
+	public static JSONObject generateErrorMessage(String message, Throwable t)
+	{
+		JSONObject error_message = new JSONObject();
+		
+		/*
+		 * Save full stack trace in object.
+		 */
+		String stackTrace = ExceptionUtils.getStackTrace(t);
+		
+		String exception = t.toString();
+		
+		error_message.put("time", System.currentTimeMillis());
+		try {
+			error_message.put("hostname", InetAddress.getLocalHost().getHostName());
+		} catch (UnknownHostException ex) {
+			// TODO Auto-generated catch block
+			ex.printStackTrace();
+		}
+		
+		error_message.put("message", message);
+		error_message.put(Constants.SENSOR_TYPE, "error");
+		error_message.put("exception", exception);
+		error_message.put("stack", stackTrace);
+		
+		return error_message;
+	}
+
+	public static void handleError(OutputCollector collector, Throwable t, String errorStream) {
+		JSONObject error = ErrorUtils.generateErrorMessage(t.getMessage(), t);
+		collector.emit(errorStream, new Values(error));
+		collector.reportError(t);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/JSONUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/JSONUtils.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/JSONUtils.java
new file mode 100644
index 0000000..4af9ad1
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/JSONUtils.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.common.utils;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import java.io.*;
+
+/**
+ * Enum-based singleton facade over Jackson for JSON (de)serialization.
+ * Usage: {@code JSONUtils.INSTANCE.load(...)} / {@code toJSON(...)}.
+ */
+public enum JSONUtils {
+  INSTANCE;
+  // One ObjectMapper per thread, lazily created. NOTE(review): ObjectMapper
+  // is documented as thread-safe once configured, so the ThreadLocal is
+  // contention avoidance / isolation rather than a correctness requirement.
+  // (The previous javadoc here was the generic JDK ThreadLocal text, copied
+  // verbatim — replaced with what this code actually does.)
+  private static ThreadLocal<ObjectMapper> _mapper = new ThreadLocal<ObjectMapper>() {
+    /** Supplies the calling thread's ObjectMapper on first access. */
+    @Override
+    protected ObjectMapper initialValue() {
+      return new ObjectMapper();
+    }
+  };
+
+  /** Deserializes JSON from a stream into the type captured by {@code ref}. */
+  public <T> T load(InputStream is, TypeReference<T> ref) throws IOException {
+    return _mapper.get().readValue(is, ref);
+  }
+  /** Deserializes a JSON string into the type captured by {@code ref}. */
+  public <T> T load(String is, TypeReference<T> ref) throws IOException {
+    return _mapper.get().readValue(is, ref);
+  }
+  /** Deserializes JSON from a file into the type captured by {@code ref}. */
+  public <T> T load(File f, TypeReference<T> ref) throws IOException {
+    return _mapper.get().readValue(new BufferedInputStream(new FileInputStream(f)), ref);
+  }
+  /** Deserializes JSON from a stream into an instance of {@code clazz}. */
+  public <T> T load(InputStream is, Class<T> clazz) throws IOException {
+    return _mapper.get().readValue(is, clazz);
+  }
+
+  /** Deserializes JSON from a file into an instance of {@code clazz}. */
+  public <T> T load(File f, Class<T> clazz) throws IOException {
+    return _mapper.get().readValue(new BufferedInputStream(new FileInputStream(f)), clazz);
+  }
+  /** Deserializes a JSON string into an instance of {@code clazz}. */
+  public <T> T load(String is, Class<T> clazz) throws IOException {
+    return _mapper.get().readValue(is, clazz);
+  }
+
+  /**
+   * Serializes {@code o} to a JSON string.
+   *
+   * @param pretty when true, format with the default pretty printer
+   */
+  public String toJSON(Object o, boolean pretty) throws JsonProcessingException {
+    if(pretty) {
+      return _mapper.get().writerWithDefaultPrettyPrinter().writeValueAsString(o);
+    }
+    else {
+      return _mapper.get().writeValueAsString(o);
+    }
+  }
+
+  /** Serializes {@code config} to JSON bytes. */
+  public byte[] toJSON(Object config) throws JsonProcessingException {
+    return _mapper.get().writeValueAsBytes(config);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/MessageUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/MessageUtils.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/MessageUtils.java
new file mode 100644
index 0000000..df711fa
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/MessageUtils.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils;
+
+import org.apache.metron.common.Constants;
+import org.json.simple.JSONObject;
+
+/** Helpers for reading common fields out of telemetry JSON messages. */
+public class MessageUtils {
+
+  /**
+   * Returns the sensor type stored under the {@link Constants#SENSOR_TYPE}
+   * key, or null when the field is absent.
+   */
+  public static String getSensorType(JSONObject message) {
+    return (String) message.get(Constants.SENSOR_TYPE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ReflectionUtils.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ReflectionUtils.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ReflectionUtils.java
new file mode 100644
index 0000000..2afa097
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/ReflectionUtils.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils;
+
+import java.lang.reflect.InvocationTargetException;
+
+public class ReflectionUtils<T> {
+
+  public static <T> T createInstance(String className, T defaultClass) {
+    T instance;
+    if(className == null || className.length() == 0 || className.charAt(0) == '$') {
+      return defaultClass;
+    }
+    else {
+      try {
+        Class<? extends T> clazz = (Class<? extends T>) Class.forName(className);
+        instance = clazz.getConstructor().newInstance();
+      } catch (InstantiationException e) {
+        throw new IllegalStateException("Unable to instantiate connector.", e);
+      } catch (IllegalAccessException e) {
+        throw new IllegalStateException("Unable to instantiate connector: illegal access", e);
+      } catch (InvocationTargetException e) {
+        throw new IllegalStateException("Unable to instantiate connector", e);
+      } catch (NoSuchMethodException e) {
+        throw new IllegalStateException("Unable to instantiate connector: no such method", e);
+      } catch (ClassNotFoundException e) {
+        throw new IllegalStateException("Unable to instantiate connector: class not found", e);
+      }
+    }
+    return instance;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/main/scripts/zk_load_configs.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/scripts/zk_load_configs.sh b/metron-platform/metron-common/src/main/scripts/zk_load_configs.sh
new file mode 100755
index 0000000..4a928bd
--- /dev/null
+++ b/metron-platform/metron-common/src/main/scripts/zk_load_configs.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+# 
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+# 
+#     http://www.apache.org/licenses/LICENSE-2.0
+# 
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Launcher for the ConfigurationsUtils CLI, which loads Metron configs into
+# ZooKeeper. Sources Bigtop's HBase defaults (if present) so shared
+# environment settings are applied before the JVM starts.
+BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
+[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+export METRON_VERSION=0.1BETA
+export METRON_HOME=/usr/metron/$METRON_VERSION
+export PARSERS_JAR=metron-parsers-$METRON_VERSION.jar
+# NOTE(review): ZK_HOME defaults to the hbase-client dir and is exported but
+# never referenced below — confirm whether the launched tool reads it.
+export ZK_HOME=${ZK_HOME:-/usr/hdp/current/hbase-client}
+# All CLI arguments (e.g. -z <zk_url> -p <config_path>) are forwarded verbatim.
+java -cp $METRON_HOME/lib/$PARSERS_JAR org.apache.metron.common.cli.ConfigurationsUtils "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/bolt/ConfiguredBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/bolt/ConfiguredBoltTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/bolt/ConfiguredBoltTest.java
new file mode 100644
index 0000000..a791086
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/bolt/ConfiguredBoltTest.java
@@ -0,0 +1,162 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.bolt;
+
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import org.apache.curator.test.TestingServer;
+import org.apache.metron.common.Constants;
+import org.apache.metron.TestConstants;
+import org.apache.metron.test.bolt.BaseEnrichmentBoltTest;
+import org.apache.metron.common.configuration.Configurations;
+import org.apache.metron.common.configuration.SensorEnrichmentConfig;
+import org.apache.metron.common.cli.ConfigurationsUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Verifies that ConfiguredBolt picks up configuration changes published to
+ * ZooKeeper: global config, per-sensor enrichment configs and misc configs.
+ */
+public class ConfiguredBoltTest extends BaseEnrichmentBoltTest {
+  // Names of configs whose reload callbacks have fired. Static because the
+  // bolt instance invokes the callback while assertions run on the test.
+  private static Set<String> configsUpdated = new HashSet<>();
+  private Set<String> allConfigurationTypes = new HashSet<>();
+  // Fix: the original created the TestingServer as a local in @Before and
+  // never stopped it, leaking a server (and its port) per test run.
+  private TestingServer testZkServer;
+  private String zookeeperUrl;
+
+  /** Minimal concrete ConfiguredBolt that records reload callbacks. */
+  public static class StandAloneConfiguredBolt extends ConfiguredBolt {
+
+    public StandAloneConfiguredBolt(String zookeeperUrl) {
+      super(zookeeperUrl);
+    }
+
+    @Override
+    public void execute(Tuple input) {
+    }
+
+    @Override
+    public void declareOutputFields(OutputFieldsDeclarer declarer) {
+    }
+
+    @Override
+    public void reloadCallback(String name, Configurations.Type type) {
+      configsUpdated.add(name);
+    }
+  }
+
+  /** Starts an in-process ZooKeeper and seeds it with the sample configs. */
+  @Before
+  public void setupConfiguration() throws Exception {
+    testZkServer = new TestingServer(true);
+    this.zookeeperUrl = testZkServer.getConnectString();
+    byte[] globalConfig = ConfigurationsUtils.readGlobalConfigFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    ConfigurationsUtils.writeGlobalConfigToZookeeper(globalConfig, zookeeperUrl);
+    allConfigurationTypes.add(Constants.GLOBAL_CONFIG_NAME);
+    Map<String, byte[]> sensorEnrichmentConfigs = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    for (String sensorType : sensorEnrichmentConfigs.keySet()) {
+      ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensorType, sensorEnrichmentConfigs.get(sensorType), zookeeperUrl);
+      allConfigurationTypes.add(sensorType);
+    }
+  }
+
+  /** Shuts down the in-process ZooKeeper started by setupConfiguration. */
+  @After
+  public void tearDown() throws Exception {
+    if (testZkServer != null) {
+      testZkServer.close();
+    }
+  }
+
+  @Test
+  public void test() throws Exception {
+    Configurations sampleConfigurations = new Configurations();
+    // A null zookeeper url must be rejected at prepare time.
+    try {
+      StandAloneConfiguredBolt configuredBolt = new StandAloneConfiguredBolt(null);
+      configuredBolt.prepare(new HashMap(), topologyContext, outputCollector);
+      Assert.fail("A valid zookeeper url must be supplied");
+    } catch (RuntimeException e){}
+
+    configsUpdated = new HashSet<>();
+    sampleConfigurations.updateGlobalConfig(ConfigurationsUtils.readGlobalConfigFromFile(TestConstants.SAMPLE_CONFIG_PATH));
+    Map<String, byte[]> sensorEnrichmentConfigs = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    for (String sensorType : sensorEnrichmentConfigs.keySet()) {
+      sampleConfigurations.updateSensorEnrichmentConfig(sensorType, sensorEnrichmentConfigs.get(sensorType));
+    }
+
+    // Initial load: every seeded config should be reported via the callback.
+    StandAloneConfiguredBolt configuredBolt = new StandAloneConfiguredBolt(zookeeperUrl);
+    configuredBolt.prepare(new HashMap(), topologyContext, outputCollector);
+    waitForConfigUpdate(allConfigurationTypes);
+    Assert.assertEquals(sampleConfigurations, configuredBolt.configurations);
+
+    configsUpdated = new HashSet<>();
+    Map<String, Object> sampleGlobalConfig = sampleConfigurations.getGlobalConfig();
+    sampleGlobalConfig.put("newGlobalField", "newGlobalValue");
+    ConfigurationsUtils.writeGlobalConfigToZookeeper(sampleGlobalConfig, zookeeperUrl);
+    waitForConfigUpdate(Constants.GLOBAL_CONFIG_NAME);
+    Assert.assertEquals("Add global config field", sampleConfigurations.getGlobalConfig(), configuredBolt.configurations.getGlobalConfig());
+
+    configsUpdated = new HashSet<>();
+    sampleGlobalConfig.remove("newGlobalField");
+    ConfigurationsUtils.writeGlobalConfigToZookeeper(sampleGlobalConfig, zookeeperUrl);
+    waitForConfigUpdate(Constants.GLOBAL_CONFIG_NAME);
+    Assert.assertEquals("Remove global config field", sampleConfigurations, configuredBolt.configurations);
+
+    configsUpdated = new HashSet<>();
+    String sensorType = "testSensorConfig";
+    SensorEnrichmentConfig testSensorConfig = new SensorEnrichmentConfig();
+    testSensorConfig.setBatchSize(50);
+    testSensorConfig.setIndex("test");
+    Map<String, List<String>> enrichmentFieldMap = new HashMap<>();
+    enrichmentFieldMap.put("enrichmentTest", new ArrayList<String>() {{
+      add("enrichmentField");
+    }});
+    testSensorConfig.setEnrichmentFieldMap(enrichmentFieldMap);
+    Map<String, List<String>> threatIntelFieldMap = new HashMap<>();
+    threatIntelFieldMap.put("threatIntelTest", new ArrayList<String>() {{
+      add("threatIntelField");
+    }});
+    testSensorConfig.setThreatIntelFieldMap(threatIntelFieldMap);
+    sampleConfigurations.updateSensorEnrichmentConfig(sensorType, testSensorConfig);
+    ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(sensorType, testSensorConfig, zookeeperUrl);
+    waitForConfigUpdate(sensorType);
+    Assert.assertEquals("Add new sensor config", sampleConfigurations, configuredBolt.configurations);
+
+    configsUpdated = new HashSet<>();
+    String someConfigType = "someConfig";
+    Map<String, Object> someConfig = new HashMap<>();
+    someConfig.put("someField", "someValue");
+    sampleConfigurations.updateConfig(someConfigType, someConfig);
+    ConfigurationsUtils.writeConfigToZookeeper(someConfigType, someConfig, zookeeperUrl);
+    waitForConfigUpdate(someConfigType);
+    Assert.assertEquals("Add new misc config", sampleConfigurations, configuredBolt.configurations);
+    configuredBolt.cleanup();
+  }
+
+  private void waitForConfigUpdate(final String expectedConfigUpdate) {
+    waitForConfigUpdate(new HashSet<String>() {{ add(expectedConfigUpdate); }});
+  }
+
+  /**
+   * Polls (up to ~3 seconds) until the set of fired reload callbacks equals
+   * expectedConfigUpdates, failing the test on timeout.
+   */
+  private void waitForConfigUpdate(Set<String> expectedConfigUpdates) {
+    int count = 0;
+    while (!configsUpdated.equals(expectedConfigUpdates)) {
+      if (count++ > 5) {
+        Assert.fail("ConfiguredBolt was not updated in time");
+        return;
+      }
+      try {
+        Thread.sleep(500);
+      } catch (InterruptedException e) {
+        // Fix: the original printed the stack trace and dropped the
+        // interrupt. Restore the flag and abort the wait instead.
+        Thread.currentThread().interrupt();
+        Assert.fail("Interrupted while waiting for config update");
+      }
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/cli/ConfigurationsUtilsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/cli/ConfigurationsUtilsTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/cli/ConfigurationsUtilsTest.java
new file mode 100644
index 0000000..bee4af7
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/cli/ConfigurationsUtilsTest.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.cli;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.test.TestingServer;
+import org.apache.metron.TestConstants;
+import org.apache.metron.common.utils.JSONUtils;
+import org.junit.After;
+// Fix: use org.junit.Assert; junit.framework.Assert is the deprecated
+// JUnit 3 class. (The redundant import of ConfigurationsUtils — same
+// package as this test — is also dropped.)
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Round-trip tests for ConfigurationsUtils: configs written to ZooKeeper
+ * (programmatically and via the command line) must read back byte-identical.
+ */
+public class ConfigurationsUtilsTest {
+
+  private TestingServer testZkServer;
+  private String zookeeperUrl;
+  private CuratorFramework client;
+  private byte[] testGlobalConfig;
+  private Map<String, byte[]> testSensorConfigMap;
+
+  /** Starts an in-process ZooKeeper and loads the sample configs from disk. */
+  @Before
+  public void setup() throws Exception {
+    testZkServer = new TestingServer(true);
+    zookeeperUrl = testZkServer.getConnectString();
+    client = ConfigurationsUtils.getClient(zookeeperUrl);
+    client.start();
+    testGlobalConfig = ConfigurationsUtils.readGlobalConfigFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+    testSensorConfigMap = ConfigurationsUtils.readSensorEnrichmentConfigsFromFile(TestConstants.SAMPLE_CONFIG_PATH);
+  }
+
+  @Test
+  public void test() throws Exception {
+    // Global config round-trip.
+    Assert.assertTrue(testGlobalConfig.length > 0);
+    ConfigurationsUtils.writeGlobalConfigToZookeeper(testGlobalConfig, zookeeperUrl);
+    byte[] readGlobalConfigBytes = ConfigurationsUtils.readGlobalConfigBytesFromZookeeper(client);
+    Assert.assertTrue(Arrays.equals(testGlobalConfig, readGlobalConfigBytes));
+
+    // Sensor enrichment config round-trip.
+    Assert.assertTrue(testSensorConfigMap.size() > 0);
+    String testSensorType = "yaf";
+    byte[] testSensorConfigBytes = testSensorConfigMap.get(testSensorType);
+    ConfigurationsUtils.writeSensorEnrichmentConfigToZookeeper(testSensorType, testSensorConfigBytes, zookeeperUrl);
+    byte[] readSensorConfigBytes = ConfigurationsUtils.readSensorEnrichmentConfigBytesFromZookeeper(testSensorType, client);
+    Assert.assertTrue(Arrays.equals(testSensorConfigBytes, readSensorConfigBytes));
+
+    // Arbitrary named config round-trip (compared against its JSON form).
+    String name = "testConfig";
+    Map<String, Object> testConfig = new HashMap<>();
+    testConfig.put("stringField", "value");
+    testConfig.put("intField", 1);
+    testConfig.put("doubleField", 1.1);
+    ConfigurationsUtils.writeConfigToZookeeper(name, testConfig, zookeeperUrl);
+    byte[] readConfigBytes = ConfigurationsUtils.readConfigBytesFromZookeeper(name, client);
+    Assert.assertTrue(Arrays.equals(JSONUtils.INSTANCE.toJSON(testConfig), readConfigBytes));
+
+  }
+
+  /** Exercises the command-line entry point against the same sample configs. */
+  @Test
+  public void testCmdLine() throws Exception {
+    String[] args = {"-z", zookeeperUrl, "-p", TestConstants.SAMPLE_CONFIG_PATH};
+    ConfigurationsUtils.main(args);
+    byte[] readGlobalConfigBytes = ConfigurationsUtils.readGlobalConfigBytesFromZookeeper(client);
+    Assert.assertTrue(Arrays.equals(testGlobalConfig, readGlobalConfigBytes));
+    for(String sensorType: testSensorConfigMap.keySet()) {
+      byte[] readSensorConfigBytes = ConfigurationsUtils.readSensorEnrichmentConfigBytesFromZookeeper(sensorType, client);
+      Assert.assertTrue(Arrays.equals(testSensorConfigMap.get(sensorType), readSensorConfigBytes));
+    }
+  }
+
+  @After
+  public void tearDown() throws IOException {
+    client.close();
+    // Fix: close() already stops the server; the original additionally
+    // called stop() afterwards, which is redundant.
+    testZkServer.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/0117987e/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationTest.java
new file mode 100644
index 0000000..fb45ccc
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/ConfigurationTest.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration;
+
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.api.ExistsBuilder;
+import org.apache.curator.framework.api.GetChildrenBuilder;
+import org.apache.curator.framework.api.GetDataBuilder;
+import org.apache.metron.common.Constants;
+import org.json.simple.JSONObject;
+import org.junit.Test;
+
+import java.nio.file.Paths;
+import java.util.Collections;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Tests for {@link Configuration}, covering its two configuration sources:
+ * loading from a local filesystem path, and loading from ZooKeeper (mocked
+ * here via stubbed Curator builders). Both paths must surface the same
+ * single-entry global config.
+ */
+public class ConfigurationTest {
+
+    // Key/value pair expected both in the file fixture and in the mocked ZooKeeper payload.
+    private static final String TEST_PROPERTY = "configuration.class.test.property";
+    private static final String TEST_VALUE = "Configuration";
+    /**
+     * Loads the global config from the test resources directory and verifies it
+     * contains exactly the expected test property.
+     */
+    @Test
+    public void testCanReadFromFile() throws Exception {
+
+        Configuration configuration = new Configuration(Paths.get("./src/test/resources/config/"));
+        configuration.update();
+
+        checkResult(configuration);
+
+    }
+
+    /**
+     * Verifies that update() reads from ZooKeeper when a CuratorFramework is
+     * injected: the global root returns the mocked JSON payload and all child
+     * listings are empty, so only the global config should be populated.
+     */
+    @Test
+    public void testCanReadFromZookeeper() throws Exception {
+
+        CuratorFramework curatorFramework = mock(CuratorFramework.class);
+        ExistsBuilder existsBuilder = mock(ExistsBuilder.class);
+        GetDataBuilder getDataBuilder = mock(GetDataBuilder.class);
+        GetChildrenBuilder getChildrenBuilder = mock(GetChildrenBuilder.class);
+
+        // Only the global root path yields data; every children lookup returns an empty list.
+        when(getDataBuilder.forPath(Constants.ZOOKEEPER_GLOBAL_ROOT)).thenReturn(mockGlobalData());
+        when(curatorFramework.checkExists()).thenReturn(existsBuilder);
+        when(curatorFramework.getData()).thenReturn(getDataBuilder);
+        when(curatorFramework.getChildren()).thenReturn(getChildrenBuilder);
+        when(getChildrenBuilder.forPath(anyString())).thenReturn(Collections.<String> emptyList());
+
+        // "foo" is a placeholder path; the injected curatorFramework is expected to take precedence.
+        Configuration configuration = new Configuration(Paths.get("foo"));
+        configuration.curatorFramework = curatorFramework;
+        configuration.update();
+
+        checkResult(configuration);
+    }
+
+
+    /** Builds the JSON byte payload served by the mocked ZooKeeper global node. */
+    private byte[] mockGlobalData(){
+
+        // NOTE(review): json-simple's JSONObject extends a raw Map, so this put()
+        // compiles with an unchecked warning — acceptable in test code.
+        JSONObject global = new JSONObject();
+        global.put(TEST_PROPERTY, TEST_VALUE);
+        return global.toString().getBytes();
+
+    }
+
+
+    /** Asserts the global config holds exactly the single expected test property. */
+    private void checkResult( Configuration configuration ){
+
+        assertEquals("File contains 1 entry: ",1,configuration.getGlobalConfig().size());
+        String testValue = configuration.getGlobalConfig().get(TEST_PROPERTY).toString();
+        assertEquals(TEST_PROPERTY + " should be \"" + TEST_VALUE + "\"",TEST_VALUE,testValue);
+
+
+    }
+}
+