Posted to commits@metron.apache.org by ce...@apache.org on 2015/12/17 21:46:10 UTC

[08/26] incubator-metron git commit: replace opensoc-steaming version 0.4BETA with 0.6BETA 8e7a6b4ad9febbc4ea47ba7810c42cc94d4dee37

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
new file mode 100644
index 0000000..98e855e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapReceiverImplRestEasy.java
@@ -0,0 +1,250 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.log4j.Logger;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.pcap.PcapUtils;
+
+@Path("/")
+public class PcapReceiverImplRestEasy {
+
+	/** The Constant LOGGER. */
+	private static final Logger LOGGER = Logger
+			.getLogger(PcapReceiverImplRestEasy.class);
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_NAME. */
+	private static final String HEADER_CONTENT_DISPOSITION_NAME = "Content-Disposition";
+
+	/** The Constant HEADER_CONTENT_DISPOSITION_VALUE. */
+	private static final String HEADER_CONTENT_DISPOSITION_VALUE = "attachment; filename=\"managed-threat.pcap\"";
+
+	/** partial response key header name. */
+	private static final String HEADER_PARTIAL_RESPONE_KEY = "lastRowKey";
+
+	@GET
+	@Path("pcapGetter/getPcapsByKeys")
+	public Response getPcapsByKeys(
+			@QueryParam("keys") List<String> keys,
+			@QueryParam("lastRowKey") String lastRowKey,
+			@DefaultValue("-1") @QueryParam("startTime") long startTime,
+			@DefaultValue("-1") @QueryParam("endTime") long endTime,
+			@QueryParam("includeDuplicateLastRow") boolean includeDuplicateLastRow,
+			@QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
+			@QueryParam("maxResponseSize") String maxResponseSize,
+			@Context HttpServletResponse response) throws IOException {
+		PcapsResponse pcapResponse = null;
+
+		if (keys == null || keys.size() == 0)
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'keys' must not be null or empty").build();
+
+		try {
+			IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+			pcapResponse = pcapGetter.getPcaps(parseKeys(keys), lastRowKey,
+					startTime, endTime, includeReverseTraffic,
+					includeDuplicateLastRow,
+					ConfigurationUtil.validateMaxResultSize(maxResponseSize));
+			LOGGER.info("pcaps response in REST layer ="
+					+ pcapResponse.toString());
+
+			// return http status '204 No Content' if the pcaps response size is
+			// 0
+			if (pcapResponse == null || pcapResponse.getResponseSize() == 0) {
+
+				return Response.status(Response.Status.NO_CONTENT).build();
+			}
+
+			// return http status '206 Partial Content', the partial response
+			// file and
+			// 'lastRowKey' header , if the pcaps response status is 'PARTIAL'
+
+			response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+			if (pcapResponse.getStatus() == PcapsResponse.Status.PARTIAL) {
+
+				response.setHeader(HEADER_PARTIAL_RESPONE_KEY,
+						pcapResponse.getLastRowKey());
+
+				return Response
+						.ok(pcapResponse.getPcaps(),
+								MediaType.APPLICATION_OCTET_STREAM).status(206)
+						.build();
+
+			}
+
+		} catch (IOException e) {
+			LOGGER.error(
+					"Exception occurred while fetching Pcaps for the keys :"
+							+ keys.toString(), e);
+			throw e;
+		}
+
+		// return http status '200 OK' along with the complete pcaps response
+		// file,
+		// and headers
+		// return new ResponseEntity<byte[]>(pcapResponse.getPcaps(), headers,
+		// HttpStatus.OK);
+
+		return Response
+				.ok(pcapResponse.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+
+	}
+	
+	
+	@GET
+	@Path("/pcapGetter/getPcapsByKeyRange")
+
+	  public Response getPcapsByKeyRange(
+	      @QueryParam("startKey") String startKey,
+	      @QueryParam("endKey")String endKey,
+	      @QueryParam("maxResponseSize") String maxResponseSize,
+	      @DefaultValue("-1") @QueryParam("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam("endTime") long endTime, 
+	      @Context HttpServletResponse servlet_response) throws IOException {
+
+		if (startKey == null || startKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'start key' must not be null or empty").build();
+		
+		if (endKey == null || endKey.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'end key' must not be null or empty").build();
+		
+		
+	    byte[] response = null;
+	    try {
+	      IPcapScanner pcapScanner = PcapScannerHBaseImpl.getInstance();
+	      response = pcapScanner.getPcaps(startKey, endKey,
+	          ConfigurationUtil.validateMaxResultSize(maxResponseSize), startTime,
+	          endTime);
+	      if (response == null || response.length == 0) {
+	    	  
+	    	  return Response.status(Response.Status.NO_CONTENT).build();
+	        
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error(
+	          "Exception occurred while fetching Pcaps for the key range : startKey="
+	              + startKey + ", endKey=" + endKey, e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    
+		return Response
+				.ok(response, MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+
+	  /*
+	   * (non-Javadoc)
+	   * 
+	   * @see
+	   * com.cisco.opensoc.hbase.client.IPcapReceiver#getPcapsByIdentifiers(java.lang
+	   * .String, java.lang.String, java.lang.String, java.lang.String,
+	   * java.lang.String, long, long, boolean,
+	   * javax.servlet.http.HttpServletResponse)
+	   */
+	  
+	@GET
+	@Path("/pcapGetter/getPcapsByIdentifiers")
+
+	  public Response getPcapsByIdentifiers(
+	      @QueryParam ("srcIp") String srcIp, 
+	      @QueryParam ("dstIp") String dstIp,
+	      @QueryParam ("protocol") String protocol, 
+	      @QueryParam ("srcPort") String srcPort,
+	      @QueryParam ("dstPort") String dstPort,
+	      @DefaultValue("-1") @QueryParam ("startTime")long startTime,
+	      @DefaultValue("-1") @QueryParam ("endTime")long endTime,
+	      @DefaultValue("false") @QueryParam ("includeReverseTraffic") boolean includeReverseTraffic,
+	      @Context HttpServletResponse servlet_response)
+	      
+	      throws IOException {
+		
+		if (srcIp == null || srcIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcIp' must not be null or empty").build();
+		
+		if (dstIp == null || dstIp.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstIp' must not be null or empty").build();
+		
+		if (protocol == null || protocol.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'protocol' must not be null or empty").build();
+		
+		if (srcPort == null || srcPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'srcPort' must not be null or empty").build();
+		
+		if (dstPort == null || dstPort.equals(""))
+			return Response.serverError().status(Response.Status.NO_CONTENT)
+					.entity("'dstPort' must not be null or empty").build();
+		
+	
+	    PcapsResponse response = null;
+	    try {
+	      String sessionKey = PcapUtils.getSessionKey(srcIp, dstIp, protocol,
+	          srcPort, dstPort);
+	      LOGGER.info("sessionKey =" + sessionKey);
+	      IPcapGetter pcapGetter = PcapGetterHBaseImpl.getInstance();
+	      response = pcapGetter.getPcaps(Arrays.asList(sessionKey), null,
+	          startTime, endTime, includeReverseTraffic, false,
+	          ConfigurationUtil.getDefaultResultSize());
+	      if (response == null || response.getResponseSize() == 0) {
+	         return Response.status(Response.Status.NO_CONTENT).build();
+	      }
+	      servlet_response.setHeader(HEADER_CONTENT_DISPOSITION_NAME,
+					HEADER_CONTENT_DISPOSITION_VALUE);
+
+	    } catch (IOException e) {
+	      LOGGER.error("Exception occurred while fetching Pcaps by identifiers :",
+	          e);
+	      throw e;
+	    }
+	    // return http status '200 OK' along with the complete pcaps response file,
+	    // and headers
+	    return Response
+				.ok(response.getPcaps(), MediaType.APPLICATION_OCTET_STREAM)
+				.status(200).build();
+	  }
+	/**
+	 * Parses each value in the given list using the ',' delimiter and builds a
+	 * new list.
+	 * 
+	 * @param keys
+	 *            list of keys to be parsed
+	 * @return list of keys
+	 */
+	@VisibleForTesting
+	List<String> parseKeys(List<String> keys) {
+		// Assert.notEmpty(keys);
+		List<String> parsedKeys = new ArrayList<String>();
+		for (String key : keys) {
+			parsedKeys.addAll(Arrays.asList(StringUtils.split(
+					StringUtils.trim(key), ",")));
+		}
+		return parsedKeys;
+	}
+}
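
As a minimal client sketch for the endpoints above (host, port, identifier values, and the output file name are assumptions, not part of this commit; the endpoint path and the 200/204 behaviour come from the code itself, and the service is started by PcapService further down in this change), a plain-JDK call to getPcapsByIdentifiers could look like this:

    import java.io.InputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class GetPcapsByIdentifiersExample {
      public static void main(String[] args) throws Exception {
        // illustrative identifiers only; protocol 6 = TCP
        String query = "srcIp=10.0.0.1&dstIp=10.0.0.2&protocol=6"
            + "&srcPort=1024&dstPort=80&includeReverseTraffic=false";
        URL url = new URL("http://localhost:8081/pcapGetter/getPcapsByIdentifiers?" + query);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/octet-stream");
        // 200 = complete pcap returned, 204 = nothing matched the identifiers
        if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
          try (InputStream in = conn.getInputStream()) {
            Files.copy(in, Paths.get("managed-threat.pcap"));
          }
        }
        conn.disconnect();
      }
    }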

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
new file mode 100644
index 0000000..b1f0179
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapScannerHBaseImpl.java
@@ -0,0 +1,302 @@
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.NoServerForRegionException;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.log4j.Logger;
+import org.springframework.util.Assert;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.opensoc.pcap.PcapMerger;
+
+/**
+ * Singleton class that reads the HBase table and returns pcaps sorted by
+ * timestamp for the given range of keys. Creates an HConnection if one does
+ * not already exist and reuses the same connection instance for all
+ * requests.
+ * 
+ * @author sheetal
+ * @version $Revision: 1.0 $
+ */
+public class PcapScannerHBaseImpl implements IPcapScanner {
+
+  /** The Constant LOGGER. */
+  private static final Logger LOGGER = Logger
+      .getLogger(PcapScannerHBaseImpl.class);
+
+  /** The Constant DEFAULT_HCONNECTION_RETRY_LIMIT. */
+  private static final int DEFAULT_HCONNECTION_RETRY_LIMIT = 0;
+
+  /** The singleton pcap scanner instance. */
+  private static IPcapScanner pcapScannerHBase = null;
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String, long, long, long)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey, long maxResultSize,
+      long startTime, long endTime) throws IOException {
+    Assert.hasText(startKey, "startKey must not be null or empty");
+    byte[] cf = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.family"));
+    byte[] cq = Bytes.toBytes(ConfigurationUtil.getConfiguration()
+        .getString("hbase.table.column.qualifier"));
+    // create scan request
+    Scan scan = createScanRequest(cf, cq, startKey, endKey, maxResultSize,
+        startTime, endTime);
+    List<byte[]> pcaps = new ArrayList<byte[]>();
+    HTable table = null;
+    try {
+      pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+    } catch (IOException e) {
+      LOGGER.error(
+          "Exception occurred while fetching Pcaps for the key range : startKey="
+              + startKey + ", endKey=" + endKey, e);
+      if (e instanceof ZooKeeperConnectionException
+          || e instanceof MasterNotRunningException
+          || e instanceof NoServerForRegionException) {
+        int maxRetryLimit = getConnectionRetryLimit();
+        for (int attempt = 1; attempt <= maxRetryLimit; attempt++) {
+          try {
+            HBaseConfigurationUtil.closeConnection(); // closing the existing
+                                                      // connection and retry,
+                                                      // it will create a new
+                                                      // HConnection
+            pcaps = scanPcaps(pcaps, table, scan, cf, cq);
+            break;
+          } catch (IOException ie) {
+            if (attempt == maxRetryLimit) {
+              System.out.println("Throwing the exception after retrying "
+                  + maxRetryLimit + " times.");
+              throw e;
+            }
+          }
+        }
+      } else {
+        throw e;
+      }
+    } finally {
+      if (table != null) {
+        table.close();
+      }
+    }
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    byte[] response = baos.toByteArray();
+    return response;
+  }
+
+  /**
+   * Creates the scan request.
+   * 
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @param maxResultSize
+   *          the max result size
+   * @param startTime
+   *          the start time
+   * @param endTime
+   *          the end time
+   * @return the scan
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  Scan createScanRequest(byte[] cf, byte[] cq, String startKey, String endKey,
+      long maxResultSize, long startTime, long endTime) throws IOException {
+    Scan scan = new Scan();
+    scan.addColumn(cf, cq);
+    scan.setMaxVersions(ConfigurationUtil.getConfiguration().getInt(
+        "hbase.table.column.maxVersions"));
+    scan.setStartRow(startKey.getBytes());
+    if (endKey != null) {
+      scan.setStopRow(endKey.getBytes());
+    }
+    scan.setMaxResultSize(maxResultSize);
+    boolean setTimeRange = true;
+    if (startTime < 0 && endTime < 0) {
+      setTimeRange = false;
+    }
+    if (setTimeRange) {
+      if (startTime < 0) {
+        startTime = 0;
+      } else {
+        startTime = PcapHelper.convertToDataCreationTimeUnit(startTime);
+      }
+      if (endTime < 0) {
+        endTime = Long.MAX_VALUE;
+      } else {
+        endTime = PcapHelper.convertToDataCreationTimeUnit(endTime);
+      }
+      Assert.isTrue(startTime < endTime,
+          "startTime value must be less than endTime value");
+    }
+    // create Scan request;
+    if (setTimeRange) {
+      scan.setTimeRange(startTime, endTime);
+    }
+    return scan;
+  }
+
+  /**
+   * Scan pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   * @param table
+   *          the table
+   * @param scan
+   *          the scan
+   * @param cf
+   *          the cf
+   * @param cq
+   *          the cq
+   * @return the list
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @VisibleForTesting
+  List<byte[]> scanPcaps(List<byte[]> pcaps, HTable table, Scan scan,
+      byte[] cf, byte[] cq) throws IOException {
+    LOGGER.info("Scan =" + scan.toString());
+    table = (HTable) HBaseConfigurationUtil.getConnection().getTable(
+    		ConfigurationUtil.getConfiguration().getString("hbase.table.name"));
+    ResultScanner resultScanner = table.getScanner(scan);
+    List<Cell> scannedCells = new ArrayList<Cell>();
+    for (Result result = resultScanner.next(); result != null; result = resultScanner
+        .next()) {
+      List<Cell> cells = result.getColumnCells(cf, cq);
+      if (cells != null) {
+        for (Cell cell : cells) {
+          scannedCells.add(cell);
+        }
+      }
+    }
+    Collections.sort(scannedCells, PcapHelper.getCellTimestampComparator());
+    LOGGER.info("sorted cells :" + scannedCells.toString());
+    for (Cell sortedCell : scannedCells) {
+      pcaps.add(CellUtil.cloneValue(sortedCell));
+    }
+    return pcaps;
+  }
+
+  /**
+   * Gets the connection retry limit.
+   * 
+   * @return the connection retry limit
+   */
+  private int getConnectionRetryLimit() {
+    return ConfigurationUtil.getConfiguration().getInt(
+        "hbase.hconnection.retries.number", DEFAULT_HCONNECTION_RETRY_LIMIT);
+  }
+
+  /*
+   * (non-Javadoc)
+   * 
+   * @see com.cisco.opensoc.hbase.client.IPcapScanner#getPcaps(java.lang.String,
+   * java.lang.String)
+   */
+  
+  public byte[] getPcaps(String startKey, String endKey) throws IOException {
+    Assert.hasText(startKey, "startKey must not be null or empty");
+    Assert.hasText(endKey, "endKey must not be null or empty");
+    return getPcaps(startKey, endKey, ConfigurationUtil.getDefaultResultSize(),
+        -1, -1);
+  }
+
+  /**
+   * Always returns the singleton instance.
+   * 
+   * @return IPcapScanner singleton instance
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public static IPcapScanner getInstance() throws IOException {
+    if (pcapScannerHBase == null) {
+      synchronized (PcapScannerHBaseImpl.class) {
+        if (pcapScannerHBase == null) {
+          pcapScannerHBase = new PcapScannerHBaseImpl();
+        }
+      }
+    }
+    return pcapScannerHBase;
+  }
+
+  /**
+   * Instantiates a new PcapScannerHBaseImpl.
+   */
+  private PcapScannerHBaseImpl() {
+  }
+
+  /**
+   * The main method.
+   */
+  // public static void main(String[] args) throws IOException {
+  // if (args == null || args.length < 3) {
+  // usage();
+  // return;
+  // }
+  // String outputFileName = null;
+  // String startKey = null;
+  // String stopKey = null;
+  // outputFileName = args[0];
+  // startKey = args[1];
+  // if (args.length > 2) { // NOPMD by sheetal on 1/29/14 3:55 PM
+  // stopKey = args[2];
+  // }
+  // PcapScannerHBaseImpl downloader = new PcapScannerHBaseImpl();
+  // byte[] pcaps = downloader.getPcaps(startKey, stopKey, defaultResultSize, 0,
+  // Long.MAX_VALUE);
+  // File file = new File(outputFileName);
+  // FileUtils.write(file, "", false);
+  // ByteArrayOutputStream baos = new ByteArrayOutputStream(); //
+  // $codepro.audit.disable
+  // // closeWhereCreated
+  // PcapMerger.merge(baos, pcaps);
+  // FileUtils.writeByteArrayToFile(file, baos.toByteArray(), true);
+  // }
+
+  /**
+   * Usage.
+   */
+  @SuppressWarnings("unused")
+  private static void usage() {
+    System.out.println("java " + PcapScannerHBaseImpl.class.getName() // NOPMD
+                                                                      // by
+        // sheetal
+        // <!-- //
+        // $codepro.audit.disable
+        // debuggingCode
+        // -->
+        // on
+        // 1/29/14
+        // 3:55
+        // PM
+        + " <zk quorum> <output file> <start key> [stop key]");
+  }
+}
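
A minimal usage sketch of the scanner above (the key values and the output file name are illustrative assumptions; connection settings are expected to come from the hbase-config properties files added later in this change):

    import java.io.File;
    import java.io.IOException;

    import org.apache.commons.io.FileUtils;

    import com.opensoc.pcapservice.IPcapScanner;
    import com.opensoc.pcapservice.PcapScannerHBaseImpl;

    public class PcapScannerExample {
      public static void main(String[] args) throws IOException {
        IPcapScanner scanner = PcapScannerHBaseImpl.getInstance();
        // fetch pcaps for an illustrative key range, using the configured default result size
        byte[] pcaps = scanner.getPcaps("18000002-18800002-06-0436-0019",
            "18000002-18800002-06-b773-0019");
        // the scanner already merges the scanned cells into a single pcap stream
        FileUtils.writeByteArrayToFile(new File("range.pcap"), pcaps);
      }
    }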

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
new file mode 100644
index 0000000..10af9e0
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/PcapsResponse.java
@@ -0,0 +1,153 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import com.opensoc.pcap.PcapMerger;
+
+
+
+/**
+ * Holds pcaps data, status and the partial response key.
+ * 
+ * @author Sayi
+ */
+public class PcapsResponse {
+
+  /**
+   * The Enum Status.
+   */
+  public enum Status {
+
+    /** The partial. */
+    PARTIAL,
+    /** The complete. */
+    COMPLETE
+  }
+
+  /** response of the processed keys. */
+  private List<byte[]> pcaps = new ArrayList<byte[]>();
+
+  /** partial response key. */
+  private String lastRowKey;
+
+  /** The status. */
+  private Status status = Status.COMPLETE;
+
+  /**
+   * Sets the pcaps.
+   * 
+   * @param pcaps
+   *          the new pcaps
+   */
+  public void setPcaps(List<byte[]> pcaps) {
+    this.pcaps = pcaps;
+  }
+
+  /**
+   * Adds the pcaps.
+   * 
+   * @param pcaps
+   *          the pcaps
+   */
+  public void addPcaps(byte[] pcaps) {
+    this.pcaps.add(pcaps);
+  }
+
+  /**
+   * Gets the partial response key.
+   * 
+   * @return the partial response key
+   */
+  public String getLastRowKey() {
+    return lastRowKey;
+  }
+
+  /**
+   * Sets the partial response key.
+   * 
+   * @param lastRowKey
+   *          the last row key
+   */
+  public void setLastRowKey(String lastRowKey) {
+    this.lastRowKey = lastRowKey;
+  }
+
+  /**
+   * Gets the status.
+   * 
+   * @return the status
+   */
+  public Status getStatus() {
+    return status;
+  }
+
+  /**
+   * Sets the status.
+   * 
+   * @param status
+   *          the new status
+   */
+  public void setStatus(Status status) {
+    this.status = status;
+  }
+
+  /**
+   * Checks whether the response size is within the limit.
+   * 
+   * @param maxResultSize
+   *          the max result size
+   * @return true, if the response size is within the limit
+   */
+  public boolean isResonseSizeWithinLimit(long maxResultSize) {
+    // System.out.println("isResonseSizeWithinLimit() : getResponseSize() < (input|default result size - maximum packet size ) ="+
+    // getResponseSize()+ " < " + ( maxResultSize
+    // -ConfigurationUtil.getMaxRowSize()));
+    return getResponseSize() < (maxResultSize - ConfigurationUtil
+        .getMaxRowSize());
+  }
+
+  /**
+   * Gets the response size.
+   * 
+   * @return the response size
+   */
+  public long getResponseSize() {
+    long responseSize = 0;
+    for (byte[] pcap : this.pcaps) {
+      responseSize = responseSize + pcap.length;
+    }
+    return responseSize;
+  }
+
+  /**
+   * Gets the pcaps.
+   * 
+   * @return the pcaps
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  public byte[] getPcaps() throws IOException {
+    if (pcaps.size() == 1) {
+      return pcaps.get(0);
+    }
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PcapMerger.merge(baos, pcaps);
+    return baos.toByteArray();
+  }
+
+  /* (non-Javadoc)
+   * @see java.lang.Object#toString()
+   */
+  @Override
+  public String toString() {
+    return "PcapsResponse [lastRowKey=" + lastRowKey
+        + ", status=" + status + ", pcapsSize="
+        + String.valueOf(getResponseSize()) + "]";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
new file mode 100644
index 0000000..651affe
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/RestTestingUtil.java
@@ -0,0 +1,238 @@
+package com.opensoc.pcapservice;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpMethod;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.client.RestTemplate;
+
+/**
+ * The Class RestTestingUtil.
+ */
+public class RestTestingUtil {
+  
+  /** The host name. */
+  public static String hostName = null;
+
+  /**
+   * Gets the pcaps by keys.
+   * 
+   * @param keys
+   *          the keys
+   * @return the pcaps by keys
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeys(String keys) {
+    System.out
+        .println("**********************getPcapsByKeys ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeys?keys={keys}"
+        + "&includeReverseTraffic={includeReverseTraffic}"
+        + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    String includeReverseTraffic = "false";
+
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("keys", keys);
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with reverse traffic
+    includeReverseTraffic = "true";
+    map.put("includeReverseTraffic", includeReverseTraffic);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3.with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 4.with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response4 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeys : request= <keys=%s; includeReverseTraffic=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            keys, includeReverseTraffic, startTime, endTime, maxResponseSize,
+            response4);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * Gets the pcaps by keys range.
+   * 
+   * @param startKey
+   *          the start key
+   * @param endKey
+   *          the end key
+   * @return the pcaps by keys range
+   */
+  @SuppressWarnings("unchecked")
+  private static void getPcapsByKeysRange(String startKey, String endKey) {
+    System.out
+        .println("**********************getPcapsByKeysRange ******************************************************************************************");
+    // 1.
+    String url = "http://" + hostName
+        + "/cisco-rest/pcapGetter/getPcapsByKeyRange?startKey={startKey}"
+        + "&endKey={endKey}" + "&startTime={startTime}" + "&endTime={endTime}"
+        + "&maxResponseSize={maxResponseSize}";
+    // default values
+    String startTime = "-1";
+    String endTime = "-1";
+    String maxResponseSize = "6";
+    @SuppressWarnings("rawtypes")
+    Map map = new HashMap();
+    map.put("startKey", startKey);
+    map.put("endKey", "endKey");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    map.put("maxResponseSize", maxResponseSize);
+
+    RestTemplate template = new RestTemplate();
+
+    // set headers and entity to send
+    HttpHeaders headers = new HttpHeaders();
+    headers.set("Accept", MediaType.APPLICATION_OCTET_STREAM_VALUE);
+    HttpEntity<Object> requestEntity = new HttpEntity<Object>(headers);
+
+    // 1.
+    ResponseEntity<byte[]> response1 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response1);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 2. with time range
+    startTime = System.getProperty("startTime", "-1");
+    endTime = System.getProperty("endTime", "-1");
+    map.put("startTime", startTime);
+    map.put("endTime", endTime);
+    ResponseEntity<byte[]> response2 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response2);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+    // 3. with maxResponseSize
+    maxResponseSize = System.getProperty("maxResponseSize", "6");
+    map.put("maxResponseSize", maxResponseSize);
+    ResponseEntity<byte[]> response3 = template.exchange(url, HttpMethod.GET,
+        requestEntity, byte[].class, map);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out
+        .format(
+            "getPcapsByKeysRange : request= <startKey=%s; endKey=%s; startTime=%s; endTime=%s; maxResponseSize=%s> \n response= %s \n",
+            startKey, endKey, startTime, endTime, maxResponseSize, response3);
+    System.out
+        .println("----------------------------------------------------------------------------------------------------");
+    System.out.println();
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   */
+  public static void main(String[] args) {
+
+    /*
+     * Run this program with system properties
+     * 
+     * -DhostName=mon.hw.com:8090
+     * -Dkeys=18800006-1800000b-06-0019-b39d,18800006-
+     * 1800000b-06-0050-5af6-64840-40785
+     * -DstartKey=18000002-18800002-06-0436-0019-2440-34545
+     * -DendKey=18000002-18800002-06-b773-0019-2840-34585
+     */
+
+    hostName = System.getProperty("hostName");
+
+    String keys = System.getProperty("keys");
+
+    String startKey = System.getProperty("startKey");
+    String endKey = System.getProperty("endKey");
+
+    getPcapsByKeys(keys);
+    getPcapsByKeysRange(startKey, endKey);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
new file mode 100644
index 0000000..1fdb025
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/JettyServiceRunner.java
@@ -0,0 +1,26 @@
+package com.opensoc.pcapservice.rest;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.core.Application;
+
+import com.opensoc.pcapservice.PcapReceiverImplRestEasy;
+
+public class JettyServiceRunner extends Application  {
+	
+
+	private static Set<Object> services = new HashSet<Object>();
+		
+	public  JettyServiceRunner() {     
+		// initialize restful services   
+		services.add(new PcapReceiverImplRestEasy());  
+	}
+	@Override
+	public Set<Object> getSingletons() {
+		return services;
+	}  
+	public static Set<Object> getServices() {
+		return services;
+	} 
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
new file mode 100644
index 0000000..5f47ead
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/com/opensoc/pcapservice/rest/PcapService.java
@@ -0,0 +1,34 @@
+package com.opensoc.pcapservice.rest;
+
+import java.io.IOException;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.jboss.resteasy.plugins.server.servlet.HttpServletDispatcher;
+
+import com.opensoc.helpers.services.PcapServiceCli;
+
+
+public class PcapService {
+
+	public static void main(String[] args) throws IOException {
+
+		PcapServiceCli cli = new PcapServiceCli(args);
+		cli.parse();
+		
+		Server server = new Server(cli.getPort());
+		ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+		context.setContextPath("/");
+		ServletHolder h = new ServletHolder(new HttpServletDispatcher());
+		h.setInitParameter("javax.ws.rs.Application", "com.opensoc.pcapservice.rest.JettyServiceRunner");
+		context.addServlet(h, "/*");
+		server.setHandler(context);
+		try {
+			server.start();
+			server.join();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
new file mode 100644
index 0000000..6b17410
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/OnlyDeleteExpiredFilesCompactionPolicy.java
@@ -0,0 +1,37 @@
+package org.apache.hadoop.hbase.regionserver.compactions;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.regionserver.StoreConfigInformation;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
+
+public class OnlyDeleteExpiredFilesCompactionPolicy extends RatioBasedCompactionPolicy {
+  private static final Log LOG = LogFactory.getLog(OnlyDeleteExpiredFilesCompactionPolicy.class);
+
+  /**
+   * Constructor.
+   * 
+   * @param conf
+   *          The Conf.
+   * @param storeConfigInfo
+   *          Info about the store.
+   */
+  public OnlyDeleteExpiredFilesCompactionPolicy(final Configuration conf, final StoreConfigInformation storeConfigInfo) {
+    super(conf, storeConfigInfo);
+  }
+
+  @Override
+  final ArrayList<StoreFile> applyCompactionPolicy(final ArrayList<StoreFile> candidates, final boolean mayUseOffPeak,
+      final boolean mayBeStuck) throws IOException {
+    LOG.info("Sending empty list for compaction to avoid compaction and do only deletes of files older than TTL");
+
+    return new ArrayList<StoreFile>();
+  }
+
+}
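
For this policy to take effect, HBase must be configured to load it in place of the ratio-based default. A sketch of the relevant property (the key shown is the default store engine's compaction-policy setting, stated here as an assumption about the target HBase version; it could go in hbase-site.xml or be set as a per-table/column-family override):

    <property>
      <name>hbase.hstore.defaultengine.compactionpolicy.class</name>
      <value>org.apache.hadoop.hbase.regionserver.compactions.OnlyDeleteExpiredFilesCompactionPolicy</value>
    </property>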

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
new file mode 100644
index 0000000..efe05e8
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/config-definition-hbase.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<configuration>
+	<header>
+		<result delimiterParsingDisabled="true" forceReloadCheck="true"></result>
+		<lookups>
+      		<lookup config-prefix="expr"
+              	config-class="org.apache.commons.configuration.interpol.ExprLookup">
+        		<variables>
+          			<variable name="System" value="Class:java.lang.System"/>
+          			<variable name="net" value="Class:java.net.InetAddress"/>
+          			<variable name="String" value="Class:org.apache.commons.lang.StringUtils"/>
+        		</variables>
+      		</lookup>
+    	</lookups>
+	</header>
+	<override>
+		<!-- 1. Properties from 'hbase-config.properties' are loaded first;
+				if a property is not present in that file, the remaining files are searched in the order they are defined here.
+		     2. 'refreshDelay' is the minimum delay in milliseconds between checks for changes to the underlying file.
+		     3. 'config-optional' indicates the file is not required --> 
+		
+		<properties fileName="${expr:System.getProperty('configPath')+'/hbase-config.properties'}"  config-optional="true">
+			<reloadingStrategy refreshDelay="${expr:System.getProperty('configRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+	     </properties>
+		
+		<properties fileName="hbase-config-default.properties" config-optional="true">
+<!-- 					<reloadingStrategy refreshDelay="${expr:System.getProperty('defaultConfigRefreshDelay')}"
+	      config-class="org.apache.commons.configuration.reloading.FileChangedReloadingStrategy"/>
+ -->	     </properties>
+		
+	</override>
+</configuration>
\ No newline at end of file
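
A sketch of how the service might be launched so that the 'configPath' and 'configRefreshDelay' lookups above resolve (the directory, delay value, and jar name are assumptions; only the system property names and the main class come from this change, and any command-line options consumed by PcapServiceCli are omitted here):

    java -DconfigPath=/etc/opensoc/pcap-service \
         -DconfigRefreshDelay=60000 \
         -cp opensoc-pcap-service.jar \
         com.opensoc.pcapservice.rest.PcapService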

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
new file mode 100644
index 0000000..4ee56b6
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-config-default.properties
@@ -0,0 +1,40 @@
+#hbase zookeeper configuration
+hbase.zookeeper.quorum=zkpr1,zkpr2,zkpr3
+hbase.zookeeper.clientPort=2181
+hbase.client.retries.number=1
+zookeeper.session.timeout=60000
+zookeeper.recovery.retry=0
+
+#hbase table configuration
+hbase.table.name=pcap
+hbase.table.column.family=t
+hbase.table.column.qualifier=pcap
+hbase.table.column.maxVersions=5
+
+# scan size limit configuration in MB or KB; if the input is negative or greater than the max value, an error is thrown.
+hbase.scan.result.size.unit=MB
+hbase.scan.default.result.size=6
+hbase.scan.max.result.size=60
+
+# timestamp conversion configuration; possible values 'SECONDS' (seconds), 'MILLIS' (milliseconds), 'MICROS' (microseconds)
+hbase.table.data.time.unit=MILLIS
+
+#number of retries in case of ZooKeeper or HBase server down
+hbase.hconnection.retries.number=3
+
+#configuration for including pcaps in the reverse traffic
+pcaps.include.reverse.traffic = false
+
+#maximum table row size in KB or MB 
+hbase.table.row.size.unit = KB
+hbase.table.max.row.size = 70
+
+# tokens of row key configuration
+hbase.table.row.key.tokens=7
+rest.api.input.key.min.tokens=5
+
+# whether or not to include the last row from the previous request; applicable only to the partial response scenario
+hbase.table.scan.include.duplicate.lastrow=true
+
+#number of digits for appending tokens of the row key
+hbase.table.row.key.token.appending.digits=5

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
new file mode 100644
index 0000000..5c3c819
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/hbase-site.xml
@@ -0,0 +1,127 @@
+<!--Tue Apr  1 18:16:39 2014-->
+  <configuration>
+    <property>
+    <name>hbase.tmp.dir</name>
+    <value>/disk/h/hbase</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.chunkpool.maxsize</name>
+    <value>0.5</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.codecs</name>
+    <value>lzo,gz,snappy</value>
+  </property>
+    <property>
+    <name>hbase.hstore.flush.retries.number</name>
+    <value>120</value>
+  </property>
+    <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>10485760</value>
+  </property>
+    <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://nn1:8020/apps/hbase/data</value>
+  </property>
+    <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.client.scanner.caching</name>
+    <value>100</value>
+  </property>
+    <property>
+    <name>hbase.superuser</name>
+    <value>hbase</value>
+  </property>
+    <property>
+    <name>hfile.block.cache.size</name>
+    <value>0.40</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.checksum.verify</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.enabled</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>107374182400</value>
+  </property>
+    <property>
+    <name>hbase.cluster.distributed</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>zookeeper.session.timeout</name>
+    <value>30000</value>
+  </property>
+    <property>
+    <name>zookeeper.znode.parent</name>
+    <value>/hbase-unsecure</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.lowerLimit</name>
+    <value>0.38</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>240</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.mslab.chunksize</name>
+    <value>8388608</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.quorum</name>
+    <value>zkpr1,zkpr2,zkpr3</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.useMulti</name>
+    <value>true</value>
+  </property>
+    <property>
+    <name>hbase.hregion.majorcompaction</name>
+    <value>86400000</value>
+  </property>
+    <property>
+    <name>hbase.hstore.blockingStoreFiles</name>
+    <value>200</value>
+  </property>
+    <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>2181</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.flush.size</name>
+    <value>134217728</value>
+  </property>
+    <property>
+    <name>hbase.security.authorization</name>
+    <value>false</value>
+  </property>
+    <property>
+    <name>hbase.regionserver.global.memstore.upperLimit</name>
+    <value>0.4</value>
+  </property>
+    <property>
+    <name>hbase.hstore.compactionThreshold</name>
+    <value>4</value>
+  </property>
+    <property>
+    <name>hbase.hregion.memstore.block.multiplier</name>
+    <value>8</value>
+  </property>
+    <property>
+    <name>hbase.security.authentication</name>
+    <value>simple</value>
+  </property>
+    <property>
+    <name>dfs.client.read.shortcircuit</name>
+    <value>true</value>
+  </property>
+  </configuration>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
new file mode 100644
index 0000000..0b6ca10
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/main/resources/log4j.properties
@@ -0,0 +1,21 @@
+# Root logger option
+log4j.rootLogger=TRACE,file,stdout
+
+# Direct log messages to a log file
+log4j.appender.file=org.apache.log4j.RollingFileAppender
+log4j.appender.file.File=/var/log/hbase/cisco-hbase.log
+log4j.appender.file.MaxFileSize=1MB
+log4j.appender.file.MaxBackupIndex=1
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+
+# Direct log messages to console
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+
+log4j.logger.backtype.storm=DEBUG
+log4j.logger.clojure.tools=DEBUG
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
new file mode 100644
index 0000000..c2a4bf2
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/CellTimestampComparatorTest.java
@@ -0,0 +1,92 @@
+package com.opensoc.pcapservice;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.hbase.Cell;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+import com.opensoc.pcapservice.CellTimestampComparator;
+
+/**
+ * The Class CellTimestampComparatorTest.
+ */
+public class CellTimestampComparatorTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_less.
+   */
+  @Test
+  public void test_less() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13845345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == -1);
+
+  }
+
+  /**
+   * Test_greater.
+   */
+  @Test
+  public void test_greater() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13745345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 1);
+
+  }
+
+  /**
+   * Test_equal.
+   */
+  @Test
+  public void test_equal() {
+    // mocking
+    Cell cell1 = Mockito.mock(Cell.class);
+    Mockito.when(cell1.getTimestamp()).thenReturn(13945345808L);
+    Cell cell2 = Mockito.mock(Cell.class);
+    Mockito.when(cell2.getTimestamp()).thenReturn(13945345808L);
+
+    CellTimestampComparator comparator = new CellTimestampComparator();
+
+    // actual call and verify
+    Assert.assertTrue(comparator.compare(cell2, cell1) == 0);
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
new file mode 100644
index 0000000..7adf388
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/ConfigurationUtilTest.java
@@ -0,0 +1,50 @@
+package com.opensoc.pcapservice;
+
+import org.eclipse.jdt.internal.core.Assert;
+import org.junit.Test;
+
+import com.opensoc.pcapservice.ConfigurationUtil;
+import com.opensoc.pcapservice.ConfigurationUtil.SizeUnit;
+
+/**
+ * The Class ConfigurationUtilTest.
+ */
+public class ConfigurationUtilTest {
+
+  /**
+   * Test_get max allowable result size in bytes.
+   */
+  @Test
+  public void test_getMaxAllowableResultSizeInBytes() {
+    long result = ConfigurationUtil.getMaxResultSize();
+    Assert.isTrue(result == 62914560);
+  }
+
+  /**
+   * Test_get max allowable results size unit.
+   */
+  @Test
+  public void test_getMaxAllowableResultsSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getResultSizeUnit();
+    Assert.isTrue(SizeUnit.MB == result);
+  }
+
+  /**
+   * Test_get max row size in bytes.
+   */
+  @Test
+  public void test_getMaxRowSizeInBytes() {
+    long result = ConfigurationUtil.getMaxRowSize();
+    Assert.isTrue(result == 71680);
+  }
+
+  /**
+   * Test_get max row size unit.
+   */
+  @Test
+  public void test_getMaxRowSizeUnit() {
+    SizeUnit result = ConfigurationUtil.getRowSizeUnit();
+    Assert.isTrue(SizeUnit.KB == result);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
new file mode 100644
index 0000000..91f87a9
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseConfigurationUtilTest.java
@@ -0,0 +1,52 @@
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.util.Assert;
+
+import com.opensoc.pcapservice.HBaseConfigurationUtil;
+
+/**
+ * The Class HBaseConfigurationUtilTest.
+ */
+public class HBaseConfigurationUtilTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_read.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_read() throws IOException {
+    Configuration configuration = HBaseConfigurationUtil.read();
+    Assert.isTrue(configuration != null, "Configuration must not be null");
+    Assert.isTrue(configuration.get("hbase.client.retries.number").equals("1"),
+        "value must be equal");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
new file mode 100644
index 0000000..75f8121
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/HBaseIntegrationTest.java
@@ -0,0 +1,74 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.util.Bytes;
+
+/**
+ * The Class HBaseIntegrationTest.
+ * 
+ * @author Sayi
+ */
+public class HBaseIntegrationTest {
+
+  /** The test util. */
+  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();
+
+  /** The test table. */
+  private HTable testTable;
+
+  /**
+   * Inits the cluster.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  void initCluster() throws Exception {
+    // testUtil.getConfiguration().addResource("hbase-site-local.xml");
+    // testUtil.getConfiguration().reloadConfiguration();
+    // start mini hbase cluster
+    testUtil.startMiniCluster(1);
+    // create tables
+    createTable();
+
+  }
+
+  /**
+   * Creates the table.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private void createTable() throws IOException {
+    testTable = testUtil.createTable("test_pcaps_local", "cf");
+    System.out.println("after 'test_pcaps_local' table creation ");
+    // create put
+    Put put = new Put(Bytes.toBytes("1111")); // row key =1111
+    put.add(Bytes.toBytes("cf"), Bytes.toBytes("packet"),
+        Bytes.toBytes("aaaaaaaa"));
+    testTable.put(put);
+    System.out.println("after testTable.put(put)");
+
+  }
+
+  /**
+   * The main method.
+   * 
+   * @param args
+   *          the arguments
+   * @throws Exception
+   *           the exception
+   */
+  public static void main(String[] args) throws Exception {
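+    // uncomment the lines below to spin up the mini cluster and create the
+    // test table manually; this class has no JUnit annotations and is not
+    // executed as part of the automated test run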
+    // HBaseIntegrationTest test = new HBaseIntegrationTest();
+    // test.initCluster();
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
new file mode 100644
index 0000000..6e0ad9e
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapGetterHBaseImplTest.java
@@ -0,0 +1,536 @@
+package com.opensoc.pcapservice;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.ListUtils;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Scan;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.util.Assert;
+
+import com.opensoc.pcapservice.PcapGetterHBaseImpl;
+import com.opensoc.pcapservice.PcapsResponse;
+
+/**
+ * The Class PcapGetterHBaseImplTest.
+ */
+public class PcapGetterHBaseImplTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Test_get pcaps_with list.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withList() throws IOException {
+    // mocking
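+    // row keys are assumed to follow the srcIp-dstIp-protocol-srcPort-dstPort
+    // layout used throughout this suite, with each field hex-encoded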
+    String[] keys = { "0a07002b-0a078039-06-1e8b-0087",
+        "0a070025-0a07807a-06-aab8-c360" };
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+    //
+    // actual call
+    // PcapsResponse response = spy.getPcaps(Arrays.asList(keys));
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // //
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+    //
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key and timestamps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKeyAndTimestamps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    long startTime = 1376782349234555L;
+    long endTime = 1396782349234555L;
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+
+    // Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class),
+    // Mockito.any(HTable.class), Mockito.any(Scan.class),
+    // Mockito.any(byte[].class), Mockito.any(byte[].class));
+
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key, startTime, endTime, false);
+
+    // verify
+    // Assert.assertTrue(response.getResponseSize() == mockPcaps.get(0).length);
+  }
+
+  /**
+   * Test_get pcaps_with key_multiple pcaps.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @SuppressWarnings("unchecked")
+  @Test
+  public void test_getPcaps_withKey_multiplePcaps() throws IOException {
+    // mocking
+    String key = "0a07002b-0a078039-06-1e8b-0087";
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    PcapGetterHBaseImpl spy = Mockito.spy(pcapGetter);
+
+    List<byte[]> mockPcaps = new ArrayList<byte[]>();
+    mockPcaps.add(getTestPcapBytes());
+    mockPcaps.add(getTestPcapBytes());
+
+    /*
+     * Mockito.doReturn(mockPcaps).when(spy).scanPcaps(Mockito.any(ArrayList.class
+     * ), Mockito.any(HTable.class), Mockito.any(Scan.class),
+     * Mockito.any(byte[].class), Mockito.any(byte[].class));
+     */
+    // actual call
+    // PcapsResponse response = spy.getPcaps(key);
+
+    // verify
+    // Assert.assertNotNull(response);
+    // Assert.assertTrue(response.getResponseSize() > mockPcaps.get(0).length);
+  }
+
+  /**
+   * Gets the test pcap bytes.
+   * 
+   * @return the test pcap bytes
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  private byte[] getTestPcapBytes() throws IOException {
+    File fin = new File("src/test/resources/test-tcp-packet.pcap");
+    byte[] pcapBytes = FileUtils.readFileToByteArray(fin);
+    return pcapBytes;
+  }
+
+  /**
+   * Test_remove duplicates.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_removeDuplicates() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+    keys.add("18800006-1800000b-06-0050-5af6");
+
+    List<String> deDupKeys = pcapGetter.removeDuplicateKeys(keys);
+    Assert.isTrue(deDupKeys.size() == 3);
+    List<String> testKeys = new ArrayList<String>();
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    Assert.isTrue(ListUtils.isEqualList(deDupKeys, testKeys));
+  }
+
+  /**
+   * Test_sort keys by asc order_without reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withOutReverseTraffic()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, false);
+
+    List<String> testKeys = new ArrayList<String>();
+    testKeys.add("18800006-1800000b-06-0019-caac");
+    testKeys.add("18800006-1800000b-06-0050-5af6");
+    testKeys.add("18800006-1800000b-11-0035-3810");
+
+    Assert.isTrue(ListUtils.isEqualList(result, testKeys));
+  }
+
+  /**
+   * Test_sort keys by asc order_with reverse traffic.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_withReverseTraffic() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3812");
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-11-0035-3811");
+
+    List<String> result = pcapGetter.sortKeysByAscOrder(keys, true);
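+    // with reverse traffic included, each key's reverse (dst->src) counterpart
+    // is also generated, so the 3 input keys are expected to become 6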
+    Assert.isTrue(result.size() == 6);
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
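+    // the lastRowKey marks the last row already returned to the client; only
+    // keys that sort after its key prefix should remain as unprocessed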
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0019-caac-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 2);
+
+    List<String> unprocessedKeys2 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-06-0050-5af6-65140-40815");
+    // System.out.println("unprocessedKeys2 ="+unprocessedKeys2);
+    Assert.isTrue(unprocessedKeys2.size() == 1);
+
+    List<String> unprocessedKeys3 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-0035-3810-6514040815");
+    // System.out.println("unprocessedKeys3 ="+unprocessedKeys3);
+    Assert.isTrue(unprocessedKeys3.size() == 0);
+
+  }
+
+  /**
+   * Test_sort keys by asc order_get unprocessed sublist of keys_without match.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_sortKeysByAscOrder_getUnprocessedSublistOfKeys_withOutMatch()
+      throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+    System.out.println("original keys =" + keys.toString());
+
+    List<String> sortedKeys = pcapGetter.sortKeysByAscOrder(keys, false);
+    System.out.println("after sortKeysByAscOrder =" + sortedKeys.toString());
+
+    List<String> unprocessedKeys1 = pcapGetter.getUnprocessedSublistOfKeys(
+        sortedKeys, "18800006-1800000b-11-89-455-65140-40815");
+    System.out.println("unprocessedKeys1 =" + unprocessedKeys1);
+    Assert.isTrue(unprocessedKeys1.size() == 3);
+  }
+
+  /**
+   * Test_create start and stop row keys.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createStartAndStopRowKeys() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810";
+    Map<String, String> map = pcapGetter.createStartAndStopRowKeys(key, false,
+        false);
+    System.out.println("map =" + map.toString());
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map1 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey, true, false);
+    System.out.println("map1 =" + map1.toString());
+
+    String lastRowKey2 = "18800006-1800000b-11-0035-3810-23234-32423";
+    Map<String, String> map2 = pcapGetter.createStartAndStopRowKeys(
+        lastRowKey2, true, true);
+    System.out.println("map2 =" + map2.toString());
+
+  }
+
+  /**
+   * Test_check if valid input_valid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    List<String> keys = new ArrayList<String>();
+    keys.add("18800006-1800000b-11-0035-3810");
+    keys.add("18800006-1800000b-06-0050-5af6");
+    keys.add("18800006-1800000b-06-0019-caac");
+
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+
+    boolean response = pcapGetter.checkIfValidInput(keys, lastRowKey);
+    Assert.isTrue(response);
+
+  }
+
+  /**
+   * Test_check if valid input_invalid.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_inValid() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        null);
+    Assert.isTrue(!response);
+
+  }
+
+  /**
+   * Test_check if valid input_valid_mixed.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_checkIfValidInput_valid_mixed() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String lastRowKey = "18800006-1800000b-11-0035-3810-23234-32423";
+    @SuppressWarnings("unchecked")
+    boolean response = pcapGetter.checkIfValidInput(Collections.EMPTY_LIST,
+        lastRowKey);
+    Assert.isTrue(response);
+  }
+
+  /**
+   * Test_create get request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    // compare in millis, as the data creation time unit is set to millis in
+    // the properties file.
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000);
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000);
+  }
+
+  /**
+   * Test_create get request_default time range.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_defaultTimeRange() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    Get get = pcapGetter.createGetRequest(key, -1, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+  }
+
+  /**
+   * Test_create get request_with start time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withStartTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long startTime = 139812323L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, startTime, -1);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == startTime * 1000);
+    Assert.isTrue(get.getTimeRange().getMax() == Long.MAX_VALUE);
+  }
+
+  /**
+   * Test_create get request_with end time.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createGetRequest_withEndTime() throws IOException {
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+    String key = "18800006-1800000b-11-0035-3810-23234-324230";
+
+    long endTime = 139923424L; // in seconds
+
+    Get get = pcapGetter.createGetRequest(key, -1, endTime);
+    Assert.notNull(get);
+
+    Assert.isTrue(Arrays.equals(get.getRow(), key.getBytes()));
+    Assert.isTrue(get.getTimeRange().getMin() == 0);
+    Assert.isTrue(get.getTimeRange().getMax() == endTime * 1000);
+  }
+
+  /**
+   * Test_create scan request.
+   * 
+   * @throws IOException
+   *           Signals that an I/O exception has occurred.
+   */
+  @Test
+  public void test_createScanRequest() throws IOException {
+    // mocking
+    PcapGetterHBaseImpl pcapGetter = (PcapGetterHBaseImpl) PcapGetterHBaseImpl
+        .getInstance();
+
+    PcapsResponse pcapsResponse = new PcapsResponse();
+
+    Map<String, String> keysMap = new HashMap<String, String>();
+    String startKey = "0a07002b-0a078039-06-1e8b-0087-00000-00000";
+    String endKey = "0a070025-0a07807a-06-aab8-c360-99999-99999";
+    keysMap.put("startKey", startKey);
+    keysMap.put("endKey", endKey);
+
+    long startTime = 139812323L; // in seconds
+    long endTime = 139923424L; // in seconds
+    long maxResultSize = 673424;
+
+    // actual call
+    Scan scan = pcapGetter.createScanRequest(pcapsResponse, keysMap, startTime,
+        endTime, maxResultSize);
+
+    // verify time range (compare in millis)
+    Assert.isTrue(scan.getTimeRange().getMin() == startTime * 1000);
+    Assert.isTrue(scan.getTimeRange().getMax() == endTime * 1000);
+
+    // verify start and stop rows
+    Assert.isTrue(Arrays.equals(scan.getStartRow(), startKey.getBytes()));
+    Assert.isTrue(Arrays.equals(scan.getStopRow(), endKey.getBytes()));
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a919cc19/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
----------------------------------------------------------------------
diff --git a/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
new file mode 100644
index 0000000..a1f6c04
--- /dev/null
+++ b/opensoc-streaming/OpenSOC-Pcap_Service/src/test/java/com/opensoc/pcapservice/PcapHelperTest.java
@@ -0,0 +1,321 @@
+/**
+ * 
+ */
+package com.opensoc.pcapservice;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.springframework.util.Assert;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
+
+import com.opensoc.pcapservice.PcapHelper;
+import com.opensoc.pcapservice.PcapHelper.TimeUnit;
+
+/**
+ * The Class PcapHelperTest.
+ * 
+ * @author Sayi
+ */
+@RunWith(PowerMockRunner.class)
+@PrepareForTest(PcapHelper.class)
+public class PcapHelperTest {
+
+  /**
+   * Sets the up.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @Before
+  public void setUp() throws Exception {
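+    // spying the static PcapHelper class lets each test stub
+    // getDataCreationTimeUnit() while the conversion logic under test runs for real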
+    PowerMockito.spy(PcapHelper.class);
+  }
+
+  /**
+   * Tear down.
+   * 
+   * @throws Exception
+   *           the exception
+   */
+  @After
+  public void tearDown() throws Exception {
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333L; // input time in millis
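+    // converting millis to seconds truncates the trailing three digits
+    // (integer division by 1000)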
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in SECONDS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_seconds() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.SECONDS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222333444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MILLIS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_millis() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MILLIS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233344L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222L; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_random() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 13388; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(13388000000L == time);
+  }
+
+  /**
+   * Input time is in MILLIS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_millis_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 111112222233L; // input time in millis
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111112222233000L == time);
+  }
+
+  /**
+   * Input time is in MICROS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_micros_micros() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1111122222334444L; // input time in micros
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1111122222334444L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_0() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 0; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(0 == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_1() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = 1; // input time in seconds
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(1000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_decimal() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long inputTime = 13; // input time in seconds
+    long time = PcapHelper.convertSecondsToDataCreationTimeUnit(inputTime);
+
+    Assert.isTrue(13000000L == time);
+  }
+
+  /**
+   * Input time is in SECONDS (a decimal value truncated to a long) and data creation time is in MICROS.
+   */
+  @Test
+  public void test_convertToDataCreationTimeUnit_seconds_micros_truncatedDecimal() {
+    PowerMockito.when(PcapHelper.getDataCreationTimeUnit()).thenReturn(
+        TimeUnit.MICROS);
+    PowerMockito.verifyNoMoreInteractions();
+
+    long endTime = (long) 111.333; // input time in seconds (double to long type
+                                   // casting)
+    long time = PcapHelper.convertToDataCreationTimeUnit(endTime);
+
+    Assert.isTrue(111000000L == time);
+  }
+
+  /**
+   * Test_get data creation time unit.
+   */
+  @Test
+  public void test_getDataCreationTimeUnit() {
+    TimeUnit dataCreationTimeUnit = PcapHelper.getDataCreationTimeUnit();
+    Assert.isTrue(TimeUnit.MILLIS == dataCreationTimeUnit);
+  }
+
+  /**
+   * Test_reverse key_valid.
+   */
+  @Test
+  public void test_reverseKey_valid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_valid_with fragment.
+   */
+  @Test
+  public void test_reverseKey_valid_withFragment() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_invalid.
+   */
+  @Test
+  public void test_reverseKey_inValid() {
+    String key = "162.242.152.24-162.242.153.12-TCP-38190-9092-ipId-fragmentId-extra";
+    String reversekey = PcapHelper.reverseKey(key);
+    Assert.isTrue("".equals(reversekey));
+  }
+
+  /**
+   * Test_reverse key_as list.
+   */
+  @Test
+  public void test_reverseKey_asList() {
+    String[] keys = {
+        "162.242.152.24-162.242.153.12-TCP-38190-9092-fragmentId",
+        "162.242.152.24-162.242.153.12-UDP-38190-9092" };
+
+    List<String> reverseKeys = PcapHelper.reverseKey(Arrays.asList(keys));
+
+    Assert.isTrue("162.242.153.12-162.242.152.24-TCP-9092-38190"
+        .equals(reverseKeys.get(0)));
+    Assert.isTrue("162.242.153.12-162.242.152.24-UDP-9092-38190"
+        .equals(reverseKeys.get(1)));
+  }
+
+}