You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@metron.apache.org by ce...@apache.org on 2017/02/06 20:17:29 UTC

[01/17] incubator-metron git commit: METRON-676 Create Zeppelin Notebook for YAF Telemetry (nickwallen) closes apache/incubator-metron#427

Repository: incubator-metron
Updated Branches:
  refs/heads/Metron_0.3.1 f3ca3c05e -> e4d54a278


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/cc29dcab/metron-platform/metron-indexing/src/main/config/zeppelin/metron/metron-yaf-telemetry.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-indexing/src/main/config/zeppelin/metron/metron-yaf-telemetry.json b/metron-platform/metron-indexing/src/main/config/zeppelin/metron/metron-yaf-telemetry.json
new file mode 100644
index 0000000..befdce8
--- /dev/null
+++ b/metron-platform/metron-indexing/src/main/config/zeppelin/metron/metron-yaf-telemetry.json
@@ -0,0 +1 @@
+\ufeff{"paragraphs":[{"text":"%spark.sql\n\n#\n# load the flow telemetry that has been archived by Metron\n#\ncreate temporary table yaf\n  using org.apache.spark.sql.json\n  options (path \"hdfs:///apps/metron/indexing/indexed/yaf\")","dateUpdated":"2017-01-24T21:55:59+0000","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"tableHide":false,"editorHide":false,"editorMode":"ace/mode/sql"},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485189410459_-412168737","id":"20170123-163650_844861236","result":{"code":"SUCCESS","type":"TEXT","msg":""},"dateCreated":"2017-01-23T04:36:50+0000","dateStarted":"2017-01-24T21:55:59+0000","dateFinished":"2017-01-24T21:56:18+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5233","focus":true},{"text":"%pyspark\n\nfrom pyspark.sql.types import BooleanType\nfrom pyspark.sql.functions import udf\nimport re\n\n#\n# define whic
 h networks are internal versus external.  for simplicity, we simply \n# call anything that is a private network as internal and everything else external.\n#\n# in a production environment distinguishing internal from external is likely to be\n# more complex and should be handled by a message enrichment. in that case each \n# message would contain a field indicating if it is internal or external.\n#\nregexes = [\n    \"192\\.168\\.[0-9]+\\.[0-9]+\",     # 192.168.0.0/16\n    \"10\\.[0-9]+\\.[0-9]+\\.[0-9]+\"    # 10.0.0.0/8\n]\n\n#\n# define a UDF that can distinguish between internal and external networks\n#\ndef is_internal(ip): \n    matches = [re.match(r, ip) is not None for r in regexes]\n    return any(matches)\n\nsqlContext.udf.register(\"is_internal\", is_internal, BooleanType())","dateUpdated":"2017-01-24T21:55:59+0000","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"
 ace/mode/python","editorHide":false},"settings":{"params":{"Internal":"192\\.168\\.[0-9]+\\.[0-9]+","fruit":["apple"],"Internal Network":["192\\.168\\.[0-9]+\\.[0-9]+","10\\.[0-9]+\\.[0-9]+\\.[0-9]+"],"Internal Networks":["192\\.168\\.[0-9]+\\.[0-9]+","10\\.[0-9]+\\.[0-9]+\\.[0-9]+"]},"forms":{}},"jobName":"paragraph_1485273973163_1353447337","id":"20170124-160613_699672885","result":{"code":"SUCCESS","type":"TEXT","msg":""},"dateCreated":"2017-01-24T04:06:13+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5234","focus":true},{"text":"","dateUpdated":"2017-01-24T21:55:59+0000","config":{"colWidth":12,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[{"name":"ip_src_addr","index":0,"aggr":"sum"}],"values":[{"name":"_c1","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"ip_src_addr","index":0,"aggr":"sum"},"yAxis":{"name":"_c1","index":1,"aggr":"
 sum"}}},"enabled":true,"editorMode":"ace/mode/scala","editorHide":true,"tableHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485276598605_-131372598","id":"20170124-164958_1912250786","dateCreated":"2017-01-24T04:49:58+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:18+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5235","result":{"code":"SUCCESS","type":"TEXT","msg":""},"focus":true},{"text":"%md\n\n### Top Talkers - Internal\n\nThe most active talkers on the internal network.  Includes the total number of packets and the total duration of all flows in seconds.\n","dateUpdated":"2017-01-24T21:55:59+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485277162415_510785352","id":"20170124-165922_10
 08107661","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Top Talkers - Internal</h3>\n<p>The most active talkers on the internal network.  Includes the total number of packets and the total duration of all flows in seconds.</p>\n"},"dateCreated":"2017-01-24T04:59:22+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5236","focus":true},{"text":"%spark.sql\n\nselect ip, \n    sum(pkts) as pkts, \n    sum(duration) as duration\nfrom (\n    select ip_dst_addr as ip,\n        pkt + rpkt as pkts,\n        duration\n    from yaf\n    where datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n    union all\n    select ip_src_addr as ip,\n        pkt + rpkt as pkts,\n        duration\n    from yaf\n    where datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n) ips\nwhere is_internal(ip) = true\ngroup by ip\norder by pkts desc\nlimit 10","dateUpdated
 ":"2017-01-24T22:01:05+0000","config":{"colWidth":7,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[{"name":"ip","index":0,"aggr":"sum"}],"values":[{"name":"pkts","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"ip","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/scala","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485279772944_-1105609363","id":"20170124-174252_731586468","result":{"code":"SUCCESS","type":"TABLE","msg":"ip\tpkts\tduration\n192.168.138.158\t366649\t58558.270999999986\n192.168.66.1\t249636\t14023.284999999989\n192.168.66.121\t247281\t14023.284999999989\n192.168.138.2\t8910\t39098.352\n","comment":"","msgTable":[[{"key":"pkts","value":"192.168.138.158"},{"key":"pkts","value":"366649"},{"key":"pkts","value":"58558.270999999986"}],[{"key":"duration","value":"192.168.66.1"},{"key":"duration","value":"249636"},{"key":"duration","value":"14023.284999999989"}],[{"value":"192.168.66.121"},{"value"
 :"247281"},{"value":"14023.284999999989"}],[{"value":"192.168.138.2"},{"value":"8910"},{"value":"39098.352"}]],"columnNames":[{"name":"ip","index":0,"aggr":"sum"},{"name":"pkts","index":1,"aggr":"sum"},{"name":"duration","index":2,"aggr":"sum"}],"rows":[["192.168.138.158","366649","58558.270999999986"],["192.168.66.1","249636","14023.284999999989"],["192.168.66.121","247281","14023.284999999989"],["192.168.138.2","8910","39098.352"]]},"dateCreated":"2017-01-24T17:42:52+0000","dateStarted":"2017-01-24T21:56:18+0000","dateFinished":"2017-01-24T21:56:57+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5237","focus":true},{"text":"%md\n\n### Top Talkers - External\n\nThe most active external talkers.  Includes the total number of packets, total duration of all flows in seconds, and the location of the talker, if one is known.","dateUpdated":"2017-01-24T21:56:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"val
 ues":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485277133105_1967013966","id":"20170124-165853_1925159527","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Top Talkers - External</h3>\n<p>The most active external talkers.  Includes the total number of packets, total duration of all flows in seconds, and the location of the talker, if one is known.</p>\n"},"dateCreated":"2017-01-24T04:58:53+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5238","focus":true},{"text":"%spark.sql\n\nselect ip, \n    sum(pkts) as pkts,\n    sum(duration) as duration,\n    country, \n    city\nfrom (\n    select ip_dst_addr as ip,\n        `enrichments.geo.ip_dst_addr.country` as country,\n        `enrichments.geo.ip_dst_addr.city` as city,\n        pkt + rpkt as pkts,\n        durat
 ion\n    from yaf\n    where datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n    union all\n    select ip_src_addr as ip,\n        `enrichments.geo.ip_src_addr.country` as country,\n        `enrichments.geo.ip_src_addr.city` as city,\n        pkt + rpkt as pkts,\n        duration\n    from yaf\n    where datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n) ips\nwhere is_internal(ip) = false\ngroup by ip, country, city\norder by pkts desc\nlimit 10","dateUpdated":"2017-01-24T22:01:10+0000","config":{"colWidth":7,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[{"name":"ip","index":0,"aggr":"sum"}],"values":[{"name":"pkts","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"ip","index":0,"aggr":"sum"},"yAxis":{"name":"pkts","index":1,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485208156114_1819414426","id":"20170123-214916_1678988",
 "result":{"code":"SUCCESS","type":"TABLE","msg":"ip\tpkts\tduration\tcountry\tcity\n62.75.195.236\t241668\t3742.253999999998\tFR\tStrassbourg\n95.163.121.204\t48039\t14688.73099999999\tRU\tnull\n72.34.49.86\t42570\t354.7450000000003\tUS\tLos Angeles\n204.152.254.221\t21588\t54.17200000000001\tUS\tPhoenix\n188.165.164.184\t3874\t620.0169999999997\tFR\tnull\n224.0.0.251\t2355\t0.0\tnull\tnull\n","comment":"","msgTable":[[{"key":"pkts","value":"62.75.195.236"},{"key":"pkts","value":"241668"},{"key":"pkts","value":"3742.253999999998"},{"key":"pkts","value":"FR"},{"key":"pkts","value":"Strassbourg"}],[{"key":"duration","value":"95.163.121.204"},{"key":"duration","value":"48039"},{"key":"duration","value":"14688.73099999999"},{"key":"duration","value":"RU"},{"key":"duration","value":"null"}],[{"key":"country","value":"72.34.49.86"},{"key":"country","value":"42570"},{"key":"country","value":"354.7450000000003"},{"key":"country","value":"US"},{"key":"country","value":"Los Angeles"}],[{"key"
 :"city","value":"204.152.254.221"},{"key":"city","value":"21588"},{"key":"city","value":"54.17200000000001"},{"key":"city","value":"US"},{"key":"city","value":"Phoenix"}],[{"value":"188.165.164.184"},{"value":"3874"},{"value":"620.0169999999997"},{"value":"FR"},{"value":"null"}],[{"value":"224.0.0.251"},{"value":"2355"},{"value":"0.0"},{"value":"null"},{"value":"null"}]],"columnNames":[{"name":"ip","index":0,"aggr":"sum"},{"name":"pkts","index":1,"aggr":"sum"},{"name":"duration","index":2,"aggr":"sum"},{"name":"country","index":3,"aggr":"sum"},{"name":"city","index":4,"aggr":"sum"}],"rows":[["62.75.195.236","241668","3742.253999999998","FR","Strassbourg"],["95.163.121.204","48039","14688.73099999999","RU","null"],["72.34.49.86","42570","354.7450000000003","US","Los Angeles"],["204.152.254.221","21588","54.17200000000001","US","Phoenix"],["188.165.164.184","3874","620.0169999999997","FR","null"],["224.0.0.251","2355","0.0","null","null"]]},"dateCreated":"2017-01-23T09:49:16+0000","da
 teStarted":"2017-01-24T21:56:19+0000","dateFinished":"2017-01-24T21:57:26+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5239","focus":true},{"text":"%md\n\n### Flows by Hour\n\nThe total number of flows by hour over the past 7 days.","dateUpdated":"2017-01-24T21:56:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485280832044_1372278057","id":"20170124-180032_562068398","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Flows by Hour</h3>\n<p>The total number of flows by hour over the past 7 days.</p>\n"},"dateCreated":"2017-01-24T18:00:32+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5240","focus":true},{"text":"%spark.sql\n\nsele
 ct\n    from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') as time,\n    count(*) as Flows\nfrom yaf\nwhere datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\ngroup by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00')\norder by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') asc\n\n","dateUpdated":"2017-01-24T22:01:22+0000","config":{"colWidth":7,"graph":{"mode":"multiBarChart","height":354,"optionOpen":false,"keys":[{"name":"time","index":0,"aggr":"sum"}],"values":[{"name":"Flows","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"time","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485268882405_-490297046","id":"20170124-144122_1753419602","result":{"code":"SUCCESS","type":"TABLE","msg":"time\tFlows\n2017-01-18 15:00\t9047\n2017-01-18 16:00\t7810\n2017-01-18 17:00\t2186\n2017-01-18 18:00\t2296\n2017-01-18 19:00\t2858\n2017-01-18 20:00\t3330\n2017-01-18 21:
 00\t2645\n2017-01-18 22:00\t3789\n2017-01-18 23:00\t8946\n2017-01-19 00:00\t10171\n2017-01-19 01:00\t4517\n2017-01-19 02:00\t1226\n2017-01-19 03:00\t1965\n2017-01-19 04:00\t2204\n2017-01-19 05:00\t1512\n2017-01-19 06:00\t2339\n2017-01-19 07:00\t2616\n2017-01-19 08:00\t2262\n2017-01-19 09:00\t5522\n2017-01-19 10:00\t10184\n2017-01-19 11:00\t6177\n2017-01-19 12:00\t3681\n2017-01-19 13:00\t3474\n2017-01-19 14:00\t3030\n2017-01-19 15:00\t3473\n2017-01-19 16:00\t2464\n2017-01-19 17:00\t2346\n2017-01-19 18:00\t2210\n2017-01-19 19:00\t2915\n2017-01-19 20:00\t2118\n2017-01-19 21:00\t2339\n2017-01-19 22:00\t2552\n2017-01-19 23:00\t3066\n2017-01-20 00:00\t3701\n2017-01-20 01:00\t7096\n2017-01-20 02:00\t9895\n2017-01-20 03:00\t6328\n2017-01-20 04:00\t3046\n2017-01-20 05:00\t2313\n2017-01-20 06:00\t1840\n2017-01-20 07:00\t4521\n2017-01-20 08:00\t4902\n2017-01-20 09:00\t2420\n2017-01-20 10:00\t2263\n2017-01-20 11:00\t3382\n2017-01-20 12:00\t2578\n2017-01-20 13:00\t4877\n2017-01-20 14:00\t4156\n2
 017-01-20 15:00\t1801\n2017-01-20 16:00\t1527\n2017-01-20 17:00\t2794\n2017-01-20 18:00\t3250\n2017-01-20 19:00\t2260\n2017-01-20 20:00\t5759\n2017-01-20 21:00\t5484\n2017-01-20 22:00\t2311\n2017-01-20 23:00\t2054\n2017-01-21 00:00\t6476\n2017-01-21 01:00\t5626\n2017-01-21 02:00\t3072\n2017-01-21 03:00\t4916\n2017-01-21 04:00\t4792\n2017-01-21 05:00\t2332\n2017-01-21 06:00\t1929\n2017-01-21 07:00\t6699\n2017-01-21 08:00\t4796\n2017-01-21 09:00\t3711\n2017-01-21 10:00\t3613\n2017-01-21 11:00\t2192\n2017-01-21 12:00\t1750\n2017-01-21 13:00\t1527\n2017-01-21 14:00\t1895\n2017-01-21 15:00\t3016\n2017-01-21 16:00\t2907\n2017-01-21 17:00\t3346\n2017-01-21 18:00\t3203\n2017-01-21 19:00\t7215\n2017-01-21 20:00\t6487\n2017-01-21 21:00\t3242\n2017-01-21 22:00\t3505\n2017-01-21 23:00\t6100\n2017-01-22 00:00\t3771\n2017-01-22 01:00\t6288\n2017-01-22 02:00\t7762\n2017-01-22 03:00\t8890\n2017-01-22 04:00\t5806\n2017-01-22 05:00\t3207\n2017-01-22 06:00\t3402\n2017-01-22 07:00\t3750\n2017-01-22 08:
 00\t6234\n2017-01-22 09:00\t7227\n2017-01-22 10:00\t4109\n2017-01-22 11:00\t3072\n2017-01-22 12:00\t2491\n2017-01-22 13:00\t2777\n2017-01-22 14:00\t6014\n2017-01-22 15:00\t4681\n2017-01-22 16:00\t2839\n2017-01-22 17:00\t3069\n2017-01-22 18:00\t7324\n2017-01-22 19:00\t5977\n2017-01-22 20:00\t2197\n2017-01-22 21:00\t2257\n2017-01-22 22:00\t2063\n2017-01-22 23:00\t1464\n2017-01-23 00:00\t2735\n2017-01-23 01:00\t2772\n2017-01-23 02:00\t2537\n2017-01-23 03:00\t2445\n2017-01-23 04:00\t3639\n2017-01-23 05:00\t2898\n2017-01-23 06:00\t1944\n2017-01-23 07:00\t6869\n2017-01-23 08:00\t4857\n2017-01-23 09:00\t2034\n2017-01-23 10:00\t2571\n2017-01-23 11:00\t2042\n2017-01-23 12:00\t2212\n2017-01-23 13:00\t3304\n2017-01-23 14:00\t8219\n2017-01-23 15:00\t10895\n2017-01-23 16:00\t5050\n2017-01-23 17:00\t2381\n2017-01-23 18:00\t2326\n2017-01-23 19:00\t7140\n2017-01-23 20:00\t6214\n2017-01-23 21:00\t5771\n2017-01-23 22:00\t4548\n2017-01-23 23:00\t2728\n2017-01-24 00:00\t3039\n2017-01-24 01:00\t3468\n20
 17-01-24 02:00\t2907\n2017-01-24 03:00\t2053\n2017-01-24 04:00\t1985\n2017-01-24 05:00\t2384\n2017-01-24 06:00\t2881\n2017-01-24 07:00\t6602\n2017-01-24 08:00\t4426\n2017-01-24 09:00\t2412\n2017-01-24 10:00\t2031\n2017-01-24 11:00\t2588\n2017-01-24 12:00\t3578\n2017-01-24 13:00\t7608\n2017-01-24 14:00\t5635\n2017-01-24 15:00\t3117\n2017-01-24 16:00\t1413\n","comment":"","msgTable":[[{"key":"Flows","value":"2017-01-18 15:00"},{"key":"Flows","value":"9047"}],[{"value":"2017-01-18 16:00"},{"value":"7810"}],[{"value":"2017-01-18 17:00"},{"value":"2186"}],[{"value":"2017-01-18 18:00"},{"value":"2296"}],[{"value":"2017-01-18 19:00"},{"value":"2858"}],[{"value":"2017-01-18 20:00"},{"value":"3330"}],[{"value":"2017-01-18 21:00"},{"value":"2645"}],[{"value":"2017-01-18 22:00"},{"value":"3789"}],[{"value":"2017-01-18 23:00"},{"value":"8946"}],[{"value":"2017-01-19 00:00"},{"value":"10171"}],[{"value":"2017-01-19 01:00"},{"value":"4517"}],[{"value":"2017-01-19 02:00"},{"value":"1226"}],[{"valu
 e":"2017-01-19 03:00"},{"value":"1965"}],[{"value":"2017-01-19 04:00"},{"value":"2204"}],[{"value":"2017-01-19 05:00"},{"value":"1512"}],[{"value":"2017-01-19 06:00"},{"value":"2339"}],[{"value":"2017-01-19 07:00"},{"value":"2616"}],[{"value":"2017-01-19 08:00"},{"value":"2262"}],[{"value":"2017-01-19 09:00"},{"value":"5522"}],[{"value":"2017-01-19 10:00"},{"value":"10184"}],[{"value":"2017-01-19 11:00"},{"value":"6177"}],[{"value":"2017-01-19 12:00"},{"value":"3681"}],[{"value":"2017-01-19 13:00"},{"value":"3474"}],[{"value":"2017-01-19 14:00"},{"value":"3030"}],[{"value":"2017-01-19 15:00"},{"value":"3473"}],[{"value":"2017-01-19 16:00"},{"value":"2464"}],[{"value":"2017-01-19 17:00"},{"value":"2346"}],[{"value":"2017-01-19 18:00"},{"value":"2210"}],[{"value":"2017-01-19 19:00"},{"value":"2915"}],[{"value":"2017-01-19 20:00"},{"value":"2118"}],[{"value":"2017-01-19 21:00"},{"value":"2339"}],[{"value":"2017-01-19 22:00"},{"value":"2552"}],[{"value":"2017-01-19 23:00"},{"value":"306
 6"}],[{"value":"2017-01-20 00:00"},{"value":"3701"}],[{"value":"2017-01-20 01:00"},{"value":"7096"}],[{"value":"2017-01-20 02:00"},{"value":"9895"}],[{"value":"2017-01-20 03:00"},{"value":"6328"}],[{"value":"2017-01-20 04:00"},{"value":"3046"}],[{"value":"2017-01-20 05:00"},{"value":"2313"}],[{"value":"2017-01-20 06:00"},{"value":"1840"}],[{"value":"2017-01-20 07:00"},{"value":"4521"}],[{"value":"2017-01-20 08:00"},{"value":"4902"}],[{"value":"2017-01-20 09:00"},{"value":"2420"}],[{"value":"2017-01-20 10:00"},{"value":"2263"}],[{"value":"2017-01-20 11:00"},{"value":"3382"}],[{"value":"2017-01-20 12:00"},{"value":"2578"}],[{"value":"2017-01-20 13:00"},{"value":"4877"}],[{"value":"2017-01-20 14:00"},{"value":"4156"}],[{"value":"2017-01-20 15:00"},{"value":"1801"}],[{"value":"2017-01-20 16:00"},{"value":"1527"}],[{"value":"2017-01-20 17:00"},{"value":"2794"}],[{"value":"2017-01-20 18:00"},{"value":"3250"}],[{"value":"2017-01-20 19:00"},{"value":"2260"}],[{"value":"2017-01-20 20:00"},{"
 value":"5759"}],[{"value":"2017-01-20 21:00"},{"value":"5484"}],[{"value":"2017-01-20 22:00"},{"value":"2311"}],[{"value":"2017-01-20 23:00"},{"value":"2054"}],[{"value":"2017-01-21 00:00"},{"value":"6476"}],[{"value":"2017-01-21 01:00"},{"value":"5626"}],[{"value":"2017-01-21 02:00"},{"value":"3072"}],[{"value":"2017-01-21 03:00"},{"value":"4916"}],[{"value":"2017-01-21 04:00"},{"value":"4792"}],[{"value":"2017-01-21 05:00"},{"value":"2332"}],[{"value":"2017-01-21 06:00"},{"value":"1929"}],[{"value":"2017-01-21 07:00"},{"value":"6699"}],[{"value":"2017-01-21 08:00"},{"value":"4796"}],[{"value":"2017-01-21 09:00"},{"value":"3711"}],[{"value":"2017-01-21 10:00"},{"value":"3613"}],[{"value":"2017-01-21 11:00"},{"value":"2192"}],[{"value":"2017-01-21 12:00"},{"value":"1750"}],[{"value":"2017-01-21 13:00"},{"value":"1527"}],[{"value":"2017-01-21 14:00"},{"value":"1895"}],[{"value":"2017-01-21 15:00"},{"value":"3016"}],[{"value":"2017-01-21 16:00"},{"value":"2907"}],[{"value":"2017-01-21
  17:00"},{"value":"3346"}],[{"value":"2017-01-21 18:00"},{"value":"3203"}],[{"value":"2017-01-21 19:00"},{"value":"7215"}],[{"value":"2017-01-21 20:00"},{"value":"6487"}],[{"value":"2017-01-21 21:00"},{"value":"3242"}],[{"value":"2017-01-21 22:00"},{"value":"3505"}],[{"value":"2017-01-21 23:00"},{"value":"6100"}],[{"value":"2017-01-22 00:00"},{"value":"3771"}],[{"value":"2017-01-22 01:00"},{"value":"6288"}],[{"value":"2017-01-22 02:00"},{"value":"7762"}],[{"value":"2017-01-22 03:00"},{"value":"8890"}],[{"value":"2017-01-22 04:00"},{"value":"5806"}],[{"value":"2017-01-22 05:00"},{"value":"3207"}],[{"value":"2017-01-22 06:00"},{"value":"3402"}],[{"value":"2017-01-22 07:00"},{"value":"3750"}],[{"value":"2017-01-22 08:00"},{"value":"6234"}],[{"value":"2017-01-22 09:00"},{"value":"7227"}],[{"value":"2017-01-22 10:00"},{"value":"4109"}],[{"value":"2017-01-22 11:00"},{"value":"3072"}],[{"value":"2017-01-22 12:00"},{"value":"2491"}],[{"value":"2017-01-22 13:00"},{"value":"2777"}],[{"value":
 "2017-01-22 14:00"},{"value":"6014"}],[{"value":"2017-01-22 15:00"},{"value":"4681"}],[{"value":"2017-01-22 16:00"},{"value":"2839"}],[{"value":"2017-01-22 17:00"},{"value":"3069"}],[{"value":"2017-01-22 18:00"},{"value":"7324"}],[{"value":"2017-01-22 19:00"},{"value":"5977"}],[{"value":"2017-01-22 20:00"},{"value":"2197"}],[{"value":"2017-01-22 21:00"},{"value":"2257"}],[{"value":"2017-01-22 22:00"},{"value":"2063"}],[{"value":"2017-01-22 23:00"},{"value":"1464"}],[{"value":"2017-01-23 00:00"},{"value":"2735"}],[{"value":"2017-01-23 01:00"},{"value":"2772"}],[{"value":"2017-01-23 02:00"},{"value":"2537"}],[{"value":"2017-01-23 03:00"},{"value":"2445"}],[{"value":"2017-01-23 04:00"},{"value":"3639"}],[{"value":"2017-01-23 05:00"},{"value":"2898"}],[{"value":"2017-01-23 06:00"},{"value":"1944"}],[{"value":"2017-01-23 07:00"},{"value":"6869"}],[{"value":"2017-01-23 08:00"},{"value":"4857"}],[{"value":"2017-01-23 09:00"},{"value":"2034"}],[{"value":"2017-01-23 10:00"},{"value":"2571"}]
 ,[{"value":"2017-01-23 11:00"},{"value":"2042"}],[{"value":"2017-01-23 12:00"},{"value":"2212"}],[{"value":"2017-01-23 13:00"},{"value":"3304"}],[{"value":"2017-01-23 14:00"},{"value":"8219"}],[{"value":"2017-01-23 15:00"},{"value":"10895"}],[{"value":"2017-01-23 16:00"},{"value":"5050"}],[{"value":"2017-01-23 17:00"},{"value":"2381"}],[{"value":"2017-01-23 18:00"},{"value":"2326"}],[{"value":"2017-01-23 19:00"},{"value":"7140"}],[{"value":"2017-01-23 20:00"},{"value":"6214"}],[{"value":"2017-01-23 21:00"},{"value":"5771"}],[{"value":"2017-01-23 22:00"},{"value":"4548"}],[{"value":"2017-01-23 23:00"},{"value":"2728"}],[{"value":"2017-01-24 00:00"},{"value":"3039"}],[{"value":"2017-01-24 01:00"},{"value":"3468"}],[{"value":"2017-01-24 02:00"},{"value":"2907"}],[{"value":"2017-01-24 03:00"},{"value":"2053"}],[{"value":"2017-01-24 04:00"},{"value":"1985"}],[{"value":"2017-01-24 05:00"},{"value":"2384"}],[{"value":"2017-01-24 06:00"},{"value":"2881"}],[{"value":"2017-01-24 07:00"},{"val
 ue":"6602"}],[{"value":"2017-01-24 08:00"},{"value":"4426"}],[{"value":"2017-01-24 09:00"},{"value":"2412"}],[{"value":"2017-01-24 10:00"},{"value":"2031"}],[{"value":"2017-01-24 11:00"},{"value":"2588"}],[{"value":"2017-01-24 12:00"},{"value":"3578"}],[{"value":"2017-01-24 13:00"},{"value":"7608"}],[{"value":"2017-01-24 14:00"},{"value":"5635"}],[{"value":"2017-01-24 15:00"},{"value":"3117"}],[{"value":"2017-01-24 16:00"},{"value":"1413"}]],"columnNames":[{"name":"time","index":0,"aggr":"sum"},{"name":"Flows","index":1,"aggr":"sum"}],"rows":[["2017-01-18 15:00","9047"],["2017-01-18 16:00","7810"],["2017-01-18 17:00","2186"],["2017-01-18 18:00","2296"],["2017-01-18 19:00","2858"],["2017-01-18 20:00","3330"],["2017-01-18 21:00","2645"],["2017-01-18 22:00","3789"],["2017-01-18 23:00","8946"],["2017-01-19 00:00","10171"],["2017-01-19 01:00","4517"],["2017-01-19 02:00","1226"],["2017-01-19 03:00","1965"],["2017-01-19 04:00","2204"],["2017-01-19 05:00","1512"],["2017-01-19 06:00","2339"]
 ,["2017-01-19 07:00","2616"],["2017-01-19 08:00","2262"],["2017-01-19 09:00","5522"],["2017-01-19 10:00","10184"],["2017-01-19 11:00","6177"],["2017-01-19 12:00","3681"],["2017-01-19 13:00","3474"],["2017-01-19 14:00","3030"],["2017-01-19 15:00","3473"],["2017-01-19 16:00","2464"],["2017-01-19 17:00","2346"],["2017-01-19 18:00","2210"],["2017-01-19 19:00","2915"],["2017-01-19 20:00","2118"],["2017-01-19 21:00","2339"],["2017-01-19 22:00","2552"],["2017-01-19 23:00","3066"],["2017-01-20 00:00","3701"],["2017-01-20 01:00","7096"],["2017-01-20 02:00","9895"],["2017-01-20 03:00","6328"],["2017-01-20 04:00","3046"],["2017-01-20 05:00","2313"],["2017-01-20 06:00","1840"],["2017-01-20 07:00","4521"],["2017-01-20 08:00","4902"],["2017-01-20 09:00","2420"],["2017-01-20 10:00","2263"],["2017-01-20 11:00","3382"],["2017-01-20 12:00","2578"],["2017-01-20 13:00","4877"],["2017-01-20 14:00","4156"],["2017-01-20 15:00","1801"],["2017-01-20 16:00","1527"],["2017-01-20 17:00","2794"],["2017-01-20 18
 :00","3250"],["2017-01-20 19:00","2260"],["2017-01-20 20:00","5759"],["2017-01-20 21:00","5484"],["2017-01-20 22:00","2311"],["2017-01-20 23:00","2054"],["2017-01-21 00:00","6476"],["2017-01-21 01:00","5626"],["2017-01-21 02:00","3072"],["2017-01-21 03:00","4916"],["2017-01-21 04:00","4792"],["2017-01-21 05:00","2332"],["2017-01-21 06:00","1929"],["2017-01-21 07:00","6699"],["2017-01-21 08:00","4796"],["2017-01-21 09:00","3711"],["2017-01-21 10:00","3613"],["2017-01-21 11:00","2192"],["2017-01-21 12:00","1750"],["2017-01-21 13:00","1527"],["2017-01-21 14:00","1895"],["2017-01-21 15:00","3016"],["2017-01-21 16:00","2907"],["2017-01-21 17:00","3346"],["2017-01-21 18:00","3203"],["2017-01-21 19:00","7215"],["2017-01-21 20:00","6487"],["2017-01-21 21:00","3242"],["2017-01-21 22:00","3505"],["2017-01-21 23:00","6100"],["2017-01-22 00:00","3771"],["2017-01-22 01:00","6288"],["2017-01-22 02:00","7762"],["2017-01-22 03:00","8890"],["2017-01-22 04:00","5806"],["2017-01-22 05:00","3207"],["20
 17-01-22 06:00","3402"],["2017-01-22 07:00","3750"],["2017-01-22 08:00","6234"],["2017-01-22 09:00","7227"],["2017-01-22 10:00","4109"],["2017-01-22 11:00","3072"],["2017-01-22 12:00","2491"],["2017-01-22 13:00","2777"],["2017-01-22 14:00","6014"],["2017-01-22 15:00","4681"],["2017-01-22 16:00","2839"],["2017-01-22 17:00","3069"],["2017-01-22 18:00","7324"],["2017-01-22 19:00","5977"],["2017-01-22 20:00","2197"],["2017-01-22 21:00","2257"],["2017-01-22 22:00","2063"],["2017-01-22 23:00","1464"],["2017-01-23 00:00","2735"],["2017-01-23 01:00","2772"],["2017-01-23 02:00","2537"],["2017-01-23 03:00","2445"],["2017-01-23 04:00","3639"],["2017-01-23 05:00","2898"],["2017-01-23 06:00","1944"],["2017-01-23 07:00","6869"],["2017-01-23 08:00","4857"],["2017-01-23 09:00","2034"],["2017-01-23 10:00","2571"],["2017-01-23 11:00","2042"],["2017-01-23 12:00","2212"],["2017-01-23 13:00","3304"],["2017-01-23 14:00","8219"],["2017-01-23 15:00","10895"],["2017-01-23 16:00","5050"],["2017-01-23 17:00",
 "2381"],["2017-01-23 18:00","2326"],["2017-01-23 19:00","7140"],["2017-01-23 20:00","6214"],["2017-01-23 21:00","5771"],["2017-01-23 22:00","4548"],["2017-01-23 23:00","2728"],["2017-01-24 00:00","3039"],["2017-01-24 01:00","3468"],["2017-01-24 02:00","2907"],["2017-01-24 03:00","2053"],["2017-01-24 04:00","1985"],["2017-01-24 05:00","2384"],["2017-01-24 06:00","2881"],["2017-01-24 07:00","6602"],["2017-01-24 08:00","4426"],["2017-01-24 09:00","2412"],["2017-01-24 10:00","2031"],["2017-01-24 11:00","2588"],["2017-01-24 12:00","3578"],["2017-01-24 13:00","7608"],["2017-01-24 14:00","5635"],["2017-01-24 15:00","3117"],["2017-01-24 16:00","1413"]]},"dateCreated":"2017-01-24T02:41:22+0000","dateStarted":"2017-01-24T22:00:02+0000","dateFinished":"2017-01-24T22:00:16+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5241","focus":true},{"text":"%md\n\n### Flows by Hour - Internal\n\nThe total number of internal flows by hour over the past 7 days.","dateUpdated":
 "2017-01-24T21:56:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485281472501_-1997281123","id":"20170124-181112_314130540","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Flows by Hour - Internal</h3>\n<p>The total number of internal flows by hour over the past 7 days.</p>\n"},"dateCreated":"2017-01-24T18:11:12+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5242","focus":true},{"text":"%spark.sql\n\nselect\n    from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') as time,\n    count(*) as Flows\nfrom yaf\nwhere \n    is_internal(ip_src_addr) == is_internal(ip_dst_addr) and\n    datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\ngroup by from_uni
 xtime(timestamp/1000,'YYYY-MM-dd HH:00')\norder by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') asc   \n","dateUpdated":"2017-01-24T22:01:34+0000","config":{"colWidth":7,"graph":{"mode":"multiBarChart","height":300,"optionOpen":false,"keys":[{"name":"time","index":0,"aggr":"sum"}],"values":[{"name":"Flows","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"time","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485281457089_1201121160","id":"20170124-181057_1268682896","result":{"code":"SUCCESS","type":"TABLE","msg":"time\tFlows\n2017-01-18 15:00\t3979\n2017-01-18 16:00\t3291\n2017-01-18 17:00\t927\n2017-01-18 18:00\t978\n2017-01-18 19:00\t1253\n2017-01-18 20:00\t1414\n2017-01-18 21:00\t1119\n2017-01-18 22:00\t1686\n2017-01-18 23:00\t3888\n2017-01-19 00:00\t4392\n2017-01-19 01:00\t1908\n2017-01-19 02:00\t535\n2017-01-19 03:00\t894\n2017-01-19 04:00\t925\n2017-01-19 05:00
 \t655\n2017-01-19 06:00\t1045\n2017-01-19 07:00\t1105\n2017-01-19 08:00\t984\n2017-01-19 09:00\t2409\n2017-01-19 10:00\t4391\n2017-01-19 11:00\t2609\n2017-01-19 12:00\t1608\n2017-01-19 13:00\t1461\n2017-01-19 14:00\t1298\n2017-01-19 15:00\t1499\n2017-01-19 16:00\t1032\n2017-01-19 17:00\t998\n2017-01-19 18:00\t983\n2017-01-19 19:00\t1238\n2017-01-19 20:00\t896\n2017-01-19 21:00\t1001\n2017-01-19 22:00\t1034\n2017-01-19 23:00\t1326\n2017-01-20 00:00\t1586\n2017-01-20 01:00\t3086\n2017-01-20 02:00\t4247\n2017-01-20 03:00\t2679\n2017-01-20 04:00\t1313\n2017-01-20 05:00\t1011\n2017-01-20 06:00\t813\n2017-01-20 07:00\t1944\n2017-01-20 08:00\t2127\n2017-01-20 09:00\t1026\n2017-01-20 10:00\t975\n2017-01-20 11:00\t1486\n2017-01-20 12:00\t1091\n2017-01-20 13:00\t2122\n2017-01-20 14:00\t1750\n2017-01-20 15:00\t772\n2017-01-20 16:00\t635\n2017-01-20 17:00\t1256\n2017-01-20 18:00\t1352\n2017-01-20 19:00\t959\n2017-01-20 20:00\t2556\n2017-01-20 21:00\t2333\n2017-01-20 22:00\t978\n2017-01-20 23:00
 \t915\n2017-01-21 00:00\t2815\n2017-01-21 01:00\t2371\n2017-01-21 02:00\t1309\n2017-01-21 03:00\t2123\n2017-01-21 04:00\t2065\n2017-01-21 05:00\t982\n2017-01-21 06:00\t839\n2017-01-21 07:00\t2937\n2017-01-21 08:00\t2015\n2017-01-21 09:00\t1636\n2017-01-21 10:00\t1564\n2017-01-21 11:00\t964\n2017-01-21 12:00\t762\n2017-01-21 13:00\t657\n2017-01-21 14:00\t848\n2017-01-21 15:00\t1302\n2017-01-21 16:00\t1225\n2017-01-21 17:00\t1483\n2017-01-21 18:00\t1362\n2017-01-21 19:00\t3140\n2017-01-21 20:00\t2779\n2017-01-21 21:00\t1408\n2017-01-21 22:00\t1543\n2017-01-21 23:00\t2669\n2017-01-22 00:00\t1604\n2017-01-22 01:00\t2738\n2017-01-22 02:00\t3312\n2017-01-22 03:00\t3873\n2017-01-22 04:00\t2477\n2017-01-22 05:00\t1405\n2017-01-22 06:00\t1492\n2017-01-22 07:00\t1615\n2017-01-22 08:00\t2707\n2017-01-22 09:00\t3096\n2017-01-22 10:00\t1722\n2017-01-22 11:00\t1317\n2017-01-22 12:00\t1098\n2017-01-22 13:00\t1163\n2017-01-22 14:00\t2588\n2017-01-22 15:00\t1989\n2017-01-22 16:00\t1231\n2017-01-22 1
 7:00\t1288\n2017-01-22 18:00\t3185\n2017-01-22 19:00\t2534\n2017-01-22 20:00\t967\n2017-01-22 21:00\t968\n2017-01-22 22:00\t936\n2017-01-22 23:00\t612\n2017-01-23 00:00\t1227\n2017-01-23 01:00\t1187\n2017-01-23 02:00\t1139\n2017-01-23 03:00\t1014\n2017-01-23 04:00\t1576\n2017-01-23 05:00\t1251\n2017-01-23 06:00\t841\n2017-01-23 07:00\t3018\n2017-01-23 08:00\t2050\n2017-01-23 09:00\t883\n2017-01-23 10:00\t1148\n2017-01-23 11:00\t864\n2017-01-23 12:00\t950\n2017-01-23 13:00\t1466\n2017-01-23 14:00\t3613\n2017-01-23 15:00\t4729\n2017-01-23 16:00\t2139\n2017-01-23 17:00\t1054\n2017-01-23 18:00\t1037\n2017-01-23 19:00\t3130\n2017-01-23 20:00\t2627\n2017-01-23 21:00\t2502\n2017-01-23 22:00\t1918\n2017-01-23 23:00\t1187\n2017-01-24 00:00\t1335\n2017-01-24 01:00\t1501\n2017-01-24 02:00\t1234\n2017-01-24 03:00\t903\n2017-01-24 04:00\t857\n2017-01-24 05:00\t1060\n2017-01-24 06:00\t1243\n2017-01-24 07:00\t2863\n2017-01-24 08:00\t1879\n2017-01-24 09:00\t1065\n2017-01-24 10:00\t857\n2017-01-24 1
 1:00\t1116\n2017-01-24 12:00\t1568\n2017-01-24 13:00\t3339\n2017-01-24 14:00\t2389\n2017-01-24 15:00\t1366\n2017-01-24 16:00\t596\n","comment":"","msgTable":[[{"key":"Flows","value":"2017-01-18 15:00"},{"key":"Flows","value":"3979"}],[{"value":"2017-01-18 16:00"},{"value":"3291"}],[{"value":"2017-01-18 17:00"},{"value":"927"}],[{"value":"2017-01-18 18:00"},{"value":"978"}],[{"value":"2017-01-18 19:00"},{"value":"1253"}],[{"value":"2017-01-18 20:00"},{"value":"1414"}],[{"value":"2017-01-18 21:00"},{"value":"1119"}],[{"value":"2017-01-18 22:00"},{"value":"1686"}],[{"value":"2017-01-18 23:00"},{"value":"3888"}],[{"value":"2017-01-19 00:00"},{"value":"4392"}],[{"value":"2017-01-19 01:00"},{"value":"1908"}],[{"value":"2017-01-19 02:00"},{"value":"535"}],[{"value":"2017-01-19 03:00"},{"value":"894"}],[{"value":"2017-01-19 04:00"},{"value":"925"}],[{"value":"2017-01-19 05:00"},{"value":"655"}],[{"value":"2017-01-19 06:00"},{"value":"1045"}],[{"value":"2017-01-19 07:00"},{"value":"1105"}],[
 {"value":"2017-01-19 08:00"},{"value":"984"}],[{"value":"2017-01-19 09:00"},{"value":"2409"}],[{"value":"2017-01-19 10:00"},{"value":"4391"}],[{"value":"2017-01-19 11:00"},{"value":"2609"}],[{"value":"2017-01-19 12:00"},{"value":"1608"}],[{"value":"2017-01-19 13:00"},{"value":"1461"}],[{"value":"2017-01-19 14:00"},{"value":"1298"}],[{"value":"2017-01-19 15:00"},{"value":"1499"}],[{"value":"2017-01-19 16:00"},{"value":"1032"}],[{"value":"2017-01-19 17:00"},{"value":"998"}],[{"value":"2017-01-19 18:00"},{"value":"983"}],[{"value":"2017-01-19 19:00"},{"value":"1238"}],[{"value":"2017-01-19 20:00"},{"value":"896"}],[{"value":"2017-01-19 21:00"},{"value":"1001"}],[{"value":"2017-01-19 22:00"},{"value":"1034"}],[{"value":"2017-01-19 23:00"},{"value":"1326"}],[{"value":"2017-01-20 00:00"},{"value":"1586"}],[{"value":"2017-01-20 01:00"},{"value":"3086"}],[{"value":"2017-01-20 02:00"},{"value":"4247"}],[{"value":"2017-01-20 03:00"},{"value":"2679"}],[{"value":"2017-01-20 04:00"},{"value":"13
 13"}],[{"value":"2017-01-20 05:00"},{"value":"1011"}],[{"value":"2017-01-20 06:00"},{"value":"813"}],[{"value":"2017-01-20 07:00"},{"value":"1944"}],[{"value":"2017-01-20 08:00"},{"value":"2127"}],[{"value":"2017-01-20 09:00"},{"value":"1026"}],[{"value":"2017-01-20 10:00"},{"value":"975"}],[{"value":"2017-01-20 11:00"},{"value":"1486"}],[{"value":"2017-01-20 12:00"},{"value":"1091"}],[{"value":"2017-01-20 13:00"},{"value":"2122"}],[{"value":"2017-01-20 14:00"},{"value":"1750"}],[{"value":"2017-01-20 15:00"},{"value":"772"}],[{"value":"2017-01-20 16:00"},{"value":"635"}],[{"value":"2017-01-20 17:00"},{"value":"1256"}],[{"value":"2017-01-20 18:00"},{"value":"1352"}],[{"value":"2017-01-20 19:00"},{"value":"959"}],[{"value":"2017-01-20 20:00"},{"value":"2556"}],[{"value":"2017-01-20 21:00"},{"value":"2333"}],[{"value":"2017-01-20 22:00"},{"value":"978"}],[{"value":"2017-01-20 23:00"},{"value":"915"}],[{"value":"2017-01-21 00:00"},{"value":"2815"}],[{"value":"2017-01-21 01:00"},{"value"
 :"2371"}],[{"value":"2017-01-21 02:00"},{"value":"1309"}],[{"value":"2017-01-21 03:00"},{"value":"2123"}],[{"value":"2017-01-21 04:00"},{"value":"2065"}],[{"value":"2017-01-21 05:00"},{"value":"982"}],[{"value":"2017-01-21 06:00"},{"value":"839"}],[{"value":"2017-01-21 07:00"},{"value":"2937"}],[{"value":"2017-01-21 08:00"},{"value":"2015"}],[{"value":"2017-01-21 09:00"},{"value":"1636"}],[{"value":"2017-01-21 10:00"},{"value":"1564"}],[{"value":"2017-01-21 11:00"},{"value":"964"}],[{"value":"2017-01-21 12:00"},{"value":"762"}],[{"value":"2017-01-21 13:00"},{"value":"657"}],[{"value":"2017-01-21 14:00"},{"value":"848"}],[{"value":"2017-01-21 15:00"},{"value":"1302"}],[{"value":"2017-01-21 16:00"},{"value":"1225"}],[{"value":"2017-01-21 17:00"},{"value":"1483"}],[{"value":"2017-01-21 18:00"},{"value":"1362"}],[{"value":"2017-01-21 19:00"},{"value":"3140"}],[{"value":"2017-01-21 20:00"},{"value":"2779"}],[{"value":"2017-01-21 21:00"},{"value":"1408"}],[{"value":"2017-01-21 22:00"},{"v
 alue":"1543"}],[{"value":"2017-01-21 23:00"},{"value":"2669"}],[{"value":"2017-01-22 00:00"},{"value":"1604"}],[{"value":"2017-01-22 01:00"},{"value":"2738"}],[{"value":"2017-01-22 02:00"},{"value":"3312"}],[{"value":"2017-01-22 03:00"},{"value":"3873"}],[{"value":"2017-01-22 04:00"},{"value":"2477"}],[{"value":"2017-01-22 05:00"},{"value":"1405"}],[{"value":"2017-01-22 06:00"},{"value":"1492"}],[{"value":"2017-01-22 07:00"},{"value":"1615"}],[{"value":"2017-01-22 08:00"},{"value":"2707"}],[{"value":"2017-01-22 09:00"},{"value":"3096"}],[{"value":"2017-01-22 10:00"},{"value":"1722"}],[{"value":"2017-01-22 11:00"},{"value":"1317"}],[{"value":"2017-01-22 12:00"},{"value":"1098"}],[{"value":"2017-01-22 13:00"},{"value":"1163"}],[{"value":"2017-01-22 14:00"},{"value":"2588"}],[{"value":"2017-01-22 15:00"},{"value":"1989"}],[{"value":"2017-01-22 16:00"},{"value":"1231"}],[{"value":"2017-01-22 17:00"},{"value":"1288"}],[{"value":"2017-01-22 18:00"},{"value":"3185"}],[{"value":"2017-01-22 
 19:00"},{"value":"2534"}],[{"value":"2017-01-22 20:00"},{"value":"967"}],[{"value":"2017-01-22 21:00"},{"value":"968"}],[{"value":"2017-01-22 22:00"},{"value":"936"}],[{"value":"2017-01-22 23:00"},{"value":"612"}],[{"value":"2017-01-23 00:00"},{"value":"1227"}],[{"value":"2017-01-23 01:00"},{"value":"1187"}],[{"value":"2017-01-23 02:00"},{"value":"1139"}],[{"value":"2017-01-23 03:00"},{"value":"1014"}],[{"value":"2017-01-23 04:00"},{"value":"1576"}],[{"value":"2017-01-23 05:00"},{"value":"1251"}],[{"value":"2017-01-23 06:00"},{"value":"841"}],[{"value":"2017-01-23 07:00"},{"value":"3018"}],[{"value":"2017-01-23 08:00"},{"value":"2050"}],[{"value":"2017-01-23 09:00"},{"value":"883"}],[{"value":"2017-01-23 10:00"},{"value":"1148"}],[{"value":"2017-01-23 11:00"},{"value":"864"}],[{"value":"2017-01-23 12:00"},{"value":"950"}],[{"value":"2017-01-23 13:00"},{"value":"1466"}],[{"value":"2017-01-23 14:00"},{"value":"3613"}],[{"value":"2017-01-23 15:00"},{"value":"4729"}],[{"value":"2017-01-
 23 16:00"},{"value":"2139"}],[{"value":"2017-01-23 17:00"},{"value":"1054"}],[{"value":"2017-01-23 18:00"},{"value":"1037"}],[{"value":"2017-01-23 19:00"},{"value":"3130"}],[{"value":"2017-01-23 20:00"},{"value":"2627"}],[{"value":"2017-01-23 21:00"},{"value":"2502"}],[{"value":"2017-01-23 22:00"},{"value":"1918"}],[{"value":"2017-01-23 23:00"},{"value":"1187"}],[{"value":"2017-01-24 00:00"},{"value":"1335"}],[{"value":"2017-01-24 01:00"},{"value":"1501"}],[{"value":"2017-01-24 02:00"},{"value":"1234"}],[{"value":"2017-01-24 03:00"},{"value":"903"}],[{"value":"2017-01-24 04:00"},{"value":"857"}],[{"value":"2017-01-24 05:00"},{"value":"1060"}],[{"value":"2017-01-24 06:00"},{"value":"1243"}],[{"value":"2017-01-24 07:00"},{"value":"2863"}],[{"value":"2017-01-24 08:00"},{"value":"1879"}],[{"value":"2017-01-24 09:00"},{"value":"1065"}],[{"value":"2017-01-24 10:00"},{"value":"857"}],[{"value":"2017-01-24 11:00"},{"value":"1116"}],[{"value":"2017-01-24 12:00"},{"value":"1568"}],[{"value":"
 2017-01-24 13:00"},{"value":"3339"}],[{"value":"2017-01-24 14:00"},{"value":"2389"}],[{"value":"2017-01-24 15:00"},{"value":"1366"}],[{"value":"2017-01-24 16:00"},{"value":"596"}]],"columnNames":[{"name":"time","index":0,"aggr":"sum"},{"name":"Flows","index":1,"aggr":"sum"}],"rows":[["2017-01-18 15:00","3979"],["2017-01-18 16:00","3291"],["2017-01-18 17:00","927"],["2017-01-18 18:00","978"],["2017-01-18 19:00","1253"],["2017-01-18 20:00","1414"],["2017-01-18 21:00","1119"],["2017-01-18 22:00","1686"],["2017-01-18 23:00","3888"],["2017-01-19 00:00","4392"],["2017-01-19 01:00","1908"],["2017-01-19 02:00","535"],["2017-01-19 03:00","894"],["2017-01-19 04:00","925"],["2017-01-19 05:00","655"],["2017-01-19 06:00","1045"],["2017-01-19 07:00","1105"],["2017-01-19 08:00","984"],["2017-01-19 09:00","2409"],["2017-01-19 10:00","4391"],["2017-01-19 11:00","2609"],["2017-01-19 12:00","1608"],["2017-01-19 13:00","1461"],["2017-01-19 14:00","1298"],["2017-01-19 15:00","1499"],["2017-01-19 16:00",
 "1032"],["2017-01-19 17:00","998"],["2017-01-19 18:00","983"],["2017-01-19 19:00","1238"],["2017-01-19 20:00","896"],["2017-01-19 21:00","1001"],["2017-01-19 22:00","1034"],["2017-01-19 23:00","1326"],["2017-01-20 00:00","1586"],["2017-01-20 01:00","3086"],["2017-01-20 02:00","4247"],["2017-01-20 03:00","2679"],["2017-01-20 04:00","1313"],["2017-01-20 05:00","1011"],["2017-01-20 06:00","813"],["2017-01-20 07:00","1944"],["2017-01-20 08:00","2127"],["2017-01-20 09:00","1026"],["2017-01-20 10:00","975"],["2017-01-20 11:00","1486"],["2017-01-20 12:00","1091"],["2017-01-20 13:00","2122"],["2017-01-20 14:00","1750"],["2017-01-20 15:00","772"],["2017-01-20 16:00","635"],["2017-01-20 17:00","1256"],["2017-01-20 18:00","1352"],["2017-01-20 19:00","959"],["2017-01-20 20:00","2556"],["2017-01-20 21:00","2333"],["2017-01-20 22:00","978"],["2017-01-20 23:00","915"],["2017-01-21 00:00","2815"],["2017-01-21 01:00","2371"],["2017-01-21 02:00","1309"],["2017-01-21 03:00","2123"],["2017-01-21 04:00"
 ,"2065"],["2017-01-21 05:00","982"],["2017-01-21 06:00","839"],["2017-01-21 07:00","2937"],["2017-01-21 08:00","2015"],["2017-01-21 09:00","1636"],["2017-01-21 10:00","1564"],["2017-01-21 11:00","964"],["2017-01-21 12:00","762"],["2017-01-21 13:00","657"],["2017-01-21 14:00","848"],["2017-01-21 15:00","1302"],["2017-01-21 16:00","1225"],["2017-01-21 17:00","1483"],["2017-01-21 18:00","1362"],["2017-01-21 19:00","3140"],["2017-01-21 20:00","2779"],["2017-01-21 21:00","1408"],["2017-01-21 22:00","1543"],["2017-01-21 23:00","2669"],["2017-01-22 00:00","1604"],["2017-01-22 01:00","2738"],["2017-01-22 02:00","3312"],["2017-01-22 03:00","3873"],["2017-01-22 04:00","2477"],["2017-01-22 05:00","1405"],["2017-01-22 06:00","1492"],["2017-01-22 07:00","1615"],["2017-01-22 08:00","2707"],["2017-01-22 09:00","3096"],["2017-01-22 10:00","1722"],["2017-01-22 11:00","1317"],["2017-01-22 12:00","1098"],["2017-01-22 13:00","1163"],["2017-01-22 14:00","2588"],["2017-01-22 15:00","1989"],["2017-01-22 1
 6:00","1231"],["2017-01-22 17:00","1288"],["2017-01-22 18:00","3185"],["2017-01-22 19:00","2534"],["2017-01-22 20:00","967"],["2017-01-22 21:00","968"],["2017-01-22 22:00","936"],["2017-01-22 23:00","612"],["2017-01-23 00:00","1227"],["2017-01-23 01:00","1187"],["2017-01-23 02:00","1139"],["2017-01-23 03:00","1014"],["2017-01-23 04:00","1576"],["2017-01-23 05:00","1251"],["2017-01-23 06:00","841"],["2017-01-23 07:00","3018"],["2017-01-23 08:00","2050"],["2017-01-23 09:00","883"],["2017-01-23 10:00","1148"],["2017-01-23 11:00","864"],["2017-01-23 12:00","950"],["2017-01-23 13:00","1466"],["2017-01-23 14:00","3613"],["2017-01-23 15:00","4729"],["2017-01-23 16:00","2139"],["2017-01-23 17:00","1054"],["2017-01-23 18:00","1037"],["2017-01-23 19:00","3130"],["2017-01-23 20:00","2627"],["2017-01-23 21:00","2502"],["2017-01-23 22:00","1918"],["2017-01-23 23:00","1187"],["2017-01-24 00:00","1335"],["2017-01-24 01:00","1501"],["2017-01-24 02:00","1234"],["2017-01-24 03:00","903"],["2017-01-24
  04:00","857"],["2017-01-24 05:00","1060"],["2017-01-24 06:00","1243"],["2017-01-24 07:00","2863"],["2017-01-24 08:00","1879"],["2017-01-24 09:00","1065"],["2017-01-24 10:00","857"],["2017-01-24 11:00","1116"],["2017-01-24 12:00","1568"],["2017-01-24 13:00","3339"],["2017-01-24 14:00","2389"],["2017-01-24 15:00","1366"],["2017-01-24 16:00","596"]]},"dateCreated":"2017-01-24T18:10:57+0000","dateStarted":"2017-01-24T22:01:34+0000","dateFinished":"2017-01-24T22:01:53+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5243","focus":true},{"text":"%md\n\n### Flows by Hour - External\n\nThe total number of external flows by hour over the past 7 days.","dateUpdated":"2017-01-24T21:56:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485281706633_47808711","
 id":"20170124-181506_209801818","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Flows by Hour - External</h3>\n<p>The total number of external flows by hour over the past 7 days.</p>\n"},"dateCreated":"2017-01-24T18:15:06+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5244","focus":true},{"text":"%spark.sql\n\nselect\n    from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') as time,\n    count(*) as Flows\nfrom yaf\nwhere \n    is_internal(ip_src_addr) <> is_internal(ip_dst_addr) and\n    datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\ngroup by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00')\norder by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') asc","dateUpdated":"2017-01-24T22:03:59+0000","config":{"colWidth":7,"graph":{"mode":"multiBarChart","height":300,"optionOpen":false,"keys":[{"name":"time","index":0,"aggr":"sum"}],"values":[{"name":"Flows"
 ,"index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"time","index":0,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/sql","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485281704427_1527922405","id":"20170124-181504_2082808725","result":{"code":"SUCCESS","type":"TABLE","msg":"time\tFlows\n2017-01-18 15:00\t5068\n2017-01-18 16:00\t4519\n2017-01-18 17:00\t1259\n2017-01-18 18:00\t1318\n2017-01-18 19:00\t1605\n2017-01-18 20:00\t1916\n2017-01-18 21:00\t1526\n2017-01-18 22:00\t2103\n2017-01-18 23:00\t5058\n2017-01-19 00:00\t5779\n2017-01-19 01:00\t2609\n2017-01-19 02:00\t691\n2017-01-19 03:00\t1071\n2017-01-19 04:00\t1279\n2017-01-19 05:00\t857\n2017-01-19 06:00\t1294\n2017-01-19 07:00\t1511\n2017-01-19 08:00\t1278\n2017-01-19 09:00\t3113\n2017-01-19 10:00\t5793\n2017-01-19 11:00\t3568\n2017-01-19 12:00\t2073\n2017-01-19 13:00\t2013\n2017-01-19 14:00\t1732\n2017-01-19 15:00\t1974\n2017-01-19 16:00\t1432\n2017-01-19 17:00\t1348\n2017-01-19 1
 8:00\t1227\n2017-01-19 19:00\t1677\n2017-01-19 20:00\t1222\n2017-01-19 21:00\t1338\n2017-01-19 22:00\t1518\n2017-01-19 23:00\t1740\n2017-01-20 00:00\t2115\n2017-01-20 01:00\t4010\n2017-01-20 02:00\t5648\n2017-01-20 03:00\t3649\n2017-01-20 04:00\t1733\n2017-01-20 05:00\t1302\n2017-01-20 06:00\t1027\n2017-01-20 07:00\t2577\n2017-01-20 08:00\t2775\n2017-01-20 09:00\t1394\n2017-01-20 10:00\t1288\n2017-01-20 11:00\t1896\n2017-01-20 12:00\t1487\n2017-01-20 13:00\t2755\n2017-01-20 14:00\t2406\n2017-01-20 15:00\t1029\n2017-01-20 16:00\t892\n2017-01-20 17:00\t1538\n2017-01-20 18:00\t1898\n2017-01-20 19:00\t1301\n2017-01-20 20:00\t3203\n2017-01-20 21:00\t3151\n2017-01-20 22:00\t1333\n2017-01-20 23:00\t1139\n2017-01-21 00:00\t3661\n2017-01-21 01:00\t3255\n2017-01-21 02:00\t1763\n2017-01-21 03:00\t2793\n2017-01-21 04:00\t2727\n2017-01-21 05:00\t1350\n2017-01-21 06:00\t1090\n2017-01-21 07:00\t3762\n2017-01-21 08:00\t2781\n2017-01-21 09:00\t2075\n2017-01-21 10:00\t2049\n2017-01-21 11:00\t1228\n20
 17-01-21 12:00\t988\n2017-01-21 13:00\t870\n2017-01-21 14:00\t1047\n2017-01-21 15:00\t1714\n2017-01-21 16:00\t1682\n2017-01-21 17:00\t1863\n2017-01-21 18:00\t1841\n2017-01-21 19:00\t4075\n2017-01-21 20:00\t3708\n2017-01-21 21:00\t1834\n2017-01-21 22:00\t1962\n2017-01-21 23:00\t3431\n2017-01-22 00:00\t2167\n2017-01-22 01:00\t3550\n2017-01-22 02:00\t4450\n2017-01-22 03:00\t5017\n2017-01-22 04:00\t3329\n2017-01-22 05:00\t1802\n2017-01-22 06:00\t1910\n2017-01-22 07:00\t2135\n2017-01-22 08:00\t3527\n2017-01-22 09:00\t4131\n2017-01-22 10:00\t2387\n2017-01-22 11:00\t1755\n2017-01-22 12:00\t1393\n2017-01-22 13:00\t1614\n2017-01-22 14:00\t3426\n2017-01-22 15:00\t2692\n2017-01-22 16:00\t1608\n2017-01-22 17:00\t1781\n2017-01-22 18:00\t4139\n2017-01-22 19:00\t3443\n2017-01-22 20:00\t1230\n2017-01-22 21:00\t1289\n2017-01-22 22:00\t1127\n2017-01-22 23:00\t852\n2017-01-23 00:00\t1508\n2017-01-23 01:00\t1585\n2017-01-23 02:00\t1398\n2017-01-23 03:00\t1431\n2017-01-23 04:00\t2063\n2017-01-23 05:00\t
 1647\n2017-01-23 06:00\t1103\n2017-01-23 07:00\t3851\n2017-01-23 08:00\t2807\n2017-01-23 09:00\t1151\n2017-01-23 10:00\t1423\n2017-01-23 11:00\t1178\n2017-01-23 12:00\t1262\n2017-01-23 13:00\t1838\n2017-01-23 14:00\t4606\n2017-01-23 15:00\t6166\n2017-01-23 16:00\t2911\n2017-01-23 17:00\t1327\n2017-01-23 18:00\t1289\n2017-01-23 19:00\t4010\n2017-01-23 20:00\t3587\n2017-01-23 21:00\t3269\n2017-01-23 22:00\t2630\n2017-01-23 23:00\t1541\n2017-01-24 00:00\t1704\n2017-01-24 01:00\t1967\n2017-01-24 02:00\t1673\n2017-01-24 03:00\t1150\n2017-01-24 04:00\t1128\n2017-01-24 05:00\t1324\n2017-01-24 06:00\t1638\n2017-01-24 07:00\t3739\n2017-01-24 08:00\t2547\n2017-01-24 09:00\t1347\n2017-01-24 10:00\t1174\n2017-01-24 11:00\t1472\n2017-01-24 12:00\t2010\n2017-01-24 13:00\t4269\n2017-01-24 14:00\t3246\n2017-01-24 15:00\t1751\n2017-01-24 16:00\t817\n","comment":"","msgTable":[[{"key":"Flows","value":"2017-01-18 15:00"},{"key":"Flows","value":"5068"}],[{"value":"2017-01-18 16:00"},{"value":"4519"}],[
 {"value":"2017-01-18 17:00"},{"value":"1259"}],[{"value":"2017-01-18 18:00"},{"value":"1318"}],[{"value":"2017-01-18 19:00"},{"value":"1605"}],[{"value":"2017-01-18 20:00"},{"value":"1916"}],[{"value":"2017-01-18 21:00"},{"value":"1526"}],[{"value":"2017-01-18 22:00"},{"value":"2103"}],[{"value":"2017-01-18 23:00"},{"value":"5058"}],[{"value":"2017-01-19 00:00"},{"value":"5779"}],[{"value":"2017-01-19 01:00"},{"value":"2609"}],[{"value":"2017-01-19 02:00"},{"value":"691"}],[{"value":"2017-01-19 03:00"},{"value":"1071"}],[{"value":"2017-01-19 04:00"},{"value":"1279"}],[{"value":"2017-01-19 05:00"},{"value":"857"}],[{"value":"2017-01-19 06:00"},{"value":"1294"}],[{"value":"2017-01-19 07:00"},{"value":"1511"}],[{"value":"2017-01-19 08:00"},{"value":"1278"}],[{"value":"2017-01-19 09:00"},{"value":"3113"}],[{"value":"2017-01-19 10:00"},{"value":"5793"}],[{"value":"2017-01-19 11:00"},{"value":"3568"}],[{"value":"2017-01-19 12:00"},{"value":"2073"}],[{"value":"2017-01-19 13:00"},{"value":"
 2013"}],[{"value":"2017-01-19 14:00"},{"value":"1732"}],[{"value":"2017-01-19 15:00"},{"value":"1974"}],[{"value":"2017-01-19 16:00"},{"value":"1432"}],[{"value":"2017-01-19 17:00"},{"value":"1348"}],[{"value":"2017-01-19 18:00"},{"value":"1227"}],[{"value":"2017-01-19 19:00"},{"value":"1677"}],[{"value":"2017-01-19 20:00"},{"value":"1222"}],[{"value":"2017-01-19 21:00"},{"value":"1338"}],[{"value":"2017-01-19 22:00"},{"value":"1518"}],[{"value":"2017-01-19 23:00"},{"value":"1740"}],[{"value":"2017-01-20 00:00"},{"value":"2115"}],[{"value":"2017-01-20 01:00"},{"value":"4010"}],[{"value":"2017-01-20 02:00"},{"value":"5648"}],[{"value":"2017-01-20 03:00"},{"value":"3649"}],[{"value":"2017-01-20 04:00"},{"value":"1733"}],[{"value":"2017-01-20 05:00"},{"value":"1302"}],[{"value":"2017-01-20 06:00"},{"value":"1027"}],[{"value":"2017-01-20 07:00"},{"value":"2577"}],[{"value":"2017-01-20 08:00"},{"value":"2775"}],[{"value":"2017-01-20 09:00"},{"value":"1394"}],[{"value":"2017-01-20 10:00"}
 ,{"value":"1288"}],[{"value":"2017-01-20 11:00"},{"value":"1896"}],[{"value":"2017-01-20 12:00"},{"value":"1487"}],[{"value":"2017-01-20 13:00"},{"value":"2755"}],[{"value":"2017-01-20 14:00"},{"value":"2406"}],[{"value":"2017-01-20 15:00"},{"value":"1029"}],[{"value":"2017-01-20 16:00"},{"value":"892"}],[{"value":"2017-01-20 17:00"},{"value":"1538"}],[{"value":"2017-01-20 18:00"},{"value":"1898"}],[{"value":"2017-01-20 19:00"},{"value":"1301"}],[{"value":"2017-01-20 20:00"},{"value":"3203"}],[{"value":"2017-01-20 21:00"},{"value":"3151"}],[{"value":"2017-01-20 22:00"},{"value":"1333"}],[{"value":"2017-01-20 23:00"},{"value":"1139"}],[{"value":"2017-01-21 00:00"},{"value":"3661"}],[{"value":"2017-01-21 01:00"},{"value":"3255"}],[{"value":"2017-01-21 02:00"},{"value":"1763"}],[{"value":"2017-01-21 03:00"},{"value":"2793"}],[{"value":"2017-01-21 04:00"},{"value":"2727"}],[{"value":"2017-01-21 05:00"},{"value":"1350"}],[{"value":"2017-01-21 06:00"},{"value":"1090"}],[{"value":"2017-01-
 21 07:00"},{"value":"3762"}],[{"value":"2017-01-21 08:00"},{"value":"2781"}],[{"value":"2017-01-21 09:00"},{"value":"2075"}],[{"value":"2017-01-21 10:00"},{"value":"2049"}],[{"value":"2017-01-21 11:00"},{"value":"1228"}],[{"value":"2017-01-21 12:00"},{"value":"988"}],[{"value":"2017-01-21 13:00"},{"value":"870"}],[{"value":"2017-01-21 14:00"},{"value":"1047"}],[{"value":"2017-01-21 15:00"},{"value":"1714"}],[{"value":"2017-01-21 16:00"},{"value":"1682"}],[{"value":"2017-01-21 17:00"},{"value":"1863"}],[{"value":"2017-01-21 18:00"},{"value":"1841"}],[{"value":"2017-01-21 19:00"},{"value":"4075"}],[{"value":"2017-01-21 20:00"},{"value":"3708"}],[{"value":"2017-01-21 21:00"},{"value":"1834"}],[{"value":"2017-01-21 22:00"},{"value":"1962"}],[{"value":"2017-01-21 23:00"},{"value":"3431"}],[{"value":"2017-01-22 00:00"},{"value":"2167"}],[{"value":"2017-01-22 01:00"},{"value":"3550"}],[{"value":"2017-01-22 02:00"},{"value":"4450"}],[{"value":"2017-01-22 03:00"},{"value":"5017"}],[{"value":
 "2017-01-22 04:00"},{"value":"3329"}],[{"value":"2017-01-22 05:00"},{"value":"1802"}],[{"value":"2017-01-22 06:00"},{"value":"1910"}],[{"value":"2017-01-22 07:00"},{"value":"2135"}],[{"value":"2017-01-22 08:00"},{"value":"3527"}],[{"value":"2017-01-22 09:00"},{"value":"4131"}],[{"value":"2017-01-22 10:00"},{"value":"2387"}],[{"value":"2017-01-22 11:00"},{"value":"1755"}],[{"value":"2017-01-22 12:00"},{"value":"1393"}],[{"value":"2017-01-22 13:00"},{"value":"1614"}],[{"value":"2017-01-22 14:00"},{"value":"3426"}],[{"value":"2017-01-22 15:00"},{"value":"2692"}],[{"value":"2017-01-22 16:00"},{"value":"1608"}],[{"value":"2017-01-22 17:00"},{"value":"1781"}],[{"value":"2017-01-22 18:00"},{"value":"4139"}],[{"value":"2017-01-22 19:00"},{"value":"3443"}],[{"value":"2017-01-22 20:00"},{"value":"1230"}],[{"value":"2017-01-22 21:00"},{"value":"1289"}],[{"value":"2017-01-22 22:00"},{"value":"1127"}],[{"value":"2017-01-22 23:00"},{"value":"852"}],[{"value":"2017-01-23 00:00"},{"value":"1508"}],
 [{"value":"2017-01-23 01:00"},{"value":"1585"}],[{"value":"2017-01-23 02:00"},{"value":"1398"}],[{"value":"2017-01-23 03:00"},{"value":"1431"}],[{"value":"2017-01-23 04:00"},{"value":"2063"}],[{"value":"2017-01-23 05:00"},{"value":"1647"}],[{"value":"2017-01-23 06:00"},{"value":"1103"}],[{"value":"2017-01-23 07:00"},{"value":"3851"}],[{"value":"2017-01-23 08:00"},{"value":"2807"}],[{"value":"2017-01-23 09:00"},{"value":"1151"}],[{"value":"2017-01-23 10:00"},{"value":"1423"}],[{"value":"2017-01-23 11:00"},{"value":"1178"}],[{"value":"2017-01-23 12:00"},{"value":"1262"}],[{"value":"2017-01-23 13:00"},{"value":"1838"}],[{"value":"2017-01-23 14:00"},{"value":"4606"}],[{"value":"2017-01-23 15:00"},{"value":"6166"}],[{"value":"2017-01-23 16:00"},{"value":"2911"}],[{"value":"2017-01-23 17:00"},{"value":"1327"}],[{"value":"2017-01-23 18:00"},{"value":"1289"}],[{"value":"2017-01-23 19:00"},{"value":"4010"}],[{"value":"2017-01-23 20:00"},{"value":"3587"}],[{"value":"2017-01-23 21:00"},{"value
 ":"3269"}],[{"value":"2017-01-23 22:00"},{"value":"2630"}],[{"value":"2017-01-23 23:00"},{"value":"1541"}],[{"value":"2017-01-24 00:00"},{"value":"1704"}],[{"value":"2017-01-24 01:00"},{"value":"1967"}],[{"value":"2017-01-24 02:00"},{"value":"1673"}],[{"value":"2017-01-24 03:00"},{"value":"1150"}],[{"value":"2017-01-24 04:00"},{"value":"1128"}],[{"value":"2017-01-24 05:00"},{"value":"1324"}],[{"value":"2017-01-24 06:00"},{"value":"1638"}],[{"value":"2017-01-24 07:00"},{"value":"3739"}],[{"value":"2017-01-24 08:00"},{"value":"2547"}],[{"value":"2017-01-24 09:00"},{"value":"1347"}],[{"value":"2017-01-24 10:00"},{"value":"1174"}],[{"value":"2017-01-24 11:00"},{"value":"1472"}],[{"value":"2017-01-24 12:00"},{"value":"2010"}],[{"value":"2017-01-24 13:00"},{"value":"4269"}],[{"value":"2017-01-24 14:00"},{"value":"3246"}],[{"value":"2017-01-24 15:00"},{"value":"1751"}],[{"value":"2017-01-24 16:00"},{"value":"817"}]],"columnNames":[{"name":"time","index":0,"aggr":"sum"},{"name":"Flows","ind
 ex":1,"aggr":"sum"}],"rows":[["2017-01-18 15:00","5068"],["2017-01-18 16:00","4519"],["2017-01-18 17:00","1259"],["2017-01-18 18:00","1318"],["2017-01-18 19:00","1605"],["2017-01-18 20:00","1916"],["2017-01-18 21:00","1526"],["2017-01-18 22:00","2103"],["2017-01-18 23:00","5058"],["2017-01-19 00:00","5779"],["2017-01-19 01:00","2609"],["2017-01-19 02:00","691"],["2017-01-19 03:00","1071"],["2017-01-19 04:00","1279"],["2017-01-19 05:00","857"],["2017-01-19 06:00","1294"],["2017-01-19 07:00","1511"],["2017-01-19 08:00","1278"],["2017-01-19 09:00","3113"],["2017-01-19 10:00","5793"],["2017-01-19 11:00","3568"],["2017-01-19 12:00","2073"],["2017-01-19 13:00","2013"],["2017-01-19 14:00","1732"],["2017-01-19 15:00","1974"],["2017-01-19 16:00","1432"],["2017-01-19 17:00","1348"],["2017-01-19 18:00","1227"],["2017-01-19 19:00","1677"],["2017-01-19 20:00","1222"],["2017-01-19 21:00","1338"],["2017-01-19 22:00","1518"],["2017-01-19 23:00","1740"],["2017-01-20 00:00","2115"],["2017-01-20 01:00
 ","4010"],["2017-01-20 02:00","5648"],["2017-01-20 03:00","3649"],["2017-01-20 04:00","1733"],["2017-01-20 05:00","1302"],["2017-01-20 06:00","1027"],["2017-01-20 07:00","2577"],["2017-01-20 08:00","2775"],["2017-01-20 09:00","1394"],["2017-01-20 10:00","1288"],["2017-01-20 11:00","1896"],["2017-01-20 12:00","1487"],["2017-01-20 13:00","2755"],["2017-01-20 14:00","2406"],["2017-01-20 15:00","1029"],["2017-01-20 16:00","892"],["2017-01-20 17:00","1538"],["2017-01-20 18:00","1898"],["2017-01-20 19:00","1301"],["2017-01-20 20:00","3203"],["2017-01-20 21:00","3151"],["2017-01-20 22:00","1333"],["2017-01-20 23:00","1139"],["2017-01-21 00:00","3661"],["2017-01-21 01:00","3255"],["2017-01-21 02:00","1763"],["2017-01-21 03:00","2793"],["2017-01-21 04:00","2727"],["2017-01-21 05:00","1350"],["2017-01-21 06:00","1090"],["2017-01-21 07:00","3762"],["2017-01-21 08:00","2781"],["2017-01-21 09:00","2075"],["2017-01-21 10:00","2049"],["2017-01-21 11:00","1228"],["2017-01-21 12:00","988"],["2017-01
 -21 13:00","870"],["2017-01-21 14:00","1047"],["2017-01-21 15:00","1714"],["2017-01-21 16:00","1682"],["2017-01-21 17:00","1863"],["2017-01-21 18:00","1841"],["2017-01-21 19:00","4075"],["2017-01-21 20:00","3708"],["2017-01-21 21:00","1834"],["2017-01-21 22:00","1962"],["2017-01-21 23:00","3431"],["2017-01-22 00:00","2167"],["2017-01-22 01:00","3550"],["2017-01-22 02:00","4450"],["2017-01-22 03:00","5017"],["2017-01-22 04:00","3329"],["2017-01-22 05:00","1802"],["2017-01-22 06:00","1910"],["2017-01-22 07:00","2135"],["2017-01-22 08:00","3527"],["2017-01-22 09:00","4131"],["2017-01-22 10:00","2387"],["2017-01-22 11:00","1755"],["2017-01-22 12:00","1393"],["2017-01-22 13:00","1614"],["2017-01-22 14:00","3426"],["2017-01-22 15:00","2692"],["2017-01-22 16:00","1608"],["2017-01-22 17:00","1781"],["2017-01-22 18:00","4139"],["2017-01-22 19:00","3443"],["2017-01-22 20:00","1230"],["2017-01-22 21:00","1289"],["2017-01-22 22:00","1127"],["2017-01-22 23:00","852"],["2017-01-23 00:00","1508"],
 ["2017-01-23 01:00","1585"],["2017-01-23 02:00","1398"],["2017-01-23 03:00","1431"],["2017-01-23 04:00","2063"],["2017-01-23 05:00","1647"],["2017-01-23 06:00","1103"],["2017-01-23 07:00","3851"],["2017-01-23 08:00","2807"],["2017-01-23 09:00","1151"],["2017-01-23 10:00","1423"],["2017-01-23 11:00","1178"],["2017-01-23 12:00","1262"],["2017-01-23 13:00","1838"],["2017-01-23 14:00","4606"],["2017-01-23 15:00","6166"],["2017-01-23 16:00","2911"],["2017-01-23 17:00","1327"],["2017-01-23 18:00","1289"],["2017-01-23 19:00","4010"],["2017-01-23 20:00","3587"],["2017-01-23 21:00","3269"],["2017-01-23 22:00","2630"],["2017-01-23 23:00","1541"],["2017-01-24 00:00","1704"],["2017-01-24 01:00","1967"],["2017-01-24 02:00","1673"],["2017-01-24 03:00","1150"],["2017-01-24 04:00","1128"],["2017-01-24 05:00","1324"],["2017-01-24 06:00","1638"],["2017-01-24 07:00","3739"],["2017-01-24 08:00","2547"],["2017-01-24 09:00","1347"],["2017-01-24 10:00","1174"],["2017-01-24 11:00","1472"],["2017-01-24 12:0
 0","2010"],["2017-01-24 13:00","4269"],["2017-01-24 14:00","3246"],["2017-01-24 15:00","1751"],["2017-01-24 16:00","817"]]},"dateCreated":"2017-01-24T18:15:04+0000","dateStarted":"2017-01-24T22:01:44+0000","dateFinished":"2017-01-24T22:02:11+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5245","focus":true},{"text":"%md\n\n### Top Locations\n\nThe top 10 locations where external flows terminate as determined by number of packets.\n","dateUpdated":"2017-01-24T21:56:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485199170630_249678087","id":"20170123-191930_2085111600","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Top Locations</h3>\n<p>The top 10 locations where external flows terminate as determined by number of packets.</p>\n"},"dateCre
 ated":"2017-01-23T07:19:30+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5246","focus":true},{"text":"%spark.sql\n\nselect country, city, sum(pkts) as pkts\nfrom (\n    select \n        `enrichments.geo.ip_dst_addr.country` as country,\n        `enrichments.geo.ip_dst_addr.city` as city,\n        pkt + rpkt as pkts\n    from yaf\n    where is_internal(ip_src_addr) <> is_internal(ip_dst_addr) and\n        datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n    union all\n    select \n        `enrichments.geo.ip_src_addr.country` as country,\n        `enrichments.geo.ip_src_addr.city` as city,\n        pkt + rpkt as pkts\n    from yaf\n    where is_internal(ip_src_addr) <> is_internal(ip_dst_addr) and\n        datediff(current_timestamp(), from_unixtime(timestamp/1000)) <= 7\n) ips\nwhere country is not null or city is not null\ngroup by country, city\norder
  by pkts desc\nlimit 10","dateUpdated":"2017-01-24T21:56:00+0000","config":{"colWidth":7,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[{"name":"country","index":0,"aggr":"sum"}],"values":[{"name":"city","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"country","index":0,"aggr":"sum"},"yAxis":{"name":"city","index":1,"aggr":"sum"}}},"enabled":true,"editorMode":"ace/mode/scala","editorHide":true,"tableHide":false},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485189483427_-889370099","id":"20170123-163803_1991676623","result":{"code":"SUCCESS","type":"TABLE","msg":"country\tcity\tpkts\nFR\tStrassbourg\t241668\nRU\tnull\t48039\nUS\tLos Angeles\t42570\nUS\tPhoenix\t21588\nFR\tnull\t3874\n","comment":"","msgTable":[[{"key":"city","value":"FR"},{"key":"city","value":"Strassbourg"},{"key":"city","value":"241668"}],[{"key":"pkts","value":"RU"},{"key":"pkts","value":"null"},{"key":"pkts","value":"48039"}],[{"value":"US"},{"value":"Los Angeles"},{
 "value":"42570"}],[{"value":"US"},{"value":"Phoenix"},{"value":"21588"}],[{"value":"FR"},{"value":"null"},{"value":"3874"}]],"columnNames":[{"name":"country","index":0,"aggr":"sum"},{"name":"city","index":1,"aggr":"sum"},{"name":"pkts","index":2,"aggr":"sum"}],"rows":[["FR","Strassbourg","241668"],["RU","null","48039"],["US","Los Angeles","42570"],["US","Phoenix","21588"],["FR","null","3874"]]},"dateCreated":"2017-01-23T04:38:03+0000","dateStarted":"2017-01-24T21:58:01+0000","dateFinished":"2017-01-24T21:59:01+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5247","focus":true},{"config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true,"tableHide":false},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485286003622_-1705947416","id":"20170124-192643_548471388","dateCreated":"2017-01-24T19:26:43+0000","status":"FIN
 ISHED","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:7830","dateUpdated":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","dateStarted":"2017-01-24T21:56:00+0000","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Flow Duration</h3>\n<p>The average duration of each flow by hour in fractional seconds.</p>\n"},"text":"%md\n\n### Flow Duration\n\nThe average duration of each flow by hour in fractional seconds."},{"config":{"colWidth":7,"graph":{"mode":"lineChart","height":300,"optionOpen":false,"keys":[{"name":"time","index":0,"aggr":"sum"}],"values":[{"name":"Duration","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"time","index":0,"aggr":"sum"},"yAxis":{"name":"Duration","index":1,"aggr":"sum"}}},"enabled":true,"editorHide":true},"settings":{"params":{},"forms":{}},"jobName":"paragraph_1485286005230_-748692153","id":"20170124-192645_1541199231","dateCreated":"2017-01-24T19:26:45+0000","status":"FINISHED","progressUpdateIntervalM
 s":500,"focus":true,"$$hashKey":"object:7900","dateUpdated":"2017-01-24T22:03:47+0000","dateFinished":"2017-01-24T22:02:23+0000","dateStarted":"2017-01-24T22:02:00+0000","result":{"code":"SUCCESS","type":"TABLE","msg":"time\tDuration\n2017-01-18 15:00\t0.19031612689289265\n2017-01-18 16:00\t0.09600653008962867\n2017-01-18 17:00\t0.11555946935041171\n2017-01-18 18:00\t0.12217770034843205\n2017-01-18 19:00\t0.1669958012596221\n2017-01-18 20:00\t0.12340630630630631\n2017-01-18 21:00\t0.10697542533081285\n2017-01-18 22:00\t0.14323700184745317\n2017-01-18 23:00\t0.1631703554661301\n2017-01-19 00:00\t0.13247930390325435\n2017-01-19 01:00\t0.04939716626079257\n2017-01-19 02:00\t0.10303833605220229\n2017-01-19 03:00\t0.10067531806615776\n2017-01-19 04:00\t0.03247731397459165\n2017-01-19 05:00\t0.22729497354497355\n2017-01-19 06:00\t0.21010773834972213\n2017-01-19 07:00\t0.08055848623853211\n2017-01-19 08:00\t3.9832007073386383E-4\n2017-01-19 09:00\t0.19030351321984787\n2017-01-19 10:00\t0.1
 452054202670856\n2017-01-19 11:00\t0.1482769953051643\n2017-01-19 12:00\t0.07671529475685955\n2017-01-19 13:00\t0.042551237766263676\n2017-01-19 14:00\t0.029415181518151815\n2017-01-19 15:00\t0.20017679239850272\n2017-01-19 16:00\t0.08409375\n2017-01-19 17:00\t0.2308393009377664\n2017-01-19 18:00\t0.09319999999999999\n2017-01-19 19:00\t0.18260548885077185\n2017-01-19 20:00\t4.796978281397545E-4\n2017-01-19 21:00\t0.11935314236853357\n2017-01-19 22:00\t0.15877311912225706\n2017-01-19 23:00\t0.23679060665362034\n2017-01-20 00:00\t0.15056687381788708\n2017-01-20 01:00\t0.13923590755355128\n2017-01-20 02:00\t0.14459666498231427\n2017-01-20 03:00\t0.08863527180783819\n2017-01-20 04:00\t0.1746280367695338\n2017-01-20 05:00\t0.029050151318633807\n2017-01-20 06:00\t0.0017896739130434784\n2017-01-20 07:00\t0.09208272506082724\n2017-01-20 08:00\t0.10011607507139941\n2017-01-20 09:00\t0.0844285123966942\n2017-01-20 10:00\t0.006947856827220504\n2017-01-20 11:00\t0.06180100532229451\n2017-01-20 
 12:00\t0.031015128006206365\n2017-01-20 13:00\t0.10380992413368874\n2017-01-20 14:00\t0.1563943695861405\n2017-01-20 15:00\t6.302054414214325E-4\n2017-01-20 16:00\t0.0026234446627373936\n2017-01-20 17:00\t0.1754280601288475\n2017-01-20 18:00\t0.09807907692307692\n2017-01-20 19:00\t0.07658407079646017\n2017-01-20 20:00\t0.20836499392255597\n2017-01-20 21:00\t0.06330415754923414\n2017-01-20 22:00\t0.16029337948939854\n2017-01-20 23:00\t0.19351168451801365\n2017-01-21 00:00\t0.14672251389746754\n2017-01-21 01:00\t0.10111322431567722\n2017-01-21 02:00\t0.09224283854166666\n2017-01-21 03:00\t0.13477034174125302\n2017-01-21 04:00\t0.23768948247078464\n2017-01-21 05:00\t0.1218173241852487\n2017-01-21 06:00\t0.0662239502332815\n2017-01-21 07:00\t0.16158068368413195\n2017-01-21 08:00\t0.135581317764804\n2017-01-21 09:00\t0.12834492050660198\n2017-01-21 10:00\t0.09243758649321893\n2017-01-21 11:00\t0.037721259124087594\n2017-01-21 12:00\t0.001694857142857143\n2017-01-21 13:00\t0.2279521938441
 388\n2017-01-21 14:00\t0.04165118733509235\n2017-01-21 15:00\t0.216584549071618\n2017-01-21 16:00\t0.17769900240798076\n2017-01-21 17:00\t0.18550478182904964\n2017-01-21 18:00\t0.18409959413050267\n2017-01-21 19:00\t0.14356826056826055\n2017-01-21 20:00\t0.04235902574379528\n2017-01-21 21:00\t0.08232819247378162\n2017-01-21 22:00\t0.0951803138373752\n2017-01-21 23:00\t0.06805885245901637\n2017-01-22 00:00\t0.10884062582869265\n2017-01-22 01:00\t0.14689821882951648\n2017-01-22 02:00\t0.18955771708322594\n2017-01-22 03:00\t0.1267548931383577\n2017-01-22 04:00\t0.10627040992077161\n2017-01-22 05:00\t0.13949204864359213\n2017-01-22 06:00\t0.18298706643151086\n2017-01-22 07:00\t0.08909306666666668\n2017-01-22 08:00\t0.10219826756496628\n2017-01-22 09:00\t0.1094142797841428\n2017-01-22 10:00\t0.13313847651496716\n2017-01-22 11:00\t0.22065462239583333\n2017-01-22 12:00\t0.05742713769570453\n2017-01-22 13:00\t0.09727475693194094\n2017-01-22 14:00\t0.15535018290655137\n2017-01-22 15:00\t0.04
 639948728904081\n2017-01-22 16:00\t0.16877034166960198\n2017-01-22 17:00\t0.16052199413489737\n2017-01-22 18:00\t0.1477992900054615\n2017-01-22 19:00\t0.16697590764597625\n2017-01-22 20:00\t0.12821210741920802\n2017-01-22 21:00\t0.26528622064687635\n2017-01-22 22:00\t0.10951478429471645\n2017-01-22 23:00\t8.907103825136611E-4\n2017-01-23 00:00\t0.061153930530164546\n2017-01-23 01:00\t0.19865295815295814\n2017-01-23 02:00\t0.06403429247142294\n2017-01-23 03:00\t0.11023640081799592\n2017-01-23 04:00\t0.1491297059631767\n2017-01-23 05:00\t0.03294133885438233\n2017-01-23 06:00\t0.13834876543209879\n2017-01-23 07:00\t0.1727832289998544\n2017-01-23 08:00\t0.15339118797611695\n2017-01-23 09:00\t0.12512143559488692\n2017-01-23 10:00\t0.09902022559315442\n2017-01-23 11:00\t0.22309696376101862\n2017-01-23 12:00\t0.17790415913200722\n2017-01-23 13:00\t0.20218280871670705\n2017-01-23 14:00\t0.1394567465628422\n2017-01-23 15:00\t0.12533896282698487\n2017-01-23 16:00\t0.04402594059405941\n2017-01
 -23 17:00\t0.3417320453590928\n2017-01-23 18:00\t0.054911006018916596\n2017-01-23 19:00\t0.16920854341736694\n2017-01-23 20:00\t0.06835967170904408\n2017-01-23 21:00\t0.13492999480159418\n2017-01-23 22:00\t0.046313544415127535\n2017-01-23 23:00\t0.19541715542521992\n2017-01-24 00:00\t0.1406265218821981\n2017-01-24 01:00\t0.07935322952710494\n2017-01-24 02:00\t0.1307764017887857\n2017-01-24 03:00\t0.0625874330248417\n2017-01-24 04:00\t0.3031566750629723\n2017-01-24 05:00\t0.1141354865771812\n2017-01-24 06:00\t0.12247032280458175\n2017-01-24 07:00\t0.14380869433504992\n2017-01-24 08:00\t0.21328400361500227\n2017-01-24 09:00\t0.19322595356550581\n2017-01-24 10:00\t0.1620773018217627\n2017-01-24 11:00\t0.04968894899536321\n2017-01-24 12:00\t0.05735494689770822\n2017-01-24 13:00\t0.09200328601472131\n2017-01-24 14:00\t0.10968589174800354\n2017-01-24 15:00\t0.022384343920436315\n2017-01-24 16:00\t0.010777777777777778\n","comment":"","msgTable":[[{"key":"Duration","value":"2017-01-18 15:00
 "},{"key":"Duration","value":"0.19031612689289265"}],[{"value":"2017-01-18 16:00"},{"value":"0.09600653008962867"}],[{"value":"2017-01-18 17:00"},{"value":"0.11555946935041171"}],[{"value":"2017-01-18 18:00"},{"value":"0.12217770034843205"}],[{"value":"2017-01-18 19:00"},{"value":"0.1669958012596221"}],[{"value":"2017-01-18 20:00"},{"value":"0.12340630630630631"}],[{"value":"2017-01-18 21:00"},{"value":"0.10697542533081285"}],[{"value":"2017-01-18 22:00"},{"value":"0.14323700184745317"}],[{"value":"2017-01-18 23:00"},{"value":"0.1631703554661301"}],[{"value":"2017-01-19 00:00"},{"value":"0.13247930390325435"}],[{"value":"2017-01-19 01:00"},{"value":"0.04939716626079257"}],[{"value":"2017-01-19 02:00"},{"value":"0.10303833605220229"}],[{"value":"2017-01-19 03:00"},{"value":"0.10067531806615776"}],[{"value":"2017-01-19 04:00"},{"value":"0.03247731397459165"}],[{"value":"2017-01-19 05:00"},{"value":"0.22729497354497355"}],[{"value":"2017-01-19 06:00"},{"value":"0.21010773834972213"}],[
 {"value":"2017-01-19 07:00"},{"value":"0.08055848623853211"}],[{"value":"2017-01-19 08:00"},{"value":"3.9832007073386383E-4"}],[{"value":"2017-01-19 09:00"},{"value":"0.19030351321984787"}],[{"value":"2017-01-19 10:00"},{"value":"0.1452054202670856"}],[{"value":"2017-01-19 11:00"},{"value":"0.1482769953051643"}],[{"value":"2017-01-19 12:00"},{"value":"0.07671529475685955"}],[{"value":"2017-01-19 13:00"},{"value":"0.042551237766263676"}],[{"value":"2017-01-19 14:00"},{"value":"0.029415181518151815"}],[{"value":"2017-01-19 15:00"},{"value":"0.20017679239850272"}],[{"value":"2017-01-19 16:00"},{"value":"0.08409375"}],[{"value":"2017-01-19 17:00"},{"value":"0.2308393009377664"}],[{"value":"2017-01-19 18:00"},{"value":"0.09319999999999999"}],[{"value":"2017-01-19 19:00"},{"value":"0.18260548885077185"}],[{"value":"2017-01-19 20:00"},{"value":"4.796978281397545E-4"}],[{"value":"2017-01-19 21:00"},{"value":"0.11935314236853357"}],[{"value":"2017-01-19 22:00"},{"value":"0.15877311912225706"
 }],[{"value":"2017-01-19 23:00"},{"value":"0.23679060665362034"}],[{"value":"2017-01-20 00:00"},{"value":"0.15056687381788708"}],[{"value":"2017-01-20 01:00"},{"value":"0.13923590755355128"}],[{"value":"2017-01-20 02:00"},{"value":"0.14459666498231427"}],[{"value":"2017-01-20 03:00"},{"value":"0.08863527180783819"}],[{"value":"2017-01-20 04:00"},{"value":"0.1746280367695338"}],[{"value":"2017-01-20 05:00"},{"value":"0.029050151318633807"}],[{"value":"2017-01-20 06:00"},{"value":"0.0017896739130434784"}],[{"value":"2017-01-20 07:00"},{"value":"0.09208272506082724"}],[{"value":"2017-01-20 08:00"},{"value":"0.10011607507139941"}],[{"value":"2017-01-20 09:00"},{"value":"0.0844285123966942"}],[{"value":"2017-01-20 10:00"},{"value":"0.006947856827220504"}],[{"value":"2017-01-20 11:00"},{"value":"0.06180100532229451"}],[{"value":"2017-01-20 12:00"},{"value":"0.031015128006206365"}],[{"value":"2017-01-20 13:00"},{"value":"0.10380992413368874"}],[{"value":"2017-01-20 14:00"},{"value":"0.1563
 943695861405"}],[{"value":"2017-01-20 15:00"},{"value":"6.302054414214325E-4"}],[{"value":"2017-01-20 16:00"},{"value":"0.0026234446627373936"}],[{"value":"2017-01-20 17:00"},{"value":"0.1754280601288475"}],[{"value":"2017-01-20 18:00"},{"value":"0.09807907692307692"}],[{"value":"2017-01-20 19:00"},{"value":"0.07658407079646017"}],[{"value":"2017-01-20 20:00"},{"value":"0.20836499392255597"}],[{"value":"2017-01-20 21:00"},{"value":"0.06330415754923414"}],[{"value":"2017-01-20 22:00"},{"value":"0.16029337948939854"}],[{"value":"2017-01-20 23:00"},{"value":"0.19351168451801365"}],[{"value":"2017-01-21 00:00"},{"value":"0.14672251389746754"}],[{"value":"2017-01-21 01:00"},{"value":"0.10111322431567722"}],[{"value":"2017-01-21 02:00"},{"value":"0.09224283854166666"}],[{"value":"2017-01-21 03:00"},{"value":"0.13477034174125302"}],[{"value":"2017-01-21 04:00"},{"value":"0.23768948247078464"}],[{"value":"2017-01-21 05:00"},{"value":"0.1218173241852487"}],[{"value":"2017-01-21 06:00"},{"val
 ue":"0.0662239502332815"}],[{"value":"2017-01-21 07:00"},{"value":"0.16158068368413195"}],[{"value":"2017-01-21 08:00"},{"value":"0.135581317764804"}],[{"value":"2017-01-21 09:00"},{"value":"0.12834492050660198"}],[{"value":"2017-01-21 10:00"},{"value":"0.09243758649321893"}],[{"value":"2017-01-21 11:00"},{"value":"0.037721259124087594"}],[{"value":"2017-01-21 12:00"},{"value":"0.001694857142857143"}],[{"value":"2017-01-21 13:00"},{"value":"0.2279521938441388"}],[{"value":"2017-01-21 14:00"},{"value":"0.04165118733509235"}],[{"value":"2017-01-21 15:00"},{"value":"0.216584549071618"}],[{"value":"2017-01-21 16:00"},{"value":"0.17769900240798076"}],[{"value":"2017-01-21 17:00"},{"value":"0.18550478182904964"}],[{"value":"2017-01-21 18:00"},{"value":"0.18409959413050267"}],[{"value":"2017-01-21 19:00"},{"value":"0.14356826056826055"}],[{"value":"2017-01-21 20:00"},{"value":"0.04235902574379528"}],[{"value":"2017-01-21 21:00"},{"value":"0.08232819247378162"}],[{"value":"2017-01-21 22:00"
 },{"value":"0.0951803138373752"}],[{"value":"2017-01-21 23:00"},{"value":"0.06805885245901637"}],[{"value":"2017-01-22 00:00"},{"value":"0.10884062582869265"}],[{"value":"2017-01-22 01:00"},{"value":"0.14689821882951648"}],[{"value":"2017-01-22 02:00"},{"value":"0.18955771708322594"}],[{"value":"2017-01-22 03:00"},{"value":"0.1267548931383577"}],[{"value":"2017-01-22 04:00"},{"value":"0.10627040992077161"}],[{"value":"2017-01-22 05:00"},{"value":"0.13949204864359213"}],[{"value":"2017-01-22 06:00"},{"value":"0.18298706643151086"}],[{"value":"2017-01-22 07:00"},{"value":"0.08909306666666668"}],[{"value":"2017-01-22 08:00"},{"value":"0.10219826756496628"}],[{"value":"2017-01-22 09:00"},{"value":"0.1094142797841428"}],[{"value":"2017-01-22 10:00"},{"value":"0.13313847651496716"}],[{"value":"2017-01-22 11:00"},{"value":"0.22065462239583333"}],[{"value":"2017-01-22 12:00"},{"value":"0.05742713769570453"}],[{"value":"2017-01-22 13:00"},{"value":"0.09727475693194094"}],[{"value":"2017-01-2
 2 14:00"},{"value":"0.15535018290655137"}],[{"value":"2017-01-22 15:00"},{"value":"0.04639948728904081"}],[{"value":"2017-01-22 16:00"},{"value":"0.16877034166960198"}],[{"value":"2017-01-22 17:00"},{"value":"0.16052199413489737"}],[{"value":"2017-01-22 18:00"},{"value":"0.1477992900054615"}],[{"value":"2017-01-22 19:00"},{"value":"0.16697590764597625"}],[{"value":"2017-01-22 20:00"},{"value":"0.12821210741920802"}],[{"value":"2017-01-22 21:00"},{"value":"0.26528622064687635"}],[{"value":"2017-01-22 22:00"},{"value":"0.10951478429471645"}],[{"value":"2017-01-22 23:00"},{"value":"8.907103825136611E-4"}],[{"value":"2017-01-23 00:00"},{"value":"0.061153930530164546"}],[{"value":"2017-01-23 01:00"},{"value":"0.19865295815295814"}],[{"value":"2017-01-23 02:00"},{"value":"0.06403429247142294"}],[{"value":"2017-01-23 03:00"},{"value":"0.11023640081799592"}],[{"value":"2017-01-23 04:00"},{"value":"0.1491297059631767"}],[{"value":"2017-01-23 05:00"},{"value":"0.03294133885438233"}],[{"value"
 :"2017-01-23 06:00"},{"value":"0.13834876543209879"}],[{"value":"2017-01-23 07:00"},{"value":"0.1727832289998544"}],[{"value":"2017-01-23 08:00"},{"value":"0.15339118797611695"}],[{"value":"2017-01-23 09:00"},{"value":"0.12512143559488692"}],[{"value":"2017-01-23 10:00"},{"value":"0.09902022559315442"}],[{"value":"2017-01-23 11:00"},{"value":"0.22309696376101862"}],[{"value":"2017-01-23 12:00"},{"value":"0.17790415913200722"}],[{"value":"2017-01-23 13:00"},{"value":"0.20218280871670705"}],[{"value":"2017-01-23 14:00"},{"value":"0.1394567465628422"}],[{"value":"2017-01-23 15:00"},{"value":"0.12533896282698487"}],[{"value":"2017-01-23 16:00"},{"value":"0.04402594059405941"}],[{"value":"2017-01-23 17:00"},{"value":"0.3417320453590928"}],[{"value":"2017-01-23 18:00"},{"value":"0.054911006018916596"}],[{"value":"2017-01-23 19:00"},{"value":"0.16920854341736694"}],[{"value":"2017-01-23 20:00"},{"value":"0.06835967170904408"}],[{"value":"2017-01-23 21:00"},{"value":"0.13492999480159418"}],
 [{"value":"2017-01-23 22:00"},{"value":"0.046313544415127535"}],[{"value":"2017-01-23 23:00"},{"value":"0.19541715542521992"}],[{"value":"2017-01-24 00:00"},{"value":"0.1406265218821981"}],[{"value":"2017-01-24 01:00"},{"value":"0.07935322952710494"}],[{"value":"2017-01-24 02:00"},{"value":"0.1307764017887857"}],[{"value":"2017-01-24 03:00"},{"value":"0.0625874330248417"}],[{"value":"2017-01-24 04:00"},{"value":"0.3031566750629723"}],[{"value":"2017-01-24 05:00"},{"value":"0.1141354865771812"}],[{"value":"2017-01-24 06:00"},{"value":"0.12247032280458175"}],[{"value":"2017-01-24 07:00"},{"value":"0.14380869433504992"}],[{"value":"2017-01-24 08:00"},{"value":"0.21328400361500227"}],[{"value":"2017-01-24 09:00"},{"value":"0.19322595356550581"}],[{"value":"2017-01-24 10:00"},{"value":"0.1620773018217627"}],[{"value":"2017-01-24 11:00"},{"value":"0.04968894899536321"}],[{"value":"2017-01-24 12:00"},{"value":"0.05735494689770822"}],[{"value":"2017-01-24 13:00"},{"value":"0.092003286014721
 31"}],[{"value":"2017-01-24 14:00"},{"value":"0.10968589174800354"}],[{"value":"2017-01-24 15:00"},{"value":"0.022384343920436315"}],[{"value":"2017-01-24 16:00"},{"value":"0.010777777777777778"}]],"columnNames":[{"name":"time","index":0,"aggr":"sum"},{"name":"Duration","index":1,"aggr":"sum"}],"rows":[["2017-01-18 15:00","0.19031612689289265"],["2017-01-18 16:00","0.09600653008962867"],["2017-01-18 17:00","0.11555946935041171"],["2017-01-18 18:00","0.12217770034843205"],["2017-01-18 19:00","0.1669958012596221"],["2017-01-18 20:00","0.12340630630630631"],["2017-01-18 21:00","0.10697542533081285"],["2017-01-18 22:00","0.14323700184745317"],["2017-01-18 23:00","0.1631703554661301"],["2017-01-19 00:00","0.13247930390325435"],["2017-01-19 01:00","0.04939716626079257"],["2017-01-19 02:00","0.10303833605220229"],["2017-01-19 03:00","0.10067531806615776"],["2017-01-19 04:00","0.03247731397459165"],["2017-01-19 05:00","0.22729497354497355"],["2017-01-19 06:00","0.21010773834972213"],["2017-
 01-19 07:00","0.08055848623853211"],["2017-01-19 08:00","3.9832007073386383E-4"],["2017-01-19 09:00","0.19030351321984787"],["2017-01-19 10:00","0.1452054202670856"],["2017-01-19 11:00","0.1482769953051643"],["2017-01-19 12:00","0.07671529475685955"],["2017-01-19 13:00","0.042551237766263676"],["2017-01-19 14:00","0.029415181518151815"],["2017-01-19 15:00","0.20017679239850272"],["2017-01-19 16:00","0.08409375"],["2017-01-19 17:00","0.2308393009377664"],["2017-01-19 18:00","0.09319999999999999"],["2017-01-19 19:00","0.18260548885077185"],["2017-01-19 20:00","4.796978281397545E-4"],["2017-01-19 21:00","0.11935314236853357"],["2017-01-19 22:00","0.15877311912225706"],["2017-01-19 23:00","0.23679060665362034"],["2017-01-20 00:00","0.15056687381788708"],["2017-01-20 01:00","0.13923590755355128"],["2017-01-20 02:00","0.14459666498231427"],["2017-01-20 03:00","0.08863527180783819"],["2017-01-20 04:00","0.1746280367695338"],["2017-01-20 05:00","0.029050151318633807"],["2017-01-20 06:00","0
 .0017896739130434784"],["2017-01-20 07:00","0.09208272506082724"],["2017-01-20 08:00","0.10011607507139941"],["2017-01-20 09:00","0.0844285123966942"],["2017-01-20 10:00","0.006947856827220504"],["2017-01-20 11:00","0.06180100532229451"],["2017-01-20 12:00","0.031015128006206365"],["2017-01-20 13:00","0.10380992413368874"],["2017-01-20 14:00","0.1563943695861405"],["2017-01-20 15:00","6.302054414214325E-4"],["2017-01-20 16:00","0.0026234446627373936"],["2017-01-20 17:00","0.1754280601288475"],["2017-01-20 18:00","0.09807907692307692"],["2017-01-20 19:00","0.07658407079646017"],["2017-01-20 20:00","0.20836499392255597"],["2017-01-20 21:00","0.06330415754923414"],["2017-01-20 22:00","0.16029337948939854"],["2017-01-20 23:00","0.19351168451801365"],["2017-01-21 00:00","0.14672251389746754"],["2017-01-21 01:00","0.10111322431567722"],["2017-01-21 02:00","0.09224283854166666"],["2017-01-21 03:00","0.13477034174125302"],["2017-01-21 04:00","0.23768948247078464"],["2017-01-21 05:00","0.121
 8173241852487"],["2017-01-21 06:00","0.0662239502332815"],["2017-01-21 07:00","0.16158068368413195"],["2017-01-21 08:00","0.135581317764804"],["2017-01-21 09:00","0.12834492050660198"],["2017-01-21 10:00","0.09243758649321893"],["2017-01-21 11:00","0.037721259124087594"],["2017-01-21 12:00","0.001694857142857143"],["2017-01-21 13:00","0.2279521938441388"],["2017-01-21 14:00","0.04165118733509235"],["2017-01-21 15:00","0.216584549071618"],["2017-01-21 16:00","0.17769900240798076"],["2017-01-21 17:00","0.18550478182904964"],["2017-01-21 18:00","0.18409959413050267"],["2017-01-21 19:00","0.14356826056826055"],["2017-01-21 20:00","0.04235902574379528"],["2017-01-21 21:00","0.08232819247378162"],["2017-01-21 22:00","0.0951803138373752"],["2017-01-21 23:00","0.06805885245901637"],["2017-01-22 00:00","0.10884062582869265"],["2017-01-22 01:00","0.14689821882951648"],["2017-01-22 02:00","0.18955771708322594"],["2017-01-22 03:00","0.1267548931383577"],["2017-01-22 04:00","0.10627040992077161"
 ],["2017-01-22 05:00","0.13949204864359213"],["2017-01-22 06:00","0.18298706643151086"],["2017-01-22 07:00","0.08909306666666668"],["2017-01-22 08:00","0.10219826756496628"],["2017-01-22 09:00","0.1094142797841428"],["2017-01-22 10:00","0.13313847651496716"],["2017-01-22 11:00","0.22065462239583333"],["2017-01-22 12:00","0.05742713769570453"],["2017-01-22 13:00","0.09727475693194094"],["2017-01-22 14:00","0.15535018290655137"],["2017-01-22 15:00","0.04639948728904081"],["2017-01-22 16:00","0.16877034166960198"],["2017-01-22 17:00","0.16052199413489737"],["2017-01-22 18:00","0.1477992900054615"],["2017-01-22 19:00","0.16697590764597625"],["2017-01-22 20:00","0.12821210741920802"],["2017-01-22 21:00","0.26528622064687635"],["2017-01-22 22:00","0.10951478429471645"],["2017-01-22 23:00","8.907103825136611E-4"],["2017-01-23 00:00","0.061153930530164546"],["2017-01-23 01:00","0.19865295815295814"],["2017-01-23 02:00","0.06403429247142294"],["2017-01-23 03:00","0.11023640081799592"],["2017
 -01-23 04:00","0.1491297059631767"],["2017-01-23 05:00","0.03294133885438233"],["2017-01-23 06:00","0.13834876543209879"],["2017-01-23 07:00","0.1727832289998544"],["2017-01-23 08:00","0.15339118797611695"],["2017-01-23 09:00","0.12512143559488692"],["2017-01-23 10:00","0.09902022559315442"],["2017-01-23 11:00","0.22309696376101862"],["2017-01-23 12:00","0.17790415913200722"],["2017-01-23 13:00","0.20218280871670705"],["2017-01-23 14:00","0.1394567465628422"],["2017-01-23 15:00","0.12533896282698487"],["2017-01-23 16:00","0.04402594059405941"],["2017-01-23 17:00","0.3417320453590928"],["2017-01-23 18:00","0.054911006018916596"],["2017-01-23 19:00","0.16920854341736694"],["2017-01-23 20:00","0.06835967170904408"],["2017-01-23 21:00","0.13492999480159418"],["2017-01-23 22:00","0.046313544415127535"],["2017-01-23 23:00","0.19541715542521992"],["2017-01-24 00:00","0.1406265218821981"],["2017-01-24 01:00","0.07935322952710494"],["2017-01-24 02:00","0.1307764017887857"],["2017-01-24 03:00
 ","0.0625874330248417"],["2017-01-24 04:00","0.3031566750629723"],["2017-01-24 05:00","0.1141354865771812"],["2017-01-24 06:00","0.12247032280458175"],["2017-01-24 07:00","0.14380869433504992"],["2017-01-24 08:00","0.21328400361500227"],["2017-01-24 09:00","0.19322595356550581"],["2017-01-24 10:00","0.1620773018217627"],["2017-01-24 11:00","0.04968894899536321"],["2017-01-24 12:00","0.05735494689770822"],["2017-01-24 13:00","0.09200328601472131"],["2017-01-24 14:00","0.10968589174800354"],["2017-01-24 15:00","0.022384343920436315"],["2017-01-24 16:00","0.010777777777777778"]]},"text":"%spark.sql\n\nselect \n    from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') as time,\n    mean(duration) as Duration\nfrom yaf\ngroup by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00')\norder by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') asc"},{"text":"%md\n \n### Flow Duration - Internal\n\nThe average duration of each internal flow by hour in fractional seconds.","dateUpdated":"2017-01-24T21:5
 6:00+0000","config":{"colWidth":3,"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true,"editorMode":"ace/mode/markdown","editorHide":true},"settings":{"params":{"formName":"mm","minutes":"mm"},"forms":{}},"jobName":"paragraph_1485192111229_-181663106","id":"20170123-172151_1334413283","result":{"code":"SUCCESS","type":"HTML","msg":"<h3>Flow Duration - Internal</h3>\n<p>The average duration of each internal flow by hour in fractional seconds.</p>\n"},"dateCreated":"2017-01-23T05:21:51+0000","dateStarted":"2017-01-24T21:56:00+0000","dateFinished":"2017-01-24T21:56:00+0000","status":"FINISHED","progressUpdateIntervalMs":500,"$$hashKey":"object:5248","focus":true},{"text":"%spark.sql\n\nselect \n    from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') as time,\n    mean(duration) as Internal\nfrom yaf\nwhere \n    is_internal(ip_src_addr) == is_internal(ip_dst_addr) and\n    datediff(current_timestamp(), from_unixtime(timesta
 mp/1000)) <= 7\ngroup by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00')\norder by from_unixtime(timestamp/1000,'YYYY-MM-dd HH:00') asc","dateUpdated":"2017-01-2

<TRUNCATED>


[15/17] incubator-metron git commit: METRON-660 Prototype structure for .md assembly into book with maven doxia-markdown plugin.

Posted by ce...@apache.org.
METRON-660 Prototype structure for .md assembly into book with maven doxia-markdown plugin.


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/dd99533e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/dd99533e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/dd99533e

Branch: refs/heads/Metron_0.3.1
Commit: dd99533e0511495bdb27b2a23a0e07a13e35309f
Parents: 73cb657
Author: mattf-horton <mf...@hortonworks.com>
Authored: Mon Jan 16 17:18:57 2017 -0800
Committer: mattf-horton <mf...@hortonworks.com>
Committed: Mon Feb 6 09:58:17 2017 -0800

----------------------------------------------------------------------
 site-book/.gitignore                  |  25 +++++++++
 site-book/pom.xml                     |  78 +++++++++++++++++++++++++++++
 site-book/site/images/metron-logo.png | Bin 0 -> 21186 bytes
 site-book/site/site.xml               |  62 +++++++++++++++++++++++
 4 files changed, 165 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/dd99533e/site-book/.gitignore
----------------------------------------------------------------------
diff --git a/site-book/.gitignore b/site-book/.gitignore
new file mode 100644
index 0000000..de3a505
--- /dev/null
+++ b/site-book/.gitignore
@@ -0,0 +1,25 @@
+site/markdown/
+*~
+target/
+*dependency-reduced-pom.xml
+.idea
+*.iml
+*.iws
+.DS_Store
+*.project
+*.classpath
+*.settings
+*.metadata
+*hbase-site.xml
+*.log
+*.swp
+*.tmp
+*.bak
+*.class
+
+tmp/**
+tmp/**/*
+temp/**
+temp/**/*
+
+repodata/

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/dd99533e/site-book/pom.xml
----------------------------------------------------------------------
diff --git a/site-book/pom.xml b/site-book/pom.xml
new file mode 100644
index 0000000..74ce248
--- /dev/null
+++ b/site-book/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+		 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>org.apache.metron</groupId>
+	<artifactId>site-book</artifactId>
+	<packaging>pom</packaging>
+	<name>site-book</name>
+	<parent>
+		<groupId>org.apache.metron</groupId>
+		<artifactId>Metron</artifactId>
+		<version>0.3.0</version>
+	</parent>
+	<description>User Documentation for Metron</description>
+	<url>https://metron.incubator.apache.org/</url>
+	<scm>
+		<connection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</connection>
+		<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</developerConnection>
+		<tag>HEAD</tag>
+		<url>https://git-wip-us.apache.org/repos/asf/incubator-metron</url>
+	</scm>
+
+	<licenses>
+		<license>
+			<name>The Apache Software License, Version 2.0</name>
+			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+			<distribution>repo</distribution>
+		</license>
+	</licenses>
+	<modules>
+		<module>site</module>
+	</modules>
+
+	<properties>
+          <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+	  <plugins>
+            <plugin>
+              <groupId>org.apache.maven.plugins</groupId>
+              <artifactId>maven-site-plugin</artifactId>
+              <version>3.4</version>
+              <dependencies>
+		<dependency>
+                  <groupId>org.apache.maven.doxia</groupId>
+                  <artifactId>doxia-module-markdown</artifactId>
+                  <version>1.7</version>
+		</dependency>
+              </dependencies>
+              <executions>
+		<execution>
+                  <goals>
+                    <goal>site</goal>
+                  </goals>
+		</execution>
+              </executions>
+              <configuration>
+		<skip>false</skip>
+              </configuration>
+            </plugin>
+	  </plugins>
+	</build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/dd99533e/site-book/site/images/metron-logo.png
----------------------------------------------------------------------
diff --git a/site-book/site/images/metron-logo.png b/site-book/site/images/metron-logo.png
new file mode 100644
index 0000000..a0bc8cb
Binary files /dev/null and b/site-book/site/images/metron-logo.png differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/dd99533e/site-book/site/site.xml
----------------------------------------------------------------------
diff --git a/site-book/site/site.xml b/site-book/site/site.xml
new file mode 100644
index 0000000..90f774a
--- /dev/null
+++ b/site-book/site/site.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+  
+       http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project name="Falcon" xmlns="http://maven.apache.org/DECORATION/1.3.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/DECORATION/1.3.0 http://maven.apache.org/xsd/decoration-1.3.0.xsd">
+
+    <skin>
+        <groupId>org.apache.maven.skins</groupId>
+        <artifactId>maven-fluido-skin</artifactId>
+        <version>1.3.0</version>
+    </skin>
+
+    <custom>
+        <fluidoSkin>
+            <project>Apache Metron - Incubating</project>
+            <sideBarEnabled>false</sideBarEnabled>
+        </fluidoSkin>
+    </custom>
+
+    <bannerLeft>
+        <name>Apache Metron - Incubating</name>
+        <src>./images/metron-logo.png</src>
+        <width>740px</width>
+        <height>242px</height>
+    </bannerLeft>
+
+    <publishDate position="right"/>
+    <version position="right"/>
+
+    <body>
+        <head>
+            <script type="text/javascript">
+                $( document ).ready( function() { $( '.carousel' ).carousel( { interval: 3500 } ) } );
+            </script>
+        </head>
+
+        <breadcrumbs position="left">
+            <item name="Metron" title="Apache Metron - Incubating" href="index.html"/>
+        </breadcrumbs>
+
+        <footer>
+            © 2015-2016 The Apache Software Foundation. Apache Metron, Metron, Apache, the Apache feather logo,
+            and the Apache Metron project logo are trademarks of The Apache Software Foundation.
+        </footer>
+    </body>
+</project>


[10/17] incubator-metron git commit: METRON-684 Decouple Timestamp calculation from PROFILE_GET (cestella via ottobackwards) closes apache/incubator-metron#435

Posted by ce...@apache.org.
METRON-684 Decouple Timestamp calculation from PROFILE_GET (cestella via ottobackwards) closes apache/incubator-metron#435


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/57c38af1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/57c38af1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/57c38af1

Branch: refs/heads/Metron_0.3.1
Commit: 57c38af1c014a8c9158c51b6d5f9042536b59047
Parents: 84a36a6
Author: cestella <ce...@gmail.com>
Authored: Sun Feb 5 09:30:58 2017 -0500
Committer: Otto Fowler <ot...@apache.org>
Committed: Sun Feb 5 09:30:58 2017 -0500

----------------------------------------------------------------------
 .../metron-profiler-client/README.md            |  18 +-
 .../profiler/client/HBaseProfilerClient.java    |  29 +++
 .../metron/profiler/client/ProfilerClient.java  |  15 ++
 .../profiler/client/stellar/FixedLookback.java  |  74 +++++++
 .../profiler/client/stellar/GetProfile.java     | 208 ++-----------------
 .../profiler/client/stellar/ProfilerConfig.java | 104 ++++++++++
 .../metron/profiler/client/stellar/Util.java    | 118 +++++++++++
 .../metron/profiler/client/GetProfileTest.java  |  75 +++----
 .../apache/metron/profiler/ProfilePeriod.java   |  27 ++-
 .../metron/profiler/hbase/RowKeyBuilder.java    |  16 ++
 .../profiler/hbase/SaltyRowKeyBuilder.java      |  76 +++++--
 metron-analytics/metron-profiler/README.md      |   6 +-
 metron-analytics/metron-statistics/README.md    |   4 +-
 metron-platform/metron-common/README.md         |  12 +-
 14 files changed, 533 insertions(+), 249 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/README.md b/metron-analytics/metron-profiler-client/README.md
index 105fce9..60779c8 100644
--- a/metron-analytics/metron-profiler-client/README.md
+++ b/metron-analytics/metron-profiler-client/README.md
@@ -29,8 +29,7 @@ The Stellar client consists of the `PROFILE_GET` command, which takes the follow
 REQUIRED:
     profile - The name of the profile
     entity - The name of the entity
-    durationAgo - How long ago should values be retrieved from?
-    units - The units of 'durationAgo'
+    periods - The list of profile periods to grab.  These are ProfilePeriod objects.
 OPTIONAL:
 	groups_list - Optional, must correspond to the 'groupBy' list used in profile creation - List (in square brackets) of 
             groupBy values used to filter the profile. Default is the empty list, meaning groupBy was not used when 
@@ -40,6 +39,21 @@ OPTIONAL:
 ```
 There is an older calling format where `groups_list` is specified as a sequence of group names, "varargs" style, instead of a List object.  This format is still supported for backward compatibility, but it is deprecated, and it is disallowed if the optional `config_overrides` argument is used.
 
+The `periods` field is (likely) the output of another Stellar function which defines the times to include.
+
+`PROFILE_FIXED`: The profiler periods associated with a fixed lookback starting from now.  These are ProfilePeriod objects.
+```
+REQUIRED:
+    durationAgo - How long ago should values be retrieved from?
+    units - The units of 'durationAgo'.
+OPTIONAL:
+    config_overrides - Optional - Map (in curly braces) of name:value pairs, each overriding the global config parameter
+            of the same name. Default is the empty Map, meaning no overrides.
+
+e.g. To retrieve all the profiles for the last 5 hours.  PROFILE_GET('profile', 'entity', PROFILE_FIXED(5, 'HOURS'))
+```
+
+
 ### Groups_list argument
 The `groups_list` argument in the client must exactly correspond to the [`groupBy`](../metron-profiler#groupby) configuration in the profile definition.  If `groupBy` was not used in the profile, `groups_list` must be empty in the client.  If `groupBy` was used in the profile, then the client `groups_list` is <b>not</b> optional; it must be the same length as the `groupBy` list, and specify exactly one selected group value for each `groupBy` criterion, in the same order.  For example:
 ```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/HBaseProfilerClient.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/HBaseProfilerClient.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/HBaseProfilerClient.java
index 42df6c2..7c4ec84 100644
--- a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/HBaseProfilerClient.java
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/HBaseProfilerClient.java
@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.metron.profiler.ProfilePeriod;
 import org.apache.metron.profiler.hbase.ColumnBuilder;
 import org.apache.metron.profiler.hbase.RowKeyBuilder;
 import org.apache.metron.common.utils.SerDeUtils;
@@ -111,6 +112,34 @@ public class HBaseProfilerClient implements ProfilerClient {
   }
 
   /**
+   * Fetch the values stored in a profile based on a set of timestamps.
+   *
+   * @param clazz      The type of values stored by the profile.
+   * @param profile    The name of the profile.
+   * @param entity     The name of the entity.
+   * @param groups     The groups used to sort the profile data.
+   * @param periods    The set of profile measurement periods
+   * @return A list of values.
+   */
+  @Override
+  public <T> List<T> fetch(Class<T> clazz, String profile, String entity, List<Object> groups, Iterable<ProfilePeriod> periods) {
+    byte[] columnFamily = Bytes.toBytes(columnBuilder.getColumnFamily());
+    byte[] columnQualifier = columnBuilder.getColumnQualifier("value");
+
+    // find all the row keys that satisfy this fetch
+    List<byte[]> keysToFetch = rowKeyBuilder.rowKeys(profile, entity, groups, periods);
+
+    // create a Get for each of the row keys
+    List<Get> gets = keysToFetch
+            .stream()
+            .map(k -> new Get(k).addColumn(columnFamily, columnQualifier))
+            .collect(Collectors.toList());
+
+    // get the 'gets'
+    return get(gets, columnQualifier, columnFamily, clazz);
+  }
+
+  /**
    * Submits multiple Gets to HBase and deserialize the results.
    *
    * @param gets            The gets to submit to HBase.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/ProfilerClient.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/ProfilerClient.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/ProfilerClient.java
index c6a5379..57b0e04 100644
--- a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/ProfilerClient.java
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/ProfilerClient.java
@@ -20,6 +20,8 @@
 
 package org.apache.metron.profiler.client;
 
+import org.apache.metron.profiler.ProfilePeriod;
+
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 
@@ -55,4 +57,17 @@ public interface ProfilerClient {
    * @return A list of values.
    */
   <T> List<T> fetch(Class<T> clazz, String profile, String entity, List<Object> groups, long start, long end);
+
+  /**
+   * Fetch the values stored in a profile based on a set of period keys.
+   *
+   * @param clazz   The type of values stored by the profile.
+   * @param profile The name of the profile.
+   * @param entity  The name of the entity.
+   * @param groups  The groups used to sort the profile data.
+   * @param periods The set of profile period keys
+   * @param <T>     The type of values stored by the profile.
+   * @return A list of values.
+   */
+  <T> List<T> fetch(Class<T> clazz, String profile, String entity, List<Object> groups, Iterable<ProfilePeriod> periods);
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/FixedLookback.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/FixedLookback.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/FixedLookback.java
new file mode 100644
index 0000000..c4ed582
--- /dev/null
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/FixedLookback.java
@@ -0,0 +1,74 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+package org.apache.metron.profiler.client.stellar;
+
+import org.apache.metron.common.dsl.Context;
+import org.apache.metron.common.dsl.ParseException;
+import org.apache.metron.common.dsl.Stellar;
+import org.apache.metron.common.dsl.StellarFunction;
+import org.apache.metron.profiler.ProfilePeriod;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+
+@Stellar(
+      namespace="PROFILE",
+      name="FIXED",
+      description="The profiler periods associated with a fixed lookback starting from now.",
+      params={
+        "durationAgo - How long ago should values be retrieved from?",
+        "units - The units of 'durationAgo'.",
+        "config_overrides - Optional - Map (in curly braces) of name:value pairs, each overriding the global config parameter " +
+                "of the same name. Default is the empty Map, meaning no overrides."
+      },
+      returns="The selected profile measurement periods.  These are ProfilePeriod objects."
+)
+public class FixedLookback implements StellarFunction {
+
+  @Override
+  public Object apply(List<Object> args, Context context) throws ParseException {
+    Optional<Map> configOverridesMap = Optional.empty();
+    long durationAgo = Util.getArg(0, Long.class, args);
+    String unitsName = Util.getArg(1, String.class, args);
+    TimeUnit units = TimeUnit.valueOf(unitsName);
+    if(args.size() > 2) {
+      Map rawMap = Util.getArg(2, Map.class, args);
+      configOverridesMap = rawMap == null || rawMap.isEmpty() ? Optional.empty() : Optional.of(rawMap);
+    }
+    Map<String, Object> effectiveConfigs = Util.getEffectiveConfig(context, configOverridesMap.orElse(null));
+    Long tickDuration = ProfilerConfig.PROFILER_PERIOD.get(effectiveConfigs, Long.class);
+    TimeUnit tickUnit = TimeUnit.valueOf(ProfilerConfig.PROFILER_PERIOD_UNITS.get(effectiveConfigs, String.class));
+    long end = System.currentTimeMillis();
+    long start = end - units.toMillis(durationAgo);
+    return ProfilePeriod.visitPeriods(start, end, tickDuration, tickUnit, Optional.empty(), period -> period);
+  }
+
+  @Override
+  public void initialize(Context context) {
+
+  }
+
+  @Override
+  public boolean isInitialized() {
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java
index beb55e0..ecce7e0 100644
--- a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/GetProfile.java
@@ -20,16 +20,15 @@
 
 package org.apache.metron.profiler.client.stellar;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.metron.common.dsl.Context;
 import org.apache.metron.common.dsl.ParseException;
 import org.apache.metron.common.dsl.Stellar;
 import org.apache.metron.common.dsl.StellarFunction;
-import org.apache.metron.common.utils.ConversionUtils;
 import org.apache.metron.hbase.HTableProvider;
 import org.apache.metron.hbase.TableProvider;
+import org.apache.metron.profiler.ProfilePeriod;
 import org.apache.metron.profiler.client.HBaseProfilerClient;
 import org.apache.metron.profiler.client.ProfilerClient;
 import org.apache.metron.profiler.hbase.ColumnBuilder;
@@ -40,16 +39,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 import static java.lang.String.format;
-import static org.apache.metron.common.dsl.Context.Capabilities.GLOBAL_CONFIG;
+import static org.apache.metron.profiler.client.stellar.ProfilerConfig.*;
+import static org.apache.metron.profiler.client.stellar.Util.getArg;
+import static org.apache.metron.profiler.client.stellar.Util.getEffectiveConfig;
 
 /**
  * A Stellar function that can retrieve data contained within a Profile.
@@ -86,8 +82,7 @@ import static org.apache.metron.common.dsl.Context.Capabilities.GLOBAL_CONFIG;
         params={
           "profile - The name of the profile.",
           "entity - The name of the entity.",
-          "durationAgo - How long ago should values be retrieved from?",
-          "units - The units of 'durationAgo'.",
+          "periods - The list of profile periods to grab.  These are ProfilePeriod objects.",
           "groups_list - Optional, must correspond to the 'groupBy' list used in profile creation - List (in square brackets) of "+
                   "groupBy values used to filter the profile. Default is the " +
                   "empty list, meaning groupBy was not used when creating the profile.",
@@ -98,62 +93,7 @@ import static org.apache.metron.common.dsl.Context.Capabilities.GLOBAL_CONFIG;
 )
 public class GetProfile implements StellarFunction {
 
-  /**
-   * A global property that defines the name of the HBase table used to store profile data.
-   */
-  public static final String PROFILER_HBASE_TABLE = "profiler.client.hbase.table";
-
-  /**
-   * A global property that defines the name of the column family used to store profile data.
-   */
-  public static final String PROFILER_COLUMN_FAMILY = "profiler.client.hbase.column.family";
-
-  /**
-   * A global property that defines the name of the HBaseTableProvider implementation class.
-   */
-  public static final String PROFILER_HBASE_TABLE_PROVIDER = "hbase.provider.impl";
 
-  /**
-   * A global property that defines the duration of each profile period.  This value
-   * should be defined along with 'profiler.client.period.duration.units'.
-   */
-  public static final String PROFILER_PERIOD = "profiler.client.period.duration";
-
-  /**
-   * A global property that defines the units of the profile period duration.  This value
-   * should be defined along with 'profiler.client.period.duration'.
-   */
-  public static final String PROFILER_PERIOD_UNITS = "profiler.client.period.duration.units";
-
-  /**
-   * A global property that defines the salt divisor used to store profile data.
-   */
-  public static final String PROFILER_SALT_DIVISOR = "profiler.client.salt.divisor";
-
-  /**
-   * The default Profile HBase table name should none be defined in the global properties.
-   */
-  public static final String PROFILER_HBASE_TABLE_DEFAULT = "profiler";
-
-  /**
-   * The default Profile column family name should none be defined in the global properties.
-   */
-  public static final String PROFILER_COLUMN_FAMILY_DEFAULT = "P";
-
-  /**
-   * The default Profile period duration should none be defined in the global properties.
-   */
-  public static final String PROFILER_PERIOD_DEFAULT = "15";
-
-  /**
-   * The default units of the Profile period should none be defined in the global properties.
-   */
-  public static final String PROFILER_PERIOD_UNITS_DEFAULT = "MINUTES";
-
-  /**
-   * The default salt divisor should none be defined in the global properties.
-   */
-  public static final String PROFILER_SALT_DIVISOR_DEFAULT = "1000";
 
   /**
    * Cached client that can retrieve profile values.
@@ -193,29 +133,27 @@ public class GetProfile implements StellarFunction {
 
     String profile = getArg(0, String.class, args);
     String entity = getArg(1, String.class, args);
-    long durationAgo = getArg(2, Long.class, args);
-    String unitsName = getArg(3, String.class, args);
-    TimeUnit units = TimeUnit.valueOf(unitsName);
+    Optional<List<ProfilePeriod>> periods = Optional.ofNullable(getArg(2, List.class, args));
     //Optional arguments
     @SuppressWarnings("unchecked")
     List<Object> groups = null;
     Map configOverridesMap = null;
-    if (args.size() < 5) {
+    if (args.size() < 4) {
       // no optional args, so default 'groups' and configOverridesMap remains null.
       groups = new ArrayList<>(0);
     }
-    else if (args.get(4) instanceof List) {
+    else if (args.get(3) instanceof List) {
       // correct extensible usage
-      groups = getArg(4, List.class, args);
-      if (args.size() >= 6) {
-        configOverridesMap = getArg(5, Map.class, args);
+      groups = getArg(3, List.class, args);
+      if (args.size() >= 5) {
+        configOverridesMap = getArg(4, Map.class, args);
         if (configOverridesMap.isEmpty()) configOverridesMap = null;
       }
     }
     else {
       // Deprecated "varargs" style usage for groups_list
       // configOverridesMap cannot be specified so it remains null.
-      groups = getGroupsArg(4, args);
+      groups = getGroupsArg(3, args);
     }
 
     Map<String, Object> effectiveConfig = getEffectiveConfig(context, configOverridesMap);
@@ -229,83 +167,10 @@ public class GetProfile implements StellarFunction {
       cachedConfigMap = effectiveConfig;
     }
 
-    return client.fetch(Object.class, profile, entity, groups, durationAgo, units);
+    return client.fetch(Object.class, profile, entity, groups, periods.orElse(new ArrayList<>(0)));
   }
 
-  /**
-   * Merge the configuration parameter override Map into the config from global context,
-   * and return the result.  This has to be done on each call, because either may have changed.
-   *
-   * Only the six recognized profiler client config parameters may be set,
-   * all other key-value pairs in either Map will be ignored.
-   *
-   * Type violations cause a Stellar ParseException.
-   *
-   * @param context - from which we get the global config Map.
-   * @param configOverridesMap - Map of overrides as described above.
-   * @return effective config Map with overrides applied.
-   * @throws ParseException - if any override values are of wrong type.
-   */
-  private Map<String, Object> getEffectiveConfig(
-              Context context
-              , Map configOverridesMap
-  ) throws ParseException {
-
-    final String[] KEYLIST = {
-            PROFILER_HBASE_TABLE, PROFILER_COLUMN_FAMILY,
-            PROFILER_HBASE_TABLE_PROVIDER, PROFILER_PERIOD,
-            PROFILER_PERIOD_UNITS, PROFILER_SALT_DIVISOR};
-
-    // ensure the required capabilities are defined
-    final Context.Capabilities[] required = { GLOBAL_CONFIG };
-    validateCapabilities(context, required);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> global = (Map<String, Object>) context.getCapability(GLOBAL_CONFIG).get();
-
-    Map<String, Object> result = new HashMap<String, Object>(6);
-    Object v;
-
-    // extract the relevant parameters from global
-    for (String k : KEYLIST) {
-      v = global.get(k);
-      if (v != null) result.put(k, v);
-    }
-    if (configOverridesMap == null) return result;
 
-    // extract override values, typechecking as we go
-    try {
-      for (Object key : configOverridesMap.keySet()) {
-        if (!(key instanceof String)) {
-          // Probably unintended user error, so throw an exception rather than ignore
-          throw new ParseException("Non-string key in config_overrides map is not allowed: " + key.toString());
-        }
-        switch ((String) key) {
-          case PROFILER_HBASE_TABLE:
-          case PROFILER_COLUMN_FAMILY:
-          case PROFILER_HBASE_TABLE_PROVIDER:
-          case PROFILER_PERIOD_UNITS:
-            v = configOverridesMap.get(key);
-            v = ConversionUtils.convert(v, String.class);
-            result.put((String) key, v);
-            break;
-          case PROFILER_PERIOD:
-          case PROFILER_SALT_DIVISOR:
-            // be tolerant if the user put a number instead of a string
-            // regardless, validate that it is an integer value
-            v = configOverridesMap.get(key);
-            long vlong = ConversionUtils.convert(v, Long.class);
-            result.put((String) key, String.valueOf(vlong));
-            break;
-          default:
-            LOG.warn("Ignoring unallowed key {} in config_overrides map.", key);
-            break;
-        }
-      }
-    } catch (ClassCastException | NumberFormatException cce) {
-      throw new ParseException("Type violation in config_overrides map values: ", cce);
-    }
-    return result;
-  }
 
   /**
    * Get the groups defined by the user.
@@ -329,40 +194,9 @@ public class GetProfile implements StellarFunction {
     return groups;
   }
 
-  /**
-   * Ensure that the required capabilities are defined.
-   * @param context The context to validate.
-   * @param required The required capabilities.
-   * @throws IllegalStateException if all of the required capabilities are not present in the Context.
-   */
-  private void validateCapabilities(Context context, Context.Capabilities[] required) throws IllegalStateException {
 
-    // collect the name of each missing capability
-    String missing = Stream
-            .of(required)
-            .filter(c -> !context.getCapability(c).isPresent())
-            .map(c -> c.toString())
-            .collect(Collectors.joining(", "));
 
-    if(StringUtils.isNotBlank(missing) || context == null) {
-      throw new IllegalStateException("missing required context: " + missing);
-    }
-  }
-
-  /**
-   * Get an argument from a list of arguments.
-   * @param index The index within the list of arguments.
-   * @param clazz The type expected.
-   * @param args All of the arguments.
-   * @param <T> The type of the argument expected.
-   */
-  private <T> T getArg(int index, Class<T> clazz, List<Object> args) {
-    if(index >= args.size()) {
-      throw new IllegalArgumentException(format("expected at least %d argument(s), found %d", index+1, args.size()));
-    }
 
-    return ConversionUtils.convert(args.get(index), clazz);
-  }
 
   /**
    * Creates the ColumnBuilder to use in accessing the profile data.
@@ -371,7 +205,7 @@ public class GetProfile implements StellarFunction {
   private ColumnBuilder getColumnBuilder(Map<String, Object> global) {
     ColumnBuilder columnBuilder;
 
-    String columnFamily = (String) global.getOrDefault(PROFILER_COLUMN_FAMILY, PROFILER_COLUMN_FAMILY_DEFAULT);
+    String columnFamily = PROFILER_COLUMN_FAMILY.get(global, String.class);
     columnBuilder = new ValueOnlyColumnBuilder(columnFamily);
 
     return columnBuilder;
@@ -384,18 +218,16 @@ public class GetProfile implements StellarFunction {
   private RowKeyBuilder getRowKeyBuilder(Map<String, Object> global) {
 
     // how long is the profile period?
-    String configuredDuration = (String) global.getOrDefault(PROFILER_PERIOD, PROFILER_PERIOD_DEFAULT);
-    long duration = Long.parseLong(configuredDuration);
+    long duration = PROFILER_PERIOD.get(global, Long.class);
     LOG.debug("profiler client: {}={}", PROFILER_PERIOD, duration);
 
     // which units are used to define the profile period?
-    String configuredUnits = (String) global.getOrDefault(PROFILER_PERIOD_UNITS, PROFILER_PERIOD_UNITS_DEFAULT);
+    String configuredUnits = PROFILER_PERIOD_UNITS.get(global, String.class);
     TimeUnit units = TimeUnit.valueOf(configuredUnits);
     LOG.debug("profiler client: {}={}", PROFILER_PERIOD_UNITS, units);
 
     // what is the salt divisor?
-    String configuredSaltDivisor = (String) global.getOrDefault(PROFILER_SALT_DIVISOR, PROFILER_SALT_DIVISOR_DEFAULT);
-    int saltDivisor = Integer.parseInt(configuredSaltDivisor);
+    Integer saltDivisor = PROFILER_SALT_DIVISOR.get(global, Integer.class);
     LOG.debug("profiler client: {}={}", PROFILER_SALT_DIVISOR, saltDivisor);
 
     return new SaltyRowKeyBuilder(saltDivisor, duration, units);
@@ -408,7 +240,7 @@ public class GetProfile implements StellarFunction {
    */
   private HTableInterface getTable(Map<String, Object> global) {
 
-    String tableName = (String) global.getOrDefault(PROFILER_HBASE_TABLE, PROFILER_HBASE_TABLE_DEFAULT);
+    String tableName = PROFILER_HBASE_TABLE.get(global, String.class);
     TableProvider provider = getTableProvider(global);
 
     try {
@@ -424,7 +256,7 @@ public class GetProfile implements StellarFunction {
    * @param global The global configuration.
    */
   private TableProvider getTableProvider(Map<String, Object> global) {
-    String clazzName = (String) global.getOrDefault(PROFILER_HBASE_TABLE_PROVIDER, HTableProvider.class.getName());
+    String clazzName = PROFILER_HBASE_TABLE_PROVIDER.get(global, String.class);
 
     TableProvider provider;
     try {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/ProfilerConfig.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/ProfilerConfig.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/ProfilerConfig.java
new file mode 100644
index 0000000..f409ca8
--- /dev/null
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/ProfilerConfig.java
@@ -0,0 +1,104 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+
+package org.apache.metron.profiler.client.stellar;
+
+import org.apache.metron.common.utils.ConversionUtils;
+import org.apache.metron.hbase.HTableProvider;
+
+import java.util.Map;
+
+public enum ProfilerConfig {
+  /**
+   * A global property that defines the name of the HBase table used to store profile data.
+   */
+  PROFILER_HBASE_TABLE("profiler.client.hbase.table", "profiler", String.class),
+
+  /**
+   * A global property that defines the name of the column family used to store profile data.
+   */
+  PROFILER_COLUMN_FAMILY("profiler.client.hbase.column.family", "P", String.class),
+
+  /**
+   * A global property that defines the name of the HBaseTableProvider implementation class.
+   */
+  PROFILER_HBASE_TABLE_PROVIDER("hbase.provider.impl", HTableProvider.class.getName(), String.class),
+
+  /**
+   * A global property that defines the duration of each profile period.  This value
+   * should be defined along with 'profiler.client.period.duration.units'.
+   */
+  PROFILER_PERIOD("profiler.client.period.duration", 15L, Long.class),
+
+  /**
+   * A global property that defines the units of the profile period duration.  This value
+   * should be defined along with 'profiler.client.period.duration'.
+   */
+  PROFILER_PERIOD_UNITS("profiler.client.period.duration.units", "MINUTES", String.class),
+
+  /**
+   * A global property that defines the salt divisor used to store profile data.
+   */
+  PROFILER_SALT_DIVISOR("profiler.client.salt.divisor", 1000L, Long.class);
+
+  String key;
+  Object defaultValue;
+  Class<?> valueType;
+  ProfilerConfig(String key, Object defaultValue, Class<?> valueType) {
+    this.key = key;
+    this.defaultValue = defaultValue;
+    this.valueType = valueType;
+  }
+
+  public String getKey() {
+    return key;
+  }
+
+  public Object getDefault() {
+    return getDefault(valueType);
+  }
+
+  public <T> T getDefault(Class<T> clazz) {
+    return defaultValue == null?null:ConversionUtils.convert(defaultValue, clazz);
+  }
+
+  public Object get(Map<String, Object> profilerConfig) {
+    return getOrDefault(profilerConfig, defaultValue);
+  }
+
+  public Object getOrDefault(Map<String, Object> profilerConfig, Object defaultValue) {
+    return getOrDefault(profilerConfig, defaultValue, valueType);
+  }
+
+  public <T> T get(Map<String, Object> profilerConfig, Class<T> clazz) {
+    return getOrDefault(profilerConfig, defaultValue, clazz);
+  }
+
+  public <T> T getOrDefault(Map<String, Object> profilerConfig, Object defaultValue, Class<T> clazz) {
+    Object o = profilerConfig.getOrDefault(key, defaultValue);
+    return o == null?null:ConversionUtils.convert(o, clazz);
+  }
+
+  @Override
+  public String toString() {
+    return key;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/Util.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/Util.java b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/Util.java
new file mode 100644
index 0000000..ab22967
--- /dev/null
+++ b/metron-analytics/metron-profiler-client/src/main/java/org/apache/metron/profiler/client/stellar/Util.java
@@ -0,0 +1,118 @@
+/*
+ *
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ */
+package org.apache.metron.profiler.client.stellar;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.metron.common.dsl.Context;
+import org.apache.metron.common.dsl.ParseException;
+import org.apache.metron.common.utils.ConversionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.lang.String.format;
+import static org.apache.metron.common.dsl.Context.Capabilities.GLOBAL_CONFIG;
+
+public class Util {
+  private static final Logger LOG = LoggerFactory.getLogger(Util.class);
+
+  /**
+   * Ensure that the required capabilities are defined.
+   * @param context The context to validate.  Must not be null.
+   * @param required The required capabilities.
+   * @throws IllegalStateException if the context is null, or if any of the required
+   *         capabilities are not present in the Context.
+   */
+  public static void validateCapabilities(Context context, Context.Capabilities[] required) throws IllegalStateException {
+
+    // guard against a null context *before* dereferencing it; previously the null
+    // check ran after context.getCapability(...) had been called, so a null context
+    // raised an NPE instead of the intended IllegalStateException
+    if(context == null) {
+      throw new IllegalStateException("missing required context: no context provided");
+    }
+
+    // collect the name of each missing capability
+    String missing = Stream
+            .of(required)
+            .filter(c -> !context.getCapability(c).isPresent())
+            .map(c -> c.toString())
+            .collect(Collectors.joining(", "));
+
+    if(StringUtils.isNotBlank(missing)) {
+      throw new IllegalStateException("missing required context: " + missing);
+    }
+  }
+
+  /**
+   * Merge the configuration parameter override Map into the config from global context,
+   * and return the result.  This has to be done on each call, because either may have changed.
+   *
+   * Only the recognized profiler client config parameters (the {@link ProfilerConfig}
+   * values) may be set; all other key-value pairs in either Map will be ignored.
+   *
+   * Type violations cause a Stellar ParseException.
+   *
+   * @param context - from which we get the global config Map.
+   * @param configOverridesMap - Map of overrides as described above.  May be null.
+   * @return effective config Map with overrides applied.
+   * @throws ParseException - if any override values are of wrong type.
+   */
+  public static Map<String, Object> getEffectiveConfig(Context context, Map configOverridesMap) throws ParseException {
+    // ensure the required capabilities are defined
+    final Context.Capabilities[] required = { GLOBAL_CONFIG };
+    validateCapabilities(context, required);
+    @SuppressWarnings("unchecked")
+    Map<String, Object> global = (Map<String, Object>) context.getCapability(GLOBAL_CONFIG).get();
+
+    // size the result for one entry per recognized parameter
+    Map<String, Object> result = new HashMap<>(ProfilerConfig.values().length);
+
+    // for each parameter: an override wins over the global value, which wins over the default
+    for (ProfilerConfig k : ProfilerConfig.values()) {
+      Object globalValue = global.containsKey(k.getKey()) ? ConversionUtils.convert(global.get(k.getKey()), k.valueType) : null;
+      Object overrideValue = configOverridesMap == null ? null : k.getOrDefault(configOverridesMap, null);
+      Object defaultValue = k.defaultValue;
+      if(overrideValue != null) {
+        result.put(k.getKey(), overrideValue);
+      }
+      else if(globalValue != null) {
+        result.put(k.getKey(), globalValue);
+      }
+      else if(defaultValue != null) {
+        result.put(k.getKey(), defaultValue);
+      }
+    }
+    return result;
+  }
+
+
+  /**
+   * Get an argument from a list of arguments.
+   * @param index The index within the list of arguments.
+   * @param clazz The type expected.
+   * @param args All of the arguments.
+   * @param <T> The type of the argument expected.
+   * @return The argument at 'index', converted to type 'clazz'.
+   * @throws IllegalArgumentException if fewer than index+1 arguments were supplied.
+   */
+  public static <T> T getArg(int index, Class<T> clazz, List<Object> args) {
+    if(index >= args.size()) {
+      throw new IllegalArgumentException(format("expected at least %d argument(s), found %d", index+1, args.size()));
+    }
+
+    return ConversionUtils.convert(args.get(index), clazz);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-client/src/test/java/org/apache/metron/profiler/client/GetProfileTest.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/src/test/java/org/apache/metron/profiler/client/GetProfileTest.java b/metron-analytics/metron-profiler-client/src/test/java/org/apache/metron/profiler/client/GetProfileTest.java
index 960795b..e1ebdbd 100644
--- a/metron-analytics/metron-profiler-client/src/test/java/org/apache/metron/profiler/client/GetProfileTest.java
+++ b/metron-analytics/metron-profiler-client/src/test/java/org/apache/metron/profiler/client/GetProfileTest.java
@@ -28,6 +28,7 @@ import org.apache.metron.common.dsl.functions.resolver.SingletonFunctionResolver
 import org.apache.metron.common.dsl.ParseException;
 import org.apache.metron.hbase.TableProvider;
 import org.apache.metron.profiler.ProfileMeasurement;
+import org.apache.metron.profiler.client.stellar.FixedLookback;
 import org.apache.metron.profiler.client.stellar.GetProfile;
 import org.apache.metron.profiler.hbase.ColumnBuilder;
 import org.apache.metron.profiler.hbase.RowKeyBuilder;
@@ -49,12 +50,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_COLUMN_FAMILY;
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_HBASE_TABLE;
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_HBASE_TABLE_PROVIDER;
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_PERIOD;
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_PERIOD_UNITS;
-import static org.apache.metron.profiler.client.stellar.GetProfile.PROFILER_SALT_DIVISOR;
+import static org.apache.metron.profiler.client.stellar.ProfilerConfig.*;
 
 /**
  * Tests the GetProfile class.
@@ -114,18 +110,19 @@ public class GetProfileTest {
 
     // global properties
     Map<String, Object> global = new HashMap<String, Object>() {{
-      put(PROFILER_HBASE_TABLE, tableName);
-      put(PROFILER_COLUMN_FAMILY, columnFamily);
-      put(PROFILER_HBASE_TABLE_PROVIDER, MockTableProvider.class.getName());
-      put(PROFILER_PERIOD, Long.toString(periodDuration));
-      put(PROFILER_PERIOD_UNITS, periodUnits.toString());
-      put(PROFILER_SALT_DIVISOR, Integer.toString(saltDivisor));
+      put(PROFILER_HBASE_TABLE.getKey(), tableName);
+      put(PROFILER_COLUMN_FAMILY.getKey(), columnFamily);
+      put(PROFILER_HBASE_TABLE_PROVIDER.getKey(), MockTableProvider.class.getName());
+      put(PROFILER_PERIOD.getKey(), Long.toString(periodDuration));
+      put(PROFILER_PERIOD_UNITS.getKey(), periodUnits.toString());
+      put(PROFILER_SALT_DIVISOR.getKey(), Integer.toString(saltDivisor));
     }};
 
     // create the stellar execution environment
     executor = new DefaultStellarExecutor(
             new SimpleFunctionResolver()
-                    .withClass(GetProfile.class),
+                    .withClass(GetProfile.class)
+                    .withClass(FixedLookback.class),
             new Context.Builder()
                     .with(Context.Capabilities.GLOBAL_CONFIG, () -> global)
                     .build());
@@ -154,12 +151,12 @@ public class GetProfileTest {
 
     // global properties
     Map<String, Object> global = new HashMap<String, Object>() {{
-      put(PROFILER_HBASE_TABLE, tableName);
-      put(PROFILER_COLUMN_FAMILY, columnFamily);
-      put(PROFILER_HBASE_TABLE_PROVIDER, MockTableProvider.class.getName());
-      put(PROFILER_PERIOD, Long.toString(periodDuration2));
-      put(PROFILER_PERIOD_UNITS, periodUnits2.toString());
-      put(PROFILER_SALT_DIVISOR, Integer.toString(saltDivisor2));
+      put(PROFILER_HBASE_TABLE.getKey(), tableName);
+      put(PROFILER_COLUMN_FAMILY.getKey(), columnFamily);
+      put(PROFILER_HBASE_TABLE_PROVIDER.getKey(), MockTableProvider.class.getName());
+      put(PROFILER_PERIOD.getKey(), Long.toString(periodDuration2));
+      put(PROFILER_PERIOD_UNITS.getKey(), periodUnits2.toString());
+      put(PROFILER_SALT_DIVISOR.getKey(), Integer.toString(saltDivisor2));
     }};
 
     // create the modified context
@@ -170,7 +167,8 @@ public class GetProfileTest {
     // create the stellar execution environment
     executor = new DefaultStellarExecutor(
             new SimpleFunctionResolver()
-                    .withClass(GetProfile.class),
+                    .withClass(GetProfile.class)
+                    .withClass(FixedLookback.class),
             context2);
 
     return context2; //because there is no executor.getContext() method
@@ -197,7 +195,7 @@ public class GetProfileTest {
     profileWriter.write(m, count, group, val -> expectedValue);
 
     // execute - read the profile values - no groups
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS')";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'))";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -228,7 +226,7 @@ public class GetProfileTest {
     state.put("groups", group);
 
     // execute - read the profile values
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', ['weekends'])";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'), ['weekends'])";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -236,7 +234,7 @@ public class GetProfileTest {
     Assert.assertEquals(count, result.size());
 
     // test the deprecated but allowed "varargs" form of groups specification
-    expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', 'weekends')";
+    expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'), 'weekends')";
     result = run(expr, List.class);
 
     // validate - expect to read all values from the past 4 hours
@@ -266,7 +264,7 @@ public class GetProfileTest {
     state.put("groups", group);
 
     // execute - read the profile values
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', ['weekdays', 'tuesday'])";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'), ['weekdays', 'tuesday'])";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -274,7 +272,7 @@ public class GetProfileTest {
     Assert.assertEquals(count, result.size());
 
     // test the deprecated but allowed "varargs" form of groups specification
-    expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', 'weekdays', 'tuesday')";
+    expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'), 'weekdays', 'tuesday')";
     result = run(expr, List.class);
 
     // validate - expect to read all values from the past 4 hours
@@ -295,7 +293,7 @@ public class GetProfileTest {
     SingletonFunctionResolver.getInstance().initialize(empty);
 
     // validate - function should be unable to initialize
-    String expr = "PROFILE_GET('profile1', 'entity1', 1000, 'SECONDS', groups)";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(1000, 'SECONDS'), groups)";
     run(expr, List.class);
   }
 
@@ -321,7 +319,7 @@ public class GetProfileTest {
     state.put("groups", group);
 
     // execute - read the profile values
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'SECONDS')";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'SECONDS'))";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -353,13 +351,13 @@ public class GetProfileTest {
     // validate it is changed in significant way
     @SuppressWarnings("unchecked")
     Map<String, Object> global = (Map<String, Object>) context2.getCapability(Context.Capabilities.GLOBAL_CONFIG).get();
-    Assert.assertEquals(global.get(PROFILER_PERIOD), Long.toString(periodDuration2));
+    Assert.assertEquals(PROFILER_PERIOD.get(global), periodDuration2);
     Assert.assertNotEquals(periodDuration, periodDuration2);
 
     // execute - read the profile values - with (wrong) default global config values.
     // No error message at this time, but returns empty results list, because
     // row keys are not correctly calculated.
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS')";
+    String expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'))";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -368,10 +366,11 @@ public class GetProfileTest {
 
     // execute - read the profile values - with config_override.
     // first two override values are strings, third is deliberately a number.
-    expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', [], {"
-            + "'profiler.client.period.duration' : '" + periodDuration + "', "
+    String overrides = "{'profiler.client.period.duration' : '" + periodDuration + "', "
             + "'profiler.client.period.duration.units' : '" + periodUnits.toString() + "', "
-            + "'profiler.client.salt.divisor' : " + saltDivisor + " })";
+            + "'profiler.client.salt.divisor' : " + saltDivisor + " }";
+    expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS', " + overrides + "), [], " + overrides + ")"
+            ;
     result = run(expr, List.class);
 
     // validate - expect to read all values from the past 4 hours
@@ -407,15 +406,17 @@ public class GetProfileTest {
     // validate it is changed in significant way
     @SuppressWarnings("unchecked")
     Map<String, Object> global = (Map<String, Object>) context2.getCapability(Context.Capabilities.GLOBAL_CONFIG).get();
-    Assert.assertEquals(global.get(PROFILER_PERIOD), Long.toString(periodDuration2));
+    Assert.assertEquals(global.get(PROFILER_PERIOD.getKey()), Long.toString(periodDuration2));
     Assert.assertNotEquals(periodDuration, periodDuration2);
 
     // execute - read the profile values - with config_override.
     // first two override values are strings, third is deliberately a number.
-    String expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', ['weekends'], {"
-            + "'profiler.client.period.duration' : '" + periodDuration + "', "
+    String overrides = "{'profiler.client.period.duration' : '" + periodDuration + "', "
             + "'profiler.client.period.duration.units' : '" + periodUnits.toString() + "', "
-            + "'profiler.client.salt.divisor' : " + saltDivisor + " })";
+            + "'profiler.client.salt.divisor' : " + saltDivisor + " }";
+    String expr = "PROFILE_GET('profile1', 'entity1'" +
+            ", PROFILE_FIXED(4, 'HOURS', " + overrides + "), ['weekends'], " +
+            overrides + ")";
     @SuppressWarnings("unchecked")
     List<Integer> result = run(expr, List.class);
 
@@ -425,7 +426,7 @@ public class GetProfileTest {
     // execute - read the profile values - with (wrong) default global config values.
     // No error message at this time, but returns empty results list, because
     // row keys are not correctly calculated.
-    expr = "PROFILE_GET('profile1', 'entity1', 4, 'HOURS', ['weekends'])";
+    expr = "PROFILE_GET('profile1', 'entity1', PROFILE_FIXED(4, 'HOURS'), ['weekends'])";
     result = run(expr, List.class);
 
     // validate - expect to fail to read any values

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/ProfilePeriod.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/ProfilePeriod.java b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/ProfilePeriod.java
index c466919..f916d65 100644
--- a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/ProfilePeriod.java
+++ b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/ProfilePeriod.java
@@ -20,7 +20,12 @@
 
 package org.apache.metron.profiler;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
 import java.util.concurrent.TimeUnit;
+import java.util.function.Function;
+import java.util.function.Predicate;
 
 import static java.lang.String.format;
 
@@ -41,6 +46,7 @@ public class ProfilePeriod {
    */
   private long durationMillis;
 
+
   /**
    * @param epochMillis A timestamp contained somewhere within the profile period.
    * @param duration The duration of each profile period.
@@ -51,7 +57,6 @@ public class ProfilePeriod {
       throw new IllegalArgumentException(format(
               "period duration must be greater than 0; got '%d %s'", duration, units));
     }
-
     this.durationMillis = units.toMillis(duration);
     this.period = epochMillis / durationMillis;
   }
@@ -75,6 +80,7 @@ public class ProfilePeriod {
     return period;
   }
 
+
   public long getDurationMillis() {
     return durationMillis;
   }
@@ -103,4 +109,23 @@ public class ProfilePeriod {
             ", durationMillis=" + durationMillis +
             '}';
   }
+
+  /**
+   * Visit every profile period from the period containing 'startEpochMillis' up to and
+   * including the last period whose start time is <= 'endEpochMillis', applying a
+   * transformation to each period that passes the (optional) inclusion predicate.
+   *
+   * @param startEpochMillis a timestamp (epoch millis) within the first period to visit.
+   * @param endEpochMillis iteration stops once a period starts after this timestamp;
+   *                       if it precedes the first period's start, the result is empty.
+   * @param duration the duration of each profile period.
+   * @param units the units of 'duration'.
+   * @param inclusionPredicate if present, only matching periods are transformed.
+   * @param transformation applied to each included period.
+   * @param <T> the result type of the transformation.
+   * @return the transformation results, in chronological period order.
+   */
+  public static <T> List<T> visitPeriods(long startEpochMillis
+                                           , long endEpochMillis
+                                           , long duration
+                                           , TimeUnit units
+                                           , Optional<Predicate<ProfilePeriod>> inclusionPredicate
+                                           , Function<ProfilePeriod,T> transformation
+                                           )
+  {
+    ProfilePeriod period = new ProfilePeriod(startEpochMillis, duration, units);
+    List<T> ret = new ArrayList<>();
+    while(period.getStartTimeMillis() <= endEpochMillis) {
+      if(!inclusionPredicate.isPresent() || inclusionPredicate.get().test(period)) {
+        ret.add(transformation.apply(period));
+      }
+      period = period.next();
+    }
+    return ret;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/RowKeyBuilder.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/RowKeyBuilder.java b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/RowKeyBuilder.java
index b53a1ac..e49bb0a 100644
--- a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/RowKeyBuilder.java
+++ b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/RowKeyBuilder.java
@@ -21,6 +21,7 @@
 package org.apache.metron.profiler.hbase;
 
 import org.apache.metron.profiler.ProfileMeasurement;
+import org.apache.metron.profiler.ProfilePeriod;
 
 import java.io.Serializable;
 import java.util.List;
@@ -56,4 +57,19 @@ public interface RowKeyBuilder extends Serializable {
    * @return All of the row keys necessary to retrieve the profile measurements.
    */
   List<byte[]> rowKeys(String profile, String entity, List<Object> groups, long start, long end);
+
+  /**
+   * Builds a list of row keys necessary to retrieve a profile's measurements over
+   * a time horizon.
+   *
+   * This method is useful when attempting to read ProfileMeasurements stored in HBase.
+   *
+   * @param profile The name of the profile.
+   * @param entity The name of the entity.
+   * @param groups The group(s) used to sort the profile data.
+   * @param periods The profile measurement periods to compute the rowkeys for
+   * @return All of the row keys necessary to retrieve the profile measurements.
+   */
+  List<byte[]> rowKeys(String profile, String entity, List<Object> groups, Iterable<ProfilePeriod> periods);
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/SaltyRowKeyBuilder.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/SaltyRowKeyBuilder.java b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/SaltyRowKeyBuilder.java
index 4e2b44f..b01fc28 100644
--- a/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/SaltyRowKeyBuilder.java
+++ b/metron-analytics/metron-profiler-common/src/main/java/org/apache/metron/profiler/hbase/SaltyRowKeyBuilder.java
@@ -29,6 +29,7 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 
 /**
@@ -81,24 +82,40 @@ public class SaltyRowKeyBuilder implements RowKeyBuilder {
    */
   @Override
   public List<byte[]> rowKeys(String profile, String entity, List<Object> groups, long start, long end) {
-    List<byte[]> rowKeys = new ArrayList<>();
-
     // be forgiving of out-of-order start and end times; order is critical to this algorithm
     end = Math.max(start, end);
     start = Math.min(start, end);
 
     // find the starting period and advance until the end time is reached
-    ProfilePeriod period = new ProfilePeriod(start, periodDurationMillis, TimeUnit.MILLISECONDS);
-    while(period.getStartTimeMillis() <= end) {
+    return ProfilePeriod.visitPeriods( start
+                                        , end
+                                        , periodDurationMillis
+                                        , TimeUnit.MILLISECONDS
+                                        , Optional.empty()
+                                        , period -> rowKey(profile, entity, period, groups)
+                                        );
 
-      byte[] k = rowKey(profile, entity, period, groups);
-      rowKeys.add(k);
+  }
 
-      // advance to the next period
-      period = period.next();
+  /**
+   * Builds a list of row keys necessary to retrieve a profile's measurements over
+   * a time horizon.
+   * <p>
+   * This method is useful when attempting to read ProfileMeasurements stored in HBase.
+   *
+   * @param profile    The name of the profile.
+   * @param entity     The name of the entity.
+   * @param groups     The group(s) used to sort the profile data.
+   * @param periods    The profile measurement periods to compute the rowkeys for
+   * @return All of the row keys necessary to retrieve the profile measurements.
+   */
+  @Override
+  public List<byte[]> rowKeys(String profile, String entity, List<Object> groups, Iterable<ProfilePeriod> periods) {
+    List<byte[]> ret = new ArrayList<>();
+    for(ProfilePeriod period : periods) {
+      ret.add(rowKey(profile, entity, period, groups));
     }
-
-    return rowKeys;
+    return ret;
   }
 
   /**
@@ -120,6 +137,18 @@ public class SaltyRowKeyBuilder implements RowKeyBuilder {
    * @return The HBase row key.
    */
   public byte[] rowKey(String profile, String entity, ProfilePeriod period, List<Object> groups) {
+    return rowKey(profile, entity, period.getPeriod(), groups);
+  }
+
+  /**
+   * Build the row key.
+   * @param profile The name of the profile.
+   * @param entity The name of the entity.
+   * @param period The measure period
+   * @param groups The groups.
+   * @return The HBase row key.
+   */
+  public byte[] rowKey(String profile, String entity, long period, List<Object> groups) {
 
     // row key = salt + prefix + group(s) + time
     byte[] salt = getSalt(period, saltDivisor);
@@ -161,25 +190,44 @@ public class SaltyRowKeyBuilder implements RowKeyBuilder {
     groups.forEach(g -> builder.append(g));
     return Bytes.toBytes(builder.toString());
   }
-
   /**
    * Builds the 'time' portion of the row key
    * @param period The ProfilePeriod in which the ProfileMeasurement was taken.
    */
   private static byte[] timeKey(ProfilePeriod period) {
-    long thePeriod = period.getPeriod();
-    return Bytes.toBytes(thePeriod);
+    return timeKey(period.getPeriod());
+  }
+
+  /**
+   * Builds the 'time' portion of the row key
+   * @param period the period
+   */
+  private static byte[] timeKey(long period) {
+    // serialize the period index via the HBase Bytes utility (8-byte big-endian)
+    return Bytes.toBytes(period);
+  }
 
   /**
    * Calculates a salt value that is used as part of the row key.
    *
-   * The salt is calculated as 'md5(timestamp) % N' where N is a configurable value that ideally
+   * The salt is calculated as 'md5(period) % N' where N is a configurable value that ideally
    * is close to the number of nodes in the Hbase cluster.
    *
    * @param period The period in which a profile measurement is taken.
    */
   public static byte[] getSalt(ProfilePeriod period, int saltDivisor) {
+    return getSalt(period.getPeriod(), saltDivisor);
+  }
+
+  /**
+   * Calculates a salt value that is used as part of the row key.
+   *
+   * The salt is calculated as 'md5(period) % N' where N is a configurable value that ideally
+   * is close to the number of nodes in the Hbase cluster.
+   *
+   * @param period The period
+   * @param saltDivisor The salt divisor
+   */
+  public static byte[] getSalt(long period, int saltDivisor) {
     try {
       // an MD5 is 16 bytes aka 128 bits
       MessageDigest digest = MessageDigest.getInstance("MD5");

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-profiler/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler/README.md b/metron-analytics/metron-profiler/README.md
index 04e1c0d..dfff277 100644
--- a/metron-analytics/metron-profiler/README.md
+++ b/metron-analytics/metron-profiler/README.md
@@ -66,11 +66,11 @@ This section will describe the steps required to get your first profile running.
 
 1. Use the Profiler Client to read the profile data.  The below example `PROFILE_GET` command will read data written by the sample profile given above, if 10.0.0.1 is one of the input values for `ip_src_addr`.
 More information on configuring and using the client can be found [here](../metron-profiler-client).
-It is assumed that the PROFILE_GET client is correctly configured before using it.
+It is assumed that the `PROFILE_GET` client is correctly configured before using it.
     ```
     $ bin/stellar -z node1:2181
     
-    [Stellar]>>> PROFILE_GET( "test", "10.0.0.1", 30, "MINUTES")
+    [Stellar]>>> PROFILE_GET( "test", "10.0.0.1", PROFILE_FIXED(30, "MINUTES"))
     [451, 448]
     ```
 
@@ -334,7 +334,7 @@ Retrieve the last 30 minutes of profile measurements for a specific host.
 ```
 $ bin/stellar -z node1:2181
 
-[Stellar]>>> stats := PROFILE_GET( "example4", "10.0.0.1", 30, "MINUTES")
+[Stellar]>>> stats := PROFILE_GET( "example4", "10.0.0.1", PROFILE_FIXED(30, "MINUTES"))
 [Stellar]>>> stats
 [org.apache.metron.common.math.stats.OnlineStatisticsProvider@79fe4ab9, ...]
 ```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-analytics/metron-statistics/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-statistics/README.md b/metron-analytics/metron-statistics/README.md
index 257fd0b..f6ab15f 100644
--- a/metron-analytics/metron-statistics/README.md
+++ b/metron-analytics/metron-statistics/README.md
@@ -341,7 +341,7 @@ Create the following in
       "stellar" : {
         "config" : {
           "parser_score" : "OUTLIER_MAD_SCORE(OUTLIER_MAD_STATE_MERGE(
-PROFILE_GET( 'sketchy_mad', 'global', 10, 'MINUTES') ), value)"
+PROFILE_GET( 'sketchy_mad', 'global', PROFILE_FIXED(10, 'MINUTES')) ), value)"
          ,"is_alert" : "if parser_score > 3.5 then true else is_alert"
         }
       }
@@ -384,7 +384,7 @@ Create the following file at
       "onlyif": "true",
       "init" : {
         "s": "OUTLIER_MAD_STATE_MERGE(PROFILE_GET('sketchy_mad',
-'global', 5, 'MINUTES'))"
+'global', PROFILE_FIXED(5, 'MINUTES')))"
                },
       "update": {
         "s": "OUTLIER_MAD_ADD(s, value)"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/57c38af1/metron-platform/metron-common/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/README.md b/metron-platform/metron-common/README.md
index c24ae73..fbf3b50 100644
--- a/metron-platform/metron-common/README.md
+++ b/metron-platform/metron-common/README.md
@@ -124,6 +124,7 @@ The `!=` operator is the negation of the above.
 | [ `MAP_EXISTS`](#map_exists)                                                                       |
 | [ `MONTH`](#month)                                                                                 |
 | [ `PROFILE_GET`](#profile_get)                                                                     |
+| [ `PROFILE_FIXED`](#profile_fixed)                                                                     |
 | [ `PROTOCOL_TO_NAME`](#protocol_to_name)                                                           |
 | [ `REGEXP_MATCH`](#regexp_match)                                                                   |
 | [ `SPLIT`](#split)                                                                                 |
@@ -439,12 +440,19 @@ The `!=` operator is the negation of the above.
   * Input:
     * profile - The name of the profile.
     * entity - The name of the entity.
-    * durationAgo - How long ago should values be retrieved from?
-    * units - The units of 'durationAgo'.
+    * periods - The list of profile periods to grab.  These are ProfilePeriod objects.
     * groups_list - Optional, must correspond to the 'groupBy' list used in profile creation - List (in square brackets) of groupBy values used to filter the profile. Default is the empty list, meaning groupBy was not used when creating the profile.
     * config_overrides - Optional - Map (in curly braces) of name:value pairs, each overriding the global config parameter of the same name. Default is the empty Map, meaning no overrides.
   * Returns: The selected profile measurements.
 
+### `PROFILE_FIXED`
+  * Description: The profile periods associated with a fixed lookback starting from now
+  * Input:
+    * durationAgo - How long ago should values be retrieved from?
+    * units - The units of 'durationAgo'.
+    * config_overrides - Optional - Map (in curly braces) of name:value pairs, each overriding the global config parameter of the same name. Default is the empty Map, meaning no overrides.
+  * Returns: The selected profile measurement periods.  These are ProfilePeriod objects.
+
 ### `PROTOCOL_TO_NAME`
   * Description: Converts the IANA protocol number to the protocol name
   * Input:


[03/17] incubator-metron git commit: METRON-678: Multithread the flat file loader closes apache/incubator-metron#428

Posted by ce...@apache.org.
METRON-678: Multithread the flat file loader closes apache/incubator-metron#428


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/ad8724ee
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/ad8724ee
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/ad8724ee

Branch: refs/heads/Metron_0.3.1
Commit: ad8724eed0af784fcd6a822a11842d86aefc8832
Parents: cc29dca
Author: cstella <ce...@gmail.com>
Authored: Tue Jan 31 15:19:55 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Tue Jan 31 15:19:55 2017 -0500

----------------------------------------------------------------------
 .../common/utils/file/ReaderSpliterator.java    | 232 +++++++++++++++++++
 .../utils/file/ReaderSpliteratorTest.java       | 185 +++++++++++++++
 .../metron-data-management/README.md            |  21 +-
 .../nonbulk/flatfile/ExtractorState.java        |  46 ++++
 .../SimpleEnrichmentFlatFileLoader.java         | 116 ++++++++--
 .../SimpleEnrichmentFlatFileLoaderTest.java     | 180 +++++++-------
 .../ElasticsearchIndexingIntegrationTest.java   |   1 +
 .../integration/IndexingIntegrationTest.java    |   2 +
 8 files changed, 669 insertions(+), 114 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/file/ReaderSpliterator.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/file/ReaderSpliterator.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/file/ReaderSpliterator.java
new file mode 100644
index 0000000..20a40fa
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/file/ReaderSpliterator.java
@@ -0,0 +1,232 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils.file;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.Spliterator;
+import java.util.function.Consumer;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+import static java.util.Spliterators.spliterator;
+
+/**
+ * A Spliterator which works well on sequential streams by constructing a
+ * fixed batch size split rather than inheriting the spliterator from BufferedReader.lines()
+ * which gives up and reports no size and has no strategy for batching.  This is a bug
+ * in Java 8 and will be fixed in Java 9.
+ *
+ * The ideas have been informed by https://www.airpair.com/java/posts/parallel-processing-of-io-based-data-with-java-streams
+ * except more specific to strings and motivated by a JDK 8 bug as
+ * described at http://bytefish.de/blog/jdk8_files_lines_parallel_stream/
+ */
+public class ReaderSpliterator implements Spliterator<String> {
+  private static int characteristics = NONNULL | ORDERED | IMMUTABLE;
+  private int batchSize ;
+  private BufferedReader reader;
+  public ReaderSpliterator(BufferedReader reader) {
+    this(reader, 128);
+  }
+
+  public ReaderSpliterator(BufferedReader reader, int batchSize) {
+    this.batchSize = batchSize;
+    this.reader = reader;
+  }
+
+  @Override
+  public void forEachRemaining(Consumer<? super String> action) {
+    if (action == null) {
+      throw new NullPointerException();
+    }
+    try {
+      for (String line = null; (line = reader.readLine()) != null;) {
+        action.accept(line);
+      }
+    } catch (RuntimeException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new IllegalStateException(e);
+    }
+  }
+  /**
+   * If a remaining element exists, performs the given action on it,
+   * returning {@code true}; else returns {@code false}.  If this
+   * Spliterator is {@link #ORDERED} the action is performed on the
+   * next element in encounter order.  Exceptions thrown by the
+   * action are relayed to the caller.
+   *
+   * @param action The action
+   * @return {@code false} if no remaining elements existed
+   * upon entry to this method, else {@code true}.
+   * @throws NullPointerException if the specified action is null
+   */
+  @Override
+  public boolean tryAdvance(Consumer<? super String> action) {
+    if (action == null) {
+      throw new NullPointerException();
+    }
+    try {
+      final String line = reader.readLine();
+      if (line == null) {
+        return false;
+      }
+      action.accept(line);
+      return true;
+    } catch (RuntimeException e) {
+      throw e;
+    } catch (Exception e) {
+      throw new IllegalStateException(e);
+    }
+  }
+
+  /**
+   * If this spliterator can be partitioned, returns a Spliterator
+   * covering elements, that will, upon return from this method, not
+   * be covered by this Spliterator.
+   * <p>
+   * <p>If this Spliterator is {@link #ORDERED}, the returned Spliterator
+   * must cover a strict prefix of the elements.
+   * <p>
+   * <p>Unless this Spliterator covers an infinite number of elements,
+   * repeated calls to {@code trySplit()} must eventually return {@code null}.
+   * Upon non-null return:
+   * <ul>
+   * <li>the value reported for {@code estimateSize()} before splitting,
+   * must, after splitting, be greater than or equal to {@code estimateSize()}
+   * for this and the returned Spliterator; and</li>
+   * <li>if this Spliterator is {@code SUBSIZED}, then {@code estimateSize()}
+   * for this spliterator before splitting must be equal to the sum of
+   * {@code estimateSize()} for this and the returned Spliterator after
+   * splitting.</li>
+   * </ul>
+   * <p>
+   * <p>This method may return {@code null} for any reason,
+   * including emptiness, inability to split after traversal has
+   * commenced, data structure constraints, and efficiency
+   * considerations.
+   *
+   * @return a {@code Spliterator} covering some portion of the
+   * elements, or {@code null} if this spliterator cannot be split
+   * @apiNote An ideal {@code trySplit} method efficiently (without
+   * traversal) divides its elements exactly in half, allowing
+   * balanced parallel computation.  Many departures from this ideal
+   * remain highly effective; for example, only approximately
+   * splitting an approximately balanced tree, or for a tree in
+   * which leaf nodes may contain either one or two elements,
+   * failing to further split these nodes.  However, large
+   * deviations in balance and/or overly inefficient {@code
+   * trySplit} mechanics typically result in poor parallel
+   * performance.
+   */
+  @Override
+  public Spliterator<String> trySplit() {
+    final ConsumerWithLookback holder = new ConsumerWithLookback();
+    if (!tryAdvance(holder)) {
+      return null;
+    }
+    final String[] batch = new String[batchSize];
+    int j = 0;
+    do {
+      batch[j] = holder.value;
+    }
+    while (++j < batchSize && tryAdvance(holder));
+    return spliterator(batch, 0, j, characteristics() | SIZED);
+  }
+
+  /**
+   * Returns an estimate of the number of elements that would be
+   * encountered by a {@link #forEachRemaining} traversal, or returns {@link
+   * Long#MAX_VALUE} if infinite, unknown, or too expensive to compute.
+   * <p>
+   * <p>If this Spliterator is {@link #SIZED} and has not yet been partially
+   * traversed or split, or this Spliterator is {@link #SUBSIZED} and has
+   * not yet been partially traversed, this estimate must be an accurate
+   * count of elements that would be encountered by a complete traversal.
+   * Otherwise, this estimate may be arbitrarily inaccurate, but must decrease
+   * as specified across invocations of {@link #trySplit}.
+   *
+   * @return the estimated size, or {@code Long.MAX_VALUE} if infinite,
+   * unknown, or too expensive to compute.
+   * @apiNote Even an inexact estimate is often useful and inexpensive to compute.
+   * For example, a sub-spliterator of an approximately balanced binary tree
+   * may return a value that estimates the number of elements to be half of
+   * that of its parent; if the root Spliterator does not maintain an
+   * accurate count, it could estimate size to be the power of two
+   * corresponding to its maximum depth.
+   */
+  @Override
+  public long estimateSize() {
+    return Long.MAX_VALUE;
+  }
+
+  /**
+   * Returns a set of characteristics of this Spliterator and its
+   * elements. The result is represented as ORed values from {@link
+   * #ORDERED}, {@link #DISTINCT}, {@link #SORTED}, {@link #SIZED},
+   * {@link #NONNULL}, {@link #IMMUTABLE}, {@link #CONCURRENT},
+   * {@link #SUBSIZED}.  Repeated calls to {@code characteristics()} on
+   * a given spliterator, prior to or in-between calls to {@code trySplit},
+   * should always return the same result.
+   * <p>
+   * <p>If a Spliterator reports an inconsistent set of
+   * characteristics (either those returned from a single invocation
+   * or across multiple invocations), no guarantees can be made
+   * about any computation using this Spliterator.
+   *
+   * @return a representation of characteristics
+   * @apiNote The characteristics of a given spliterator before splitting
+   * may differ from the characteristics after splitting.  For specific
+   * examples see the characteristic values {@link #SIZED}, {@link #SUBSIZED}
+   * and {@link #CONCURRENT}.
+   */
+  @Override
+  public int characteristics() {
+    return characteristics;
+  }
+
+  static class ConsumerWithLookback implements Consumer<String> {
+    String value;
+    /**
+     * Performs this operation on the given argument.
+     *
+     * @param string the input argument
+     */
+    @Override
+    public void accept(String string) {
+      this.value = string;
+    }
+  }
+
+  public static Stream<String> lineStream(BufferedReader in, int batchSize) {
+    return lineStream(in, batchSize, false);
+  }
+
+  public static Stream<String> lineStream(BufferedReader in, int batchSize, boolean isParallel) {
+    return StreamSupport.stream(new ReaderSpliterator(in, batchSize), isParallel)
+                        .onClose(() -> {
+                          try {
+                            in.close();
+                          } catch (IOException e) {
+                            throw new UncheckedIOException(e);
+                          }
+                                       }
+                                );
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-common/src/test/java/org/apache/metron/common/utils/file/ReaderSpliteratorTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/utils/file/ReaderSpliteratorTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/utils/file/ReaderSpliteratorTest.java
new file mode 100644
index 0000000..965840f
--- /dev/null
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/utils/file/ReaderSpliteratorTest.java
@@ -0,0 +1,185 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils.file;
+
+import org.adrianwalker.multilinestring.Multiline;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.OpenOption;
+import java.nio.file.StandardOpenOption;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ForkJoinPool;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public class ReaderSpliteratorTest {
+  /**
+   foo
+   bar
+   grok
+   foo
+   the
+   and
+   grok
+   foo
+   bar
+   */
+  @Multiline
+  public static String data;
+  public static final File dataFile = new File("target/readerspliteratortest.data");
+
+  @BeforeClass
+  public static void setup() throws IOException {
+    if(dataFile.exists()) {
+      dataFile.delete();
+    }
+    Files.write(dataFile.toPath(), data.getBytes(), StandardOpenOption.CREATE_NEW, StandardOpenOption.TRUNCATE_EXISTING);
+    dataFile.deleteOnExit();
+  }
+
+  public static BufferedReader getReader() throws FileNotFoundException {
+    return new BufferedReader(new FileReader(dataFile));
+  }
+
+  @Test
+  public void testParallelStreamSmallBatch() throws FileNotFoundException {
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 2)) {
+
+      Map<String, Integer> count =
+              stream.parallel().map( s -> s.trim())
+                      .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+      Assert.assertEquals(5, count.size());
+      Assert.assertEquals(3, (int)count.get("foo"));
+      Assert.assertEquals(2, (int)count.get("bar"));
+      Assert.assertEquals(1, (int)count.get("and"));
+      Assert.assertEquals(1, (int)count.get("the"));
+    }
+  }
+
+  @Test
+  public void testParallelStreamLargeBatch() throws FileNotFoundException {
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 100)) {
+      Map<String, Integer> count =
+              stream.parallel().map(s -> s.trim())
+                      .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+      Assert.assertEquals(5, count.size());
+      Assert.assertEquals(3, (int) count.get("foo"));
+      Assert.assertEquals(2, (int) count.get("bar"));
+      Assert.assertEquals(1, (int) count.get("and"));
+      Assert.assertEquals(1, (int) count.get("the"));
+    }
+  }
+
+  @Test
+  public void testSequentialStreamLargeBatch() throws FileNotFoundException {
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 100)) {
+      Map<String, Integer> count =
+              stream.map(s -> s.trim())
+                      .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+      Assert.assertEquals(5, count.size());
+      Assert.assertEquals(3, (int) count.get("foo"));
+      Assert.assertEquals(2, (int) count.get("bar"));
+      Assert.assertEquals(1, (int) count.get("and"));
+      Assert.assertEquals(1, (int) count.get("the"));
+    }
+  }
+
+  @Test
+  public void testActuallyParallel() throws ExecutionException, InterruptedException, FileNotFoundException {
+    //With 9 elements and a batch of 2, we should only have ceil(9/2) = 5 batches, so at most min(5, 2) = 2 threads will be used
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 2)) {
+      ForkJoinPool forkJoinPool = new ForkJoinPool(2);
+      forkJoinPool.submit(() -> {
+                Map<String, Integer> threads =
+                        stream.parallel().map(s -> Thread.currentThread().getName())
+                                .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+                Assert.assertTrue(threads.size() <= 2);
+              }
+      ).get();
+    }
+  }
+
+  @Test
+  public void testActuallyParallel_mediumBatch() throws ExecutionException, InterruptedException, FileNotFoundException {
+    //With 9 elements and a batch of 2, we should only have ceil(9/2) = 5 batches, so at most 5 threads of the pool of 10 will be used
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 2)) {
+      ForkJoinPool forkJoinPool = new ForkJoinPool(10);
+      forkJoinPool.submit(() -> {
+                Map<String, Integer> threads =
+                        stream.parallel().map(s -> Thread.currentThread().getName())
+                                .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+                Assert.assertTrue(threads.size() <= (int) Math.ceil(9.0 / 2) && threads.size() > 1);
+              }
+      ).get();
+    }
+  }
+
+  @Test
+  public void testActuallyParallel_mediumBatchNotImplicitlyParallel() throws ExecutionException, InterruptedException, FileNotFoundException {
+    //Since this is not parallel and we're not making the stream itself parallel, we should only use one thread from the thread pool.
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 2, false)) {
+      ForkJoinPool forkJoinPool = new ForkJoinPool(10);
+      forkJoinPool.submit(() -> {
+                Map<String, Integer> threads =
+                        stream.map(s -> Thread.currentThread().getName())
+                                .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+                Assert.assertTrue(threads.size() == 1);
+              }
+      ).get();
+    }
+  }
+
+  @Test
+  public void testActuallyParallel_mediumBatchImplicitlyParallel() throws ExecutionException, InterruptedException, FileNotFoundException {
+    //With 9 elements and a batch of 2, we should only have ceil(9/2) = 5 batches, so at most 5 threads of the pool of 10 will be used
+    //despite not calling .parallel() on the stream, we are constructing the stream to be implicitly parallel
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 2, true)) {
+      ForkJoinPool forkJoinPool = new ForkJoinPool(10);
+      forkJoinPool.submit(() -> {
+                Map<String, Integer> threads =
+                        stream.map(s -> Thread.currentThread().getName())
+                                .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+                Assert.assertTrue(threads.size() <= (int) Math.ceil(9.0 / 2) && threads.size() > 1);
+              }
+      ).get();
+    }
+  }
+
+  @Test
+  public void testActuallyParallel_bigBatch() throws ExecutionException, InterruptedException, FileNotFoundException {
+    //With 9 elements and a batch of 10, we should only have one batch, so only one thread will be used
+    //despite the thread pool size of 2.
+    try( Stream<String> stream = ReaderSpliterator.lineStream(getReader(), 10)) {
+      ForkJoinPool forkJoinPool = new ForkJoinPool(2);
+      forkJoinPool.submit(() -> {
+                Map<String, Integer> threads =
+                        stream.parallel().map(s -> Thread.currentThread().getName())
+                                .collect(Collectors.toMap(s -> s, s -> 1, Integer::sum));
+                Assert.assertEquals(1, threads.size());
+              }
+      ).get();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-data-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/README.md b/metron-platform/metron-data-management/README.md
index a0c0164..26dd472 100644
--- a/metron-platform/metron-data-management/README.md
+++ b/metron-platform/metron-data-management/README.md
@@ -240,16 +240,17 @@ each document to be considered as input to the Extractor.
 
 The parameters for the utility are as follows:
 
-| Short Code | Long Code           | Is Required? | Description                                                                                                                                                                          |
-|------------|---------------------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| -h         |                     | No           | Generate the help screen/set of options                                                                                                                                              |
-| -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                                                                                    |
-| -t         | --hbase_table       | Yes          | The HBase table to import into                                                                                                                                                       |
-| -c         | --hbase_cf          | Yes          | The HBase table column family to import into                                                                                                                                         |
-| -i         | --input             | Yes          | The input data location on local disk.  If this is a file, then that file will be loaded.  If this is a directory, then the files will be loaded recursively under that directory. |
-| -l         | --log4j             | No           | The log4j properties file to load                                                                                                                                                    |
-| -n         | --enrichment_config | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified.                                                                    |
-
+| Short Code | Long Code           | Is Required? | Description                                                                                                                                                                         |
+|------------|---------------------|--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| -h         |                     | No           | Generate the help screen/set of options                                                                                                                                             |
+| -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                                                                                   |
+| -t         | --hbase_table       | Yes          | The HBase table to import into                                                                                                                                                      |
+| -c         | --hbase_cf          | Yes          | The HBase table column family to import into                                                                                                                                        |
+| -i         | --input             | Yes          | The input data location on local disk.  If this is a file, then that file will be loaded.  If this is a directory, then the files will be loaded recursively under that directory. |
+| -l         | --log4j             | No           | The log4j properties file to load                                                                                                                                                   |
+| -n         | --enrichment_config | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified.                                                                   |
+| -p         | --threads           | No           | The number of threads to use when extracting data.  The default is the number of cores.                                                                                             |
+| -b         | --batchSize         | No           | The batch size to use for HBase puts                                                                                                                                                |
 ### GeoLite2 Loader
 
 The shell script `$METRON_HOME/bin/geo_enrichment_load.sh` will retrieve MaxMind GeoLite2 data and load data into HDFS, and update the configuration.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
new file mode 100644
index 0000000..e44eb27
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile;
+
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+
+public class ExtractorState {
+  private HTableInterface table;
+  private Extractor extractor;
+  private HbaseConverter converter;
+
+  public ExtractorState(HTableInterface table, Extractor extractor, HbaseConverter converter) {
+    this.table = table;
+    this.extractor = extractor;
+    this.converter = converter;
+  }
+
+  public HTableInterface getTable() {
+    return table;
+  }
+
+  public Extractor getExtractor() {
+    return extractor;
+  }
+
+  public HbaseConverter getConverter() {
+    return converter;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
index 0c7501a..9992422 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
@@ -20,6 +20,7 @@ package org.apache.metron.dataloads.nonbulk.flatfile;
 import com.google.common.base.Function;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
 import org.apache.commons.cli.*;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -28,6 +29,8 @@ import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.log4j.PropertyConfigurator;
+import org.apache.metron.common.utils.ConversionUtils;
+import org.apache.metron.common.utils.file.ReaderSpliterator;
 import org.apache.metron.dataloads.extractor.Extractor;
 import org.apache.metron.dataloads.extractor.ExtractorHandler;
 import org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat;
@@ -39,13 +42,13 @@ import org.apache.metron.enrichment.lookup.LookupKV;
 import org.apache.metron.common.utils.JSONUtils;
 
 import javax.annotation.Nullable;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
+import java.io.*;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Stack;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ForkJoinPool;
+import java.util.stream.Stream;
 
 public class SimpleEnrichmentFlatFileLoader {
   private static abstract class OptionHandler implements Function<String, Option> {}
@@ -111,6 +114,26 @@ public class SimpleEnrichmentFlatFileLoader {
         return o;
       }
     })
+    ,NUM_THREADS("p", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "threads", true, "The number of threads to use when extracting data.  The default is the number of cores of your machine.");
+        o.setArgName("NUM_THREADS");
+        o.setRequired(false);
+        return o;
+      }
+    })
+    ,BATCH_SIZE("b", new OptionHandler() {
+      @Nullable
+      @Override
+      public Option apply(@Nullable String s) {
+        Option o = new Option(s, "batchSize", true, "The batch size to use for HBase puts");
+        o.setArgName("SIZE");
+        o.setRequired(false);
+        return o;
+      }
+    })
     ,INPUT("i", new OptionHandler() {
       @Nullable
       @Override
@@ -207,25 +230,55 @@ public class SimpleEnrichmentFlatFileLoader {
     return ret;
   }
 
-
-  public void loadFile( File inputFile
-                      , Extractor extractor
-                      , HTableInterface table
-                      , String cf
-                      , HbaseConverter converter
-                      , boolean lineByLine
-                      ) throws IOException
+  public void load( final Iterable<Stream<String>> streams
+                  , final ThreadLocal<ExtractorState> state
+                  , final String cf
+                  , int numThreads
+                  )
   {
+    for(Stream<String> stream : streams) {
+      try {
+        ForkJoinPool forkJoinPool = new ForkJoinPool(numThreads);
+        forkJoinPool.submit(() ->
+          stream.parallel().forEach(input -> {
+            ExtractorState es = state.get();
+            try {
+              es.getTable().put(extract(input, es.getExtractor(), cf, es.getConverter()));
+            } catch (IOException e) {
+              throw new IllegalStateException("Unable to continue: " + e.getMessage(), e);
+            }
+            }
+                                   )
+        ).get();
+      } catch (InterruptedException e) {
+        throw new IllegalStateException(e.getMessage(), e);
+      } catch (ExecutionException e) {
+        throw new IllegalStateException(e.getMessage(), e);
+      } finally {
+        stream.close();
+      }
+    }
+  }
+
+  private static Iterable<Stream<String>> streamify(List<File> files, int batchSize, boolean lineByLine) throws FileNotFoundException {
+    List<Stream<String>> ret = new ArrayList<>();
     if(!lineByLine) {
-      table.put(extract(FileUtils.readFileToString(inputFile), extractor, cf, converter));
+      ret.add(files.stream().map(f -> {
+        try {
+          return FileUtils.readFileToString(f);
+        } catch (IOException e) {
+          throw new IllegalStateException("File " + f.getName() + " not found.");
+        }
+      }));
     }
     else {
-      BufferedReader br = new BufferedReader(new FileReader(inputFile));
-      for(String line = null;(line = br.readLine()) != null;) {
-        table.put(extract(line, extractor, cf, converter));
+      for(File f : files) {
+        ret.add(ReaderSpliterator.lineStream(new BufferedReader(new FileReader(f)), batchSize));
       }
     }
+    return ret;
   }
+
   public static void main(String... argv) throws Exception {
     Configuration conf = HBaseConfiguration.create();
     String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
@@ -237,23 +290,40 @@ public class SimpleEnrichmentFlatFileLoader {
     ExtractorHandler handler = ExtractorHandler.load(
             FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli)))
     );
+    int batchSize = 128;
+    if(LoadOptions.BATCH_SIZE.has(cli)) {
+      batchSize = ConversionUtils.convert(LoadOptions.BATCH_SIZE.get(cli), Integer.class);
+    }
+    int numThreads = Runtime.getRuntime().availableProcessors();
+    if(LoadOptions.NUM_THREADS.has(cli)) {
+      numThreads = ConversionUtils.convert(LoadOptions.NUM_THREADS.get(cli), Integer.class);
+    }
     boolean lineByLine = !handler.getInputFormatHandler().getClass().equals(WholeFileFormat.class);
-    Extractor e = handler.getExtractor();
     SensorEnrichmentUpdateConfig sensorEnrichmentUpdateConfig = null;
     if(LoadOptions.ENRICHMENT_CONFIG.has(cli)) {
       sensorEnrichmentUpdateConfig = JSONUtils.INSTANCE.load( new File(LoadOptions.ENRICHMENT_CONFIG.get(cli))
               , SensorEnrichmentUpdateConfig.class
       );
     }
-    HbaseConverter converter = new EnrichmentConverter();
     List<File> inputFiles = getFiles(new File(LoadOptions.INPUT.get(cli)));
     SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
-    HTableInterface table = loader.getProvider()
-            .getTable(conf, LoadOptions.HBASE_TABLE.get(cli));
+    ThreadLocal<ExtractorState> state = new ThreadLocal<ExtractorState>() {
+      @Override
+      protected ExtractorState initialValue() {
+        try {
+          ExtractorHandler handler = ExtractorHandler.load(
+            FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli)))
+          );
+          HTableInterface table = loader.getProvider().getTable(conf, LoadOptions.HBASE_TABLE.get(cli));
+          return new ExtractorState(table, handler.getExtractor(), new EnrichmentConverter());
+        } catch (IOException e1) {
+          throw new IllegalStateException("Unable to get table: " + e1);
+        }
+      }
+    };
+
+    loader.load(streamify(inputFiles, batchSize, lineByLine), state, LoadOptions.HBASE_CF.get(cli), numThreads);
 
-    for (File f : inputFiles) {
-      loader.loadFile(f, e, table, LoadOptions.HBASE_CF.get(cli), converter, lineByLine);
-    }
     if(sensorEnrichmentUpdateConfig != null) {
       sensorEnrichmentUpdateConfig.updateSensorConfigs();
     }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
index b4891aa..4ffb91a 100644
--- a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.metron.dataloads.nonbulk.flatfile;
 
+import com.google.common.collect.ImmutableList;
 import org.adrianwalker.multilinestring.Multiline;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.PosixParser;
@@ -56,91 +57,108 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Stream;
 
 public class SimpleEnrichmentFlatFileLoaderTest {
 
-    private HBaseTestingUtility testUtil;
-
-    /** The test table. */
-    private HTable testTable;
-    private String tableName = "enrichment";
-    private String cf = "cf";
-    private String csvFile="input.csv";
-    private String extractorJson = "extractor.json";
-    private String enrichmentJson = "enrichment_config.json";
-    private String log4jProperty = "log4j";
-
-    Configuration config = null;
-    /**
-     {
-        "config" : {
-            "columns" : {
-                "host" : 0,
-                "meta" : 2
-            },
-            "indicator_column" : "host",
-            "separator" : ",",
-            "type" : "enrichment"
-        },
-        "extractor" : "CSV"
-     }
-     */
-    @Multiline
-    private static String extractorConfig;
-
-    @Before
-    public void setup() throws Exception {
-       Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
-        config = kv.getValue();
-        testUtil = kv.getKey();
-        testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
+  private HBaseTestingUtility testUtil;
+
+  /** The test table. */
+  private HTable testTable;
+  private String tableName = "enrichment";
+  private String cf = "cf";
+  private String csvFile="input.csv";
+  private String extractorJson = "extractor.json";
+  private String enrichmentJson = "enrichment_config.json";
+  private String log4jProperty = "log4j";
+
+  Configuration config = null;
+  /**
+   {
+      "config" : {
+        "columns" : {
+          "host" : 0,
+          "meta" : 2
+                    },
+        "indicator_column" : "host",
+        "separator" : ",",
+        "type" : "enrichment"
+                 },
+      "extractor" : "CSV"
    }
-
-    @After
-    public void teardown() throws Exception {
-        HBaseUtil.INSTANCE.teardown(testUtil);
-    }
-
-    @Test
-    public void testCommandLine() throws Exception {
-        Configuration conf = HBaseConfiguration.create();
-
-        String[] argv = {"-c cf", "-t enrichment", "-e extractor.json", "-n enrichment_config.json", "-l log4j", "-i input.csv"};
-        String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
-
-        CommandLine cli = SimpleEnrichmentFlatFileLoader.LoadOptions.parse(new PosixParser(), otherArgs);
-        Assert.assertEquals(extractorJson,SimpleEnrichmentFlatFileLoader.LoadOptions.EXTRACTOR_CONFIG.get(cli).trim());
-        Assert.assertEquals(cf, SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_CF.get(cli).trim());
-        Assert.assertEquals(tableName,SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_TABLE.get(cli).trim());
-        Assert.assertEquals(enrichmentJson,SimpleEnrichmentFlatFileLoader.LoadOptions.ENRICHMENT_CONFIG.get(cli).trim());
-        Assert.assertEquals(csvFile,SimpleEnrichmentFlatFileLoader.LoadOptions.INPUT.get(cli).trim());
-        Assert.assertEquals(log4jProperty, SimpleEnrichmentFlatFileLoader.LoadOptions.LOG4J_PROPERTIES.get(cli).trim());
-    }
-
-    @Test
-    public void test() throws Exception {
-
-        Assert.assertNotNull(testTable);
-        String contents = "google.com,1,foo";
-
-        EnrichmentConverter converter = new EnrichmentConverter();
-        ExtractorHandler handler = ExtractorHandler.load(extractorConfig);
-        Extractor e = handler.getExtractor();
-        File file = new File (contents);
-        SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
-        testTable.put(loader.extract(contents, e, cf, converter));
-
-        ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
-        List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
-        for(Result r : scanner) {
-            results.add(converter.fromResult(r, cf));
-        }
-        Assert.assertEquals(1, results.size());
-        Assert.assertEquals(results.get(0).getKey().indicator, "google.com");
-        Assert.assertEquals(results.get(0).getKey().type, "enrichment");
-        Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
-        Assert.assertEquals(results.get(0).getValue().getMetadata().get("meta"), "foo");
-        Assert.assertEquals(results.get(0).getValue().getMetadata().get("host"), "google.com");
+   */
+  @Multiline
+  private static String extractorConfig;
+
+  @Before
+  public void setup() throws Exception {
+    Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
+    config = kv.getValue();
+    testUtil = kv.getKey();
+    testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
+  }
+
+  @After
+  public void teardown() throws Exception {
+    HBaseUtil.INSTANCE.teardown(testUtil);
+  }
+
+  @Test
+  public void testCommandLine() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+
+    String[] argv = { "-c cf", "-t enrichment"
+            , "-e extractor.json", "-n enrichment_config.json"
+            , "-l log4j", "-i input.csv"
+            , "-p 2", "-b 128"
+    };
+    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+
+    CommandLine cli = SimpleEnrichmentFlatFileLoader.LoadOptions.parse(new PosixParser(), otherArgs);
+    Assert.assertEquals(extractorJson,SimpleEnrichmentFlatFileLoader.LoadOptions.EXTRACTOR_CONFIG.get(cli).trim());
+    Assert.assertEquals(cf, SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_CF.get(cli).trim());
+    Assert.assertEquals(tableName,SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_TABLE.get(cli).trim());
+    Assert.assertEquals(enrichmentJson,SimpleEnrichmentFlatFileLoader.LoadOptions.ENRICHMENT_CONFIG.get(cli).trim());
+    Assert.assertEquals(csvFile,SimpleEnrichmentFlatFileLoader.LoadOptions.INPUT.get(cli).trim());
+    Assert.assertEquals(log4jProperty, SimpleEnrichmentFlatFileLoader.LoadOptions.LOG4J_PROPERTIES.get(cli).trim());
+    Assert.assertEquals("2", SimpleEnrichmentFlatFileLoader.LoadOptions.NUM_THREADS.get(cli).trim());
+    Assert.assertEquals("128", SimpleEnrichmentFlatFileLoader.LoadOptions.BATCH_SIZE.get(cli).trim());
+  }
+
+  @Test
+  public void test() throws Exception {
+
+    Assert.assertNotNull(testTable);
+    String contents = "google.com,1,foo";
+
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ExtractorHandler handler = ExtractorHandler.load(extractorConfig);
+    Extractor e = handler.getExtractor();
+    SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
+    Stream<String> contentStreams = ImmutableList.of(contents).stream();
+    ThreadLocal<ExtractorState> state = new ThreadLocal<ExtractorState>() {
+      @Override
+      protected ExtractorState initialValue() {
+        return new ExtractorState(testTable, e, converter);
+      }
+    };
+    loader.load(ImmutableList.of(contentStreams)
+               , state
+               , cf
+               , 2
+               );
+
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for(Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
     }
+    Assert.assertEquals(1, results.size());
+    Assert.assertEquals(results.get(0).getKey().indicator, "google.com");
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertEquals(results.get(0).getValue().getMetadata().get("meta"), "foo");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().get("host"), "google.com");
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchIndexingIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchIndexingIntegrationTest.java b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchIndexingIntegrationTest.java
index 7e9f231..acc1565 100644
--- a/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchIndexingIntegrationTest.java
+++ b/metron-platform/metron-elasticsearch/src/test/java/org/apache/metron/elasticsearch/integration/ElasticsearchIndexingIntegrationTest.java
@@ -85,6 +85,7 @@ public class ElasticsearchIndexingIntegrationTest extends IndexingIntegrationTes
             return ReadinessState.READY;
           }
         } else {
+          System.out.println("Missed index...");
           return ReadinessState.NOT_READY;
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ad8724ee/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java b/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
index 03ae9ff..a93c442 100644
--- a/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
+++ b/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
@@ -205,6 +205,7 @@ public abstract class IndexingIntegrationTest extends BaseIntegrationTest {
   private void waitForIndex(String zookeeperQuorum) throws Exception {
     try(CuratorFramework client = getClient(zookeeperQuorum)) {
       client.start();
+      System.out.println("Waiting for zookeeper...");
       byte[] bytes = null;
       do {
         try {
@@ -216,6 +217,7 @@ public abstract class IndexingIntegrationTest extends BaseIntegrationTest {
         }
       }
       while(bytes == null || bytes.length == 0);
+      System.out.println("Found index config in zookeeper...");
     }
   }
 



[02/17] incubator-metron git commit: METRON-676 Create Zeppelin Notebook for YAF Telemetry (nickwallen) closes apache/incubator-metron#427

Posted by ce...@apache.org.
METRON-676 Create Zeppelin Notebook for YAF Telemetry (nickwallen) closes apache/incubator-metron#427


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/cc29dcab
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/cc29dcab
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/cc29dcab

Branch: refs/heads/Metron_0.3.1
Commit: cc29dcab841d6b06c338da829d3f3a8541cdef5b
Parents: f3ca3c0
Author: nickwallen <ni...@nickallen.org>
Authored: Tue Jan 31 14:52:35 2017 -0500
Committer: Nick Allen <ni...@nickallen.org>
Committed: Tue Jan 31 14:52:35 2017 -0500

----------------------------------------------------------------------
 metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec     | 1 +
 .../src/main/config/zeppelin/metron/metron-yaf-telemetry.json       | 1 +
 2 files changed, 2 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/cc29dcab/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
----------------------------------------------------------------------
diff --git a/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec b/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
index 8a6607b..5c5881c 100644
--- a/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
+++ b/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
@@ -261,6 +261,7 @@ This package installs the Metron Indexing files
 %{metron_home}/config/zookeeper/indexing/websphere.json
 %{metron_home}/config/zookeeper/indexing/yaf.json
 %{metron_home}/config/zookeeper/indexing/asa.json
+%{metron_home}/config/zeppelin/metron/metron-yaf-telemetry.json
 
 # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 


[07/17] incubator-metron git commit: METRON-658: Updated Grammar to Handle More Uses of in/not in Expressions closes apache/incubator-metron#430

Posted by ce...@apache.org.
METRON-658: Updated Grammar to Handle More Uses of in/not in Expressions closes apache/incubator-metron#430


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/8340c0e2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/8340c0e2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/8340c0e2

Branch: refs/heads/Metron_0.3.1
Commit: 8340c0e25d04f13c2ded2cb37159d9644d00ccff
Parents: fd77ec3
Author: JJ <jj...@gmail.com>
Authored: Fri Feb 3 11:01:23 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Fri Feb 3 11:01:23 2017 -0500

----------------------------------------------------------------------
 .../metron/common/stellar/generated/Stellar.g4  |  190 ++--
 .../metron/common/stellar/StellarCompiler.java  |   49 +-
 .../stellar/generated/StellarBaseListener.java  |   74 +-
 .../common/stellar/generated/StellarLexer.java  |  316 ++---
 .../stellar/generated/StellarListener.java      |  144 ++-
 .../common/stellar/generated/StellarParser.java | 1075 ++++++++++--------
 .../metron/common/stellar/StellarTest.java      |   84 +-
 7 files changed, 1080 insertions(+), 852 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/antlr4/org/apache/metron/common/stellar/generated/Stellar.g4
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/antlr4/org/apache/metron/common/stellar/generated/Stellar.g4 b/metron-platform/metron-common/src/main/antlr4/org/apache/metron/common/stellar/generated/Stellar.g4
index 98af17f..3005323 100644
--- a/metron-platform/metron-common/src/main/antlr4/org/apache/metron/common/stellar/generated/Stellar.g4
+++ b/metron-platform/metron-common/src/main/antlr4/org/apache/metron/common/stellar/generated/Stellar.g4
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -20,7 +20,7 @@ grammar Stellar;
 
 @header {
 //CHECKSTYLE:OFF
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -45,7 +45,6 @@ DOUBLE_QUOTE : '"';
 SINGLE_QUOTE : '\'';
 COMMA : ',';
 PERIOD : '.';
-fragment EOL : '\n';
 
 AND : 'and' | '&&' | 'AND';
 OR : 'or' | '||' | 'OR';
@@ -90,13 +89,13 @@ DOUBLE_LITERAL :
   | INT_LITERAL EXPONENT D?
   | INT_LITERAL EXPONENT? D
   ;
-FLOAT_LITERAL  :
+FLOAT_LITERAL :
   INT_LITERAL PERIOD DIGIT* EXPONENT? F
   | MINUS? PERIOD DIGIT+ EXPONENT? F
   | INT_LITERAL EXPONENT? F
   ;
-LONG_LITERAL  : INT_LITERAL L ;
-IDENTIFIER : [a-zA-Z_][a-zA-Z_\.:0-9]* ;
+LONG_LITERAL : INT_LITERAL L;
+IDENTIFIER : [a-zA-Z_][a-zA-Z_\.:0-9]*;
 
 STRING_LITERAL :
   DOUBLE_QUOTE SCHAR* DOUBLE_QUOTE
@@ -106,9 +105,9 @@ STRING_LITERAL :
 // COMMENT and WS are stripped from the output token stream by sending
 // to a different channel 'skip'
 
-COMMENT : '//' .+? (EOL|EOF) -> skip ;
+COMMENT : '//' .+? (EOL|EOF) -> skip;
 
-WS : [ \r\t\u000C\n]+ -> skip ;
+WS : [ \r\t\u000C\n]+ -> skip;
 
 fragment ZERO: '0';
 fragment FIRST_DIGIT: '1'..'9';
@@ -118,7 +117,7 @@ fragment D: ('d'|'D');
 fragment E: ('e'|'E');
 fragment F: ('f'|'F');
 fragment L: ('l'|'L');
-
+fragment EOL : '\n';
 
 /* Parser rules */
 
@@ -126,76 +125,105 @@ transformation : transformation_expr EOF;
 
 transformation_expr:
    conditional_expr #ConditionalExpr
-  |  LPAREN transformation_expr RPAREN #TransformationExpr
-  | arithmetic_expr               # ArithExpression
+  | LPAREN transformation_expr RPAREN #TransformationExpr
+  | arithmetic_expr # ArithExpression
   | transformation_entity #TransformationEntity
-  | comparison_expr               # ComparisonExpression
-  ;
-conditional_expr :  comparison_expr QUESTION transformation_expr COLON transformation_expr #TernaryFuncWithoutIf
-                 | IF comparison_expr THEN transformation_expr ELSE transformation_expr #TernaryFuncWithIf
-                 ;
-
-comparison_expr : identifier_operand comp_operator identifier_operand # ComparisonExpressionWithOperator
-                | identifier_operand IN identifier_operand #InExpression
-                | identifier_operand NIN identifier_operand #NInExpression
-                | comparison_expr AND comparison_expr #LogicalExpressionAnd
-                | comparison_expr OR comparison_expr #LogicalExpressionOr
-                | NOT LPAREN comparison_expr RPAREN #NotFunc
-                | LPAREN comparison_expr RPAREN # ComparisonExpressionParens
-                | identifier_operand #operand
-                ;
-transformation_entity : identifier_operand
-  ;
-comp_operator : (EQ | NEQ | LT | LTE | GT | GTE) # ComparisonOp
-              ;
-arith_operator_addition : (PLUS | MINUS) # ArithOp_plus
-               ;
-arith_operator_mul : (MUL | DIV) # ArithOp_mul
-               ;
-func_args : LPAREN op_list RPAREN
-          | LPAREN RPAREN
-          ;
-op_list : identifier_operand
-        | op_list COMMA identifier_operand
-        | conditional_expr
-        | op_list COMMA conditional_expr
-        ;
-list_entity : LBRACKET op_list RBRACKET
-            | LBRACKET RBRACKET;
-
-kv_list : identifier_operand COLON transformation_expr
-        | kv_list COMMA identifier_operand COLON transformation_expr
-        ;
-
-map_entity : LBRACE kv_list RBRACE
-           | LBRACE RBRACE;
-
-arithmetic_expr: arithmetic_expr_mul #ArithExpr_solo
-               | arithmetic_expr PLUS arithmetic_expr_mul #ArithExpr_plus
-               | arithmetic_expr MINUS arithmetic_expr_mul #ArithExpr_minus
-                ;
-arithmetic_expr_mul : arithmetic_operands #ArithExpr_mul_solo
-                    | arithmetic_expr_mul MUL arithmetic_expr_mul #ArithExpr_mul
-                    | arithmetic_expr_mul DIV arithmetic_expr_mul #ArithExpr_div
-                    ;
-
-functions : IDENTIFIER func_args #TransformationFunc
-          ;
-arithmetic_operands : functions #NumericFunctions
-                    | DOUBLE_LITERAL #DoubleLiteral
-                    | INT_LITERAL #IntLiteral
-                    | LONG_LITERAL #LongLiteral
-                    | FLOAT_LITERAL #FloatLiteral
-                    | IDENTIFIER #Variable
-                    | LPAREN arithmetic_expr RPAREN #ParenArith
-                    | LPAREN conditional_expr RPAREN#condExpr
-                    ;
-identifier_operand : (TRUE | FALSE) # LogicalConst
-                   | arithmetic_expr #ArithmeticOperands
-                   | STRING_LITERAL # StringLiteral
-                   | list_entity #List
-                   | map_entity #MapConst
-                   | NULL #NullConst
-                   | EXISTS LPAREN IDENTIFIER RPAREN #ExistsFunc
-                   | LPAREN conditional_expr RPAREN#condExpr_paren
-                   ;
+  | comparison_expr # ComparisonExpression
+  | logical_expr #LogicalExpression
+  | in_expr #InExpression
+  ;
+
+conditional_expr :
+  logical_expr QUESTION transformation_expr COLON transformation_expr #TernaryFuncWithoutIf
+  | IF logical_expr THEN transformation_expr ELSE transformation_expr #TernaryFuncWithIf
+  ;
+
+logical_expr:
+  b_expr AND logical_expr #LogicalExpressionAnd
+  | b_expr OR logical_expr #LogicalExpressionOr
+  | b_expr #BoleanExpression
+  ;
+
+b_expr:
+  comparison_expr
+  | in_expr
+  ;
+
+in_expr:
+  identifier_operand IN b_expr #InExpressionStatement
+  | identifier_operand NIN b_expr #NInExpressionStatement
+  ;
+
+comparison_expr :
+  comparison_expr comp_operator comparison_expr #ComparisonExpressionWithOperator
+  | NOT LPAREN logical_expr RPAREN #NotFunc
+  | LPAREN logical_expr RPAREN #ComparisonExpressionParens
+  | identifier_operand #operand
+  ;
+
+transformation_entity : identifier_operand;
+
+comp_operator : (EQ | NEQ | LT | LTE | GT | GTE) # ComparisonOp;
+
+func_args :
+  LPAREN op_list RPAREN
+  | LPAREN RPAREN
+  ;
+
+op_list :
+  identifier_operand
+  | op_list COMMA identifier_operand
+  | conditional_expr
+  | op_list COMMA conditional_expr
+  ;
+
+list_entity :
+  LBRACKET op_list RBRACKET
+  | LBRACKET RBRACKET
+  ;
+
+kv_list :
+  identifier_operand COLON transformation_expr
+  | kv_list COMMA identifier_operand COLON transformation_expr
+  ;
+
+map_entity :
+  LBRACE kv_list RBRACE
+  | LBRACE RBRACE
+  ;
+
+arithmetic_expr:
+  arithmetic_expr_mul #ArithExpr_solo
+  | arithmetic_expr PLUS arithmetic_expr_mul #ArithExpr_plus
+  | arithmetic_expr MINUS arithmetic_expr_mul #ArithExpr_minus
+  ;
+
+arithmetic_expr_mul :
+  arithmetic_operands #ArithExpr_mul_solo
+  | arithmetic_expr_mul MUL arithmetic_expr_mul #ArithExpr_mul
+  | arithmetic_expr_mul DIV arithmetic_expr_mul #ArithExpr_div
+  ;
+
+functions : IDENTIFIER func_args #TransformationFunc;
+
+arithmetic_operands :
+  functions #NumericFunctions
+  | DOUBLE_LITERAL #DoubleLiteral
+  | INT_LITERAL #IntLiteral
+  | LONG_LITERAL #LongLiteral
+  | FLOAT_LITERAL #FloatLiteral
+  | IDENTIFIER #Variable
+  | LPAREN arithmetic_expr RPAREN #ParenArith
+  | LPAREN conditional_expr RPAREN #condExpr
+  ;
+
+identifier_operand :
+  (TRUE | FALSE) #LogicalConst
+  | arithmetic_expr #ArithmeticOperands
+  | STRING_LITERAL # StringLiteral
+  | list_entity #List
+  | map_entity #MapConst
+  | NULL #NullConst
+  | EXISTS LPAREN IDENTIFIER RPAREN #ExistsFunc
+  | LPAREN conditional_expr RPAREN #condExpr_paren
+  ;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/StellarCompiler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/StellarCompiler.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/StellarCompiler.java
index 4822b31..4af299e 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/StellarCompiler.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/StellarCompiler.java
@@ -57,20 +57,20 @@ public class StellarCompiler extends StellarBaseListener {
   private final NumberLiteralEvaluator numberLiteralEvaluator;
   private final ComparisonExpressionWithOperatorEvaluator comparisonExpressionWithOperatorEvaluator;
 
-  public StellarCompiler(VariableResolver variableResolver,
-                         FunctionResolver functionResolver,
-                         Context context,
-                         Stack<Token<?>> tokenStack,
-                         ArithmeticEvaluator arithmeticEvaluator,
-                         NumberLiteralEvaluator numberLiteralEvaluator,
-                         ComparisonExpressionWithOperatorEvaluator comparisonExpressionWithOperatorEvaluator) {
+  public StellarCompiler(final VariableResolver variableResolver,
+                         final FunctionResolver functionResolver,
+                         final Context context,
+                         final Stack<Token<?>> tokenStack,
+                         final ArithmeticEvaluator arithmeticEvaluator,
+                         final NumberLiteralEvaluator numberLiteralEvaluator,
+                         final ComparisonExpressionWithOperatorEvaluator comparisonExpressionWithOperatorEvaluator) {
     this.variableResolver = variableResolver;
     this.functionResolver = functionResolver;
     this.context = context;
     this.tokenStack = tokenStack;
     this.arithmeticEvaluator = arithmeticEvaluator;
     this.numberLiteralEvaluator = numberLiteralEvaluator;
-    this. comparisonExpressionWithOperatorEvaluator = comparisonExpressionWithOperatorEvaluator;
+    this.comparisonExpressionWithOperatorEvaluator = comparisonExpressionWithOperatorEvaluator;
   }
 
   @Override
@@ -78,22 +78,22 @@ public class StellarCompiler extends StellarBaseListener {
     tokenStack.clear();
   }
 
-  private boolean handleIn(Token<?> left, Token<?> right) {
+  private boolean handleIn(final Token<?> left, final Token<?> right) {
     Object key = right.getValue();
 
 
     if (left.getValue() != null) {
-      if(left.getValue() instanceof String && key instanceof String) {
-        return ((String)left.getValue()).contains(key.toString());
+      if (left.getValue() instanceof String && key instanceof String) {
+        return ((String) left.getValue()).contains(key.toString());
       }
-      else if(left.getValue() instanceof Collection) {
-        return ((Collection)left.getValue()).contains(key);
+      else if (left.getValue() instanceof Collection) {
+        return ((Collection) left.getValue()).contains(key);
       }
-      else if(left.getValue() instanceof Map) {
-        return ((Map)left.getValue()).containsKey(key);
+      else if (left.getValue() instanceof Map) {
+        return ((Map) left.getValue()).containsKey(key);
       }
       else {
-        if(key == null) {
+        if (key == null) {
           return key == left.getValue();
         }
         else {
@@ -145,7 +145,7 @@ public class StellarCompiler extends StellarBaseListener {
     Token<?> elseExpr = popStack();
     Token<?> thenExpr = popStack();
     Token<?> ifExpr = popStack();
-    boolean b = ((Token<Boolean>) ifExpr).getValue();
+    @SuppressWarnings("unchecked") boolean b = ((Token<Boolean>) ifExpr).getValue();
     if (b) {
       tokenStack.push(thenExpr);
     } else {
@@ -164,14 +164,14 @@ public class StellarCompiler extends StellarBaseListener {
   }
 
   @Override
-  public void exitInExpression(StellarParser.InExpressionContext ctx) {
+  public void exitInExpressionStatement(StellarParser.InExpressionStatementContext ctx) {
     Token<?> left = popStack();
     Token<?> right = popStack();
     tokenStack.push(new Token<>(handleIn(left, right), Boolean.class));
   }
 
   @Override
-  public void exitNInExpression(StellarParser.NInExpressionContext ctx) {
+  public void exitNInExpressionStatement(StellarParser.NInExpressionStatementContext ctx) {
     Token<?> left = popStack();
     Token<?> right = popStack();
     tokenStack.push(new Token<>(!handleIn(left, right), Boolean.class));
@@ -230,7 +230,7 @@ public class StellarCompiler extends StellarBaseListener {
 
   @Override
   public void exitLogicalConst(StellarParser.LogicalConstContext ctx) {
-    Boolean b = null;
+    Boolean b;
     switch (ctx.getText().toUpperCase()) {
       case "TRUE":
         b = true;
@@ -244,7 +244,7 @@ public class StellarCompiler extends StellarBaseListener {
     tokenStack.push(new Token<>(b, Boolean.class));
   }
 
-  private boolean booleanOp(Token<?> left, Token<?> right, BooleanOp op, String opName) {
+  private boolean booleanOp(final Token<?> left, final Token<?> right, final BooleanOp op, final String opName) {
     Boolean l = ConversionUtils.convert(left.getValue(), Boolean.class);
     Boolean r = ConversionUtils.convert(right.getValue(), Boolean.class);
     if (l == null || r == null) {
@@ -277,7 +277,8 @@ public class StellarCompiler extends StellarBaseListener {
    * @param token The token containing the function arguments.
    * @return
    */
-  private List<Object> getFunctionArguments(Token<?> token) {
+  @SuppressWarnings("unchecked")
+  private List<Object> getFunctionArguments(final Token<?> token) {
     if (token.getUnderlyingType().equals(List.class)) {
       return (List<Object>) token.getValue();
 
@@ -404,8 +405,8 @@ public class StellarCompiler extends StellarBaseListener {
   }
 
   public Object getResult() throws ParseException {
-    if(actualException != null) {
-      throw new ParseException("Unable to execute: " +actualException.getMessage(), actualException);
+    if (actualException != null) {
+      throw new ParseException("Unable to execute: " + actualException.getMessage(), actualException);
     }
     if (tokenStack.empty()) {
       throw new ParseException("Invalid predicate: Empty stack.");

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarBaseListener.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarBaseListener.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarBaseListener.java
index e0a8770..c13930d 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarBaseListener.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarBaseListener.java
@@ -2,7 +2,7 @@
 package org.apache.metron.common.stellar.generated;
 
 //CHECKSTYLE:OFF
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -109,6 +109,30 @@ public class StellarBaseListener implements StellarListener {
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
+	@Override public void enterLogicalExpression(StellarParser.LogicalExpressionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitLogicalExpression(StellarParser.LogicalExpressionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void enterInExpression(StellarParser.InExpressionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
+	@Override public void exitInExpression(StellarParser.InExpressionContext ctx) { }
+	/**
+	 * {@inheritDoc}
+	 *
+	 * <p>The default implementation does nothing.</p>
+	 */
 	@Override public void enterTernaryFuncWithoutIf(StellarParser.TernaryFuncWithoutIfContext ctx) { }
 	/**
 	 * {@inheritDoc}
@@ -133,145 +157,145 @@ public class StellarBaseListener implements StellarListener {
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterNotFunc(StellarParser.NotFuncContext ctx) { }
+	@Override public void enterLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitNotFunc(StellarParser.NotFuncContext ctx) { }
+	@Override public void exitLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx) { }
+	@Override public void enterLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx) { }
+	@Override public void exitLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterInExpression(StellarParser.InExpressionContext ctx) { }
+	@Override public void enterBoleanExpression(StellarParser.BoleanExpressionContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitInExpression(StellarParser.InExpressionContext ctx) { }
+	@Override public void exitBoleanExpression(StellarParser.BoleanExpressionContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx) { }
+	@Override public void enterB_expr(StellarParser.B_exprContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx) { }
+	@Override public void exitB_expr(StellarParser.B_exprContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx) { }
+	@Override public void enterInExpressionStatement(StellarParser.InExpressionStatementContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx) { }
+	@Override public void exitInExpressionStatement(StellarParser.InExpressionStatementContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterNInExpression(StellarParser.NInExpressionContext ctx) { }
+	@Override public void enterNInExpressionStatement(StellarParser.NInExpressionStatementContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitNInExpression(StellarParser.NInExpressionContext ctx) { }
+	@Override public void exitNInExpressionStatement(StellarParser.NInExpressionStatementContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx) { }
+	@Override public void enterNotFunc(StellarParser.NotFuncContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx) { }
+	@Override public void exitNotFunc(StellarParser.NotFuncContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterOperand(StellarParser.OperandContext ctx) { }
+	@Override public void enterComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitOperand(StellarParser.OperandContext ctx) { }
+	@Override public void exitComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterTransformation_entity(StellarParser.Transformation_entityContext ctx) { }
+	@Override public void enterComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitTransformation_entity(StellarParser.Transformation_entityContext ctx) { }
+	@Override public void exitComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterComparisonOp(StellarParser.ComparisonOpContext ctx) { }
+	@Override public void enterOperand(StellarParser.OperandContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitComparisonOp(StellarParser.ComparisonOpContext ctx) { }
+	@Override public void exitOperand(StellarParser.OperandContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterArithOp_plus(StellarParser.ArithOp_plusContext ctx) { }
+	@Override public void enterTransformation_entity(StellarParser.Transformation_entityContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitArithOp_plus(StellarParser.ArithOp_plusContext ctx) { }
+	@Override public void exitTransformation_entity(StellarParser.Transformation_entityContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void enterArithOp_mul(StellarParser.ArithOp_mulContext ctx) { }
+	@Override public void enterComparisonOp(StellarParser.ComparisonOpContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *
 	 * <p>The default implementation does nothing.</p>
 	 */
-	@Override public void exitArithOp_mul(StellarParser.ArithOp_mulContext ctx) { }
+	@Override public void exitComparisonOp(StellarParser.ComparisonOpContext ctx) { }
 	/**
 	 * {@inheritDoc}
 	 *

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarLexer.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarLexer.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarLexer.java
index 83e6434..ff2e9cb 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarLexer.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarLexer.java
@@ -2,7 +2,7 @@
 package org.apache.metron.common.stellar.generated;
 
 //CHECKSTYLE:OFF
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -49,13 +49,13 @@ public class StellarLexer extends Lexer {
 	};
 
 	public static final String[] ruleNames = {
-		"DOUBLE_QUOTE", "SINGLE_QUOTE", "COMMA", "PERIOD", "EOL", "AND", "OR", 
-		"NOT", "TRUE", "FALSE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "QUESTION", 
-		"COLON", "IF", "THEN", "ELSE", "NULL", "MINUS", "PLUS", "DIV", "MUL", 
-		"LBRACE", "RBRACE", "LBRACKET", "RBRACKET", "LPAREN", "RPAREN", "IN", 
-		"NIN", "EXISTS", "EXPONENT", "INT_LITERAL", "DOUBLE_LITERAL", "FLOAT_LITERAL", 
-		"LONG_LITERAL", "IDENTIFIER", "STRING_LITERAL", "COMMENT", "WS", "ZERO", 
-		"FIRST_DIGIT", "DIGIT", "SCHAR", "D", "E", "F", "L"
+		"DOUBLE_QUOTE", "SINGLE_QUOTE", "COMMA", "PERIOD", "AND", "OR", "NOT", 
+		"TRUE", "FALSE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "QUESTION", "COLON", 
+		"IF", "THEN", "ELSE", "NULL", "MINUS", "PLUS", "DIV", "MUL", "LBRACE", 
+		"RBRACE", "LBRACKET", "RBRACKET", "LPAREN", "RPAREN", "IN", "NIN", "EXISTS", 
+		"EXPONENT", "INT_LITERAL", "DOUBLE_LITERAL", "FLOAT_LITERAL", "LONG_LITERAL", 
+		"IDENTIFIER", "STRING_LITERAL", "COMMENT", "WS", "ZERO", "FIRST_DIGIT", 
+		"DIGIT", "SCHAR", "D", "E", "F", "L", "EOL"
 	};
 
 	private static final String[] _LITERAL_NAMES = {
@@ -133,156 +133,156 @@ public class StellarLexer extends Lexer {
 		"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
 		"\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+
 		",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t"+
-		"\64\4\65\t\65\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7"+
-		"\3\7\3\7\3\7\3\7\5\7~\n\7\3\b\3\b\3\b\3\b\3\b\3\b\5\b\u0086\n\b\3\t\3"+
-		"\t\3\t\3\t\3\t\3\t\5\t\u008e\n\t\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u0098"+
-		"\n\n\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13\u00a4\n\13"+
-		"\3\f\3\f\3\f\3\r\3\r\3\r\3\16\3\16\3\17\3\17\3\17\3\20\3\20\3\21\3\21"+
-		"\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\24\3\24\5\24\u00be\n\24\3\25\3\25"+
-		"\3\25\3\25\3\25\3\25\3\25\3\25\5\25\u00c8\n\25\3\26\3\26\3\26\3\26\3\26"+
-		"\3\26\3\26\3\26\5\26\u00d2\n\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+
-		"\5\27\u00dc\n\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35"+
-		"\3\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3\"\3\"\3\"\3\"\5\"\u00f6\n\"\3"+
-		"#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\5#\u0104\n#\3$\3$\3$\3$\3$\3$\3$\3"+
-		"$\3$\3$\3$\3$\5$\u0112\n$\3%\3%\3%\5%\u0117\n%\3%\6%\u011a\n%\r%\16%\u011b"+
-		"\3&\5&\u011f\n&\3&\3&\5&\u0123\n&\3&\3&\7&\u0127\n&\f&\16&\u012a\13&\5"+
-		"&\u012c\n&\3\'\3\'\3\'\7\'\u0131\n\'\f\'\16\'\u0134\13\'\3\'\5\'\u0137"+
-		"\n\'\3\'\5\'\u013a\n\'\3\'\3\'\6\'\u013e\n\'\r\'\16\'\u013f\3\'\5\'\u0143"+
-		"\n\'\3\'\5\'\u0146\n\'\3\'\3\'\3\'\5\'\u014b\n\'\3\'\3\'\5\'\u014f\n\'"+
-		"\3\'\3\'\5\'\u0153\n\'\3(\3(\3(\7(\u0158\n(\f(\16(\u015b\13(\3(\5(\u015e"+
-		"\n(\3(\3(\3(\5(\u0163\n(\3(\3(\6(\u0167\n(\r(\16(\u0168\3(\5(\u016c\n"+
-		"(\3(\3(\3(\3(\5(\u0172\n(\3(\3(\5(\u0176\n(\3)\3)\3)\3*\3*\7*\u017d\n"+
-		"*\f*\16*\u0180\13*\3+\3+\7+\u0184\n+\f+\16+\u0187\13+\3+\3+\3+\3+\7+\u018d"+
-		"\n+\f+\16+\u0190\13+\3+\3+\5+\u0194\n+\3,\3,\3,\3,\6,\u019a\n,\r,\16,"+
-		"\u019b\3,\3,\5,\u01a0\n,\3,\3,\3-\6-\u01a5\n-\r-\16-\u01a6\3-\3-\3.\3"+
-		".\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\65\3\65\3"+
-		"\u019b\2\66\3\3\5\4\7\5\t\6\13\2\r\7\17\b\21\t\23\n\25\13\27\f\31\r\33"+
-		"\16\35\17\37\20!\21#\22%\23\'\24)\25+\26-\27/\30\61\31\63\32\65\33\67"+
-		"\349\35;\36=\37? A!C\"E#G$I%K&M\'O(Q)S*U+W,Y-[\2]\2_\2a\2c\2e\2g\2i\2"+
-		"\3\2\n\5\2C\\aac|\b\2\60\60\62<C\\^^aac|\5\2\13\f\16\17\"\"\7\2\f\f\17"+
-		"\17$$))^^\4\2FFff\4\2GGgg\4\2HHhh\4\2NNnn\u01df\2\3\3\2\2\2\2\5\3\2\2"+
-		"\2\2\7\3\2\2\2\2\t\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23"+
-		"\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2"+
-		"\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2"+
-		"\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3"+
-		"\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2"+
-		"\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2"+
-		"\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\3k"+
-		"\3\2\2\2\5m\3\2\2\2\7o\3\2\2\2\tq\3\2\2\2\13s\3\2\2\2\r}\3\2\2\2\17\u0085"+
-		"\3\2\2\2\21\u008d\3\2\2\2\23\u0097\3\2\2\2\25\u00a3\3\2\2\2\27\u00a5\3"+
-		"\2\2\2\31\u00a8\3\2\2\2\33\u00ab\3\2\2\2\35\u00ad\3\2\2\2\37\u00b0\3\2"+
-		"\2\2!\u00b2\3\2\2\2#\u00b5\3\2\2\2%\u00b7\3\2\2\2\'\u00bd\3\2\2\2)\u00c7"+
-		"\3\2\2\2+\u00d1\3\2\2\2-\u00db\3\2\2\2/\u00dd\3\2\2\2\61\u00df\3\2\2\2"+
-		"\63\u00e1\3\2\2\2\65\u00e3\3\2\2\2\67\u00e5\3\2\2\29\u00e7\3\2\2\2;\u00e9"+
-		"\3\2\2\2=\u00eb\3\2\2\2?\u00ed\3\2\2\2A\u00ef\3\2\2\2C\u00f5\3\2\2\2E"+
-		"\u0103\3\2\2\2G\u0111\3\2\2\2I\u0113\3\2\2\2K\u012b\3\2\2\2M\u0152\3\2"+
-		"\2\2O\u0175\3\2\2\2Q\u0177\3\2\2\2S\u017a\3\2\2\2U\u0193\3\2\2\2W\u0195"+
-		"\3\2\2\2Y\u01a4\3\2\2\2[\u01aa\3\2\2\2]\u01ac\3\2\2\2_\u01ae\3\2\2\2a"+
-		"\u01b0\3\2\2\2c\u01b2\3\2\2\2e\u01b4\3\2\2\2g\u01b6\3\2\2\2i\u01b8\3\2"+
-		"\2\2kl\7$\2\2l\4\3\2\2\2mn\7)\2\2n\6\3\2\2\2op\7.\2\2p\b\3\2\2\2qr\7\60"+
-		"\2\2r\n\3\2\2\2st\7\f\2\2t\f\3\2\2\2uv\7c\2\2vw\7p\2\2w~\7f\2\2xy\7(\2"+
-		"\2y~\7(\2\2z{\7C\2\2{|\7P\2\2|~\7F\2\2}u\3\2\2\2}x\3\2\2\2}z\3\2\2\2~"+
-		"\16\3\2\2\2\177\u0080\7q\2\2\u0080\u0086\7t\2\2\u0081\u0082\7~\2\2\u0082"+
-		"\u0086\7~\2\2\u0083\u0084\7Q\2\2\u0084\u0086\7T\2\2\u0085\177\3\2\2\2"+
-		"\u0085\u0081\3\2\2\2\u0085\u0083\3\2\2\2\u0086\20\3\2\2\2\u0087\u0088"+
-		"\7p\2\2\u0088\u0089\7q\2\2\u0089\u008e\7v\2\2\u008a\u008b\7P\2\2\u008b"+
-		"\u008c\7Q\2\2\u008c\u008e\7V\2\2\u008d\u0087\3\2\2\2\u008d\u008a\3\2\2"+
-		"\2\u008e\22\3\2\2\2\u008f\u0090\7v\2\2\u0090\u0091\7t\2\2\u0091\u0092"+
-		"\7w\2\2\u0092\u0098\7g\2\2\u0093\u0094\7V\2\2\u0094\u0095\7T\2\2\u0095"+
-		"\u0096\7W\2\2\u0096\u0098\7G\2\2\u0097\u008f\3\2\2\2\u0097\u0093\3\2\2"+
-		"\2\u0098\24\3\2\2\2\u0099\u009a\7h\2\2\u009a\u009b\7c\2\2\u009b\u009c"+
-		"\7n\2\2\u009c\u009d\7u\2\2\u009d\u00a4\7g\2\2\u009e\u009f\7H\2\2\u009f"+
-		"\u00a0\7C\2\2\u00a0\u00a1\7N\2\2\u00a1\u00a2\7U\2\2\u00a2\u00a4\7G\2\2"+
-		"\u00a3\u0099\3\2\2\2\u00a3\u009e\3\2\2\2\u00a4\26\3\2\2\2\u00a5\u00a6"+
-		"\7?\2\2\u00a6\u00a7\7?\2\2\u00a7\30\3\2\2\2\u00a8\u00a9\7#\2\2\u00a9\u00aa"+
-		"\7?\2\2\u00aa\32\3\2\2\2\u00ab\u00ac\7>\2\2\u00ac\34\3\2\2\2\u00ad\u00ae"+
-		"\7>\2\2\u00ae\u00af\7?\2\2\u00af\36\3\2\2\2\u00b0\u00b1\7@\2\2\u00b1 "+
-		"\3\2\2\2\u00b2\u00b3\7@\2\2\u00b3\u00b4\7?\2\2\u00b4\"\3\2\2\2\u00b5\u00b6"+
-		"\7A\2\2\u00b6$\3\2\2\2\u00b7\u00b8\7<\2\2\u00b8&\3\2\2\2\u00b9\u00ba\7"+
-		"K\2\2\u00ba\u00be\7H\2\2\u00bb\u00bc\7k\2\2\u00bc\u00be\7h\2\2\u00bd\u00b9"+
-		"\3\2\2\2\u00bd\u00bb\3\2\2\2\u00be(\3\2\2\2\u00bf\u00c0\7V\2\2\u00c0\u00c1"+
-		"\7J\2\2\u00c1\u00c2\7G\2\2\u00c2\u00c8\7P\2\2\u00c3\u00c4\7v\2\2\u00c4"+
-		"\u00c5\7j\2\2\u00c5\u00c6\7g\2\2\u00c6\u00c8\7p\2\2\u00c7\u00bf\3\2\2"+
-		"\2\u00c7\u00c3\3\2\2\2\u00c8*\3\2\2\2\u00c9\u00ca\7G\2\2\u00ca\u00cb\7"+
-		"N\2\2\u00cb\u00cc\7U\2\2\u00cc\u00d2\7G\2\2\u00cd\u00ce\7g\2\2\u00ce\u00cf"+
-		"\7n\2\2\u00cf\u00d0\7u\2\2\u00d0\u00d2\7g\2\2\u00d1\u00c9\3\2\2\2\u00d1"+
-		"\u00cd\3\2\2\2\u00d2,\3\2\2\2\u00d3\u00d4\7p\2\2\u00d4\u00d5\7w\2\2\u00d5"+
-		"\u00d6\7n\2\2\u00d6\u00dc\7n\2\2\u00d7\u00d8\7P\2\2\u00d8\u00d9\7W\2\2"+
-		"\u00d9\u00da\7N\2\2\u00da\u00dc\7N\2\2\u00db\u00d3\3\2\2\2\u00db\u00d7"+
-		"\3\2\2\2\u00dc.\3\2\2\2\u00dd\u00de\7/\2\2\u00de\60\3\2\2\2\u00df\u00e0"+
-		"\7-\2\2\u00e0\62\3\2\2\2\u00e1\u00e2\7\61\2\2\u00e2\64\3\2\2\2\u00e3\u00e4"+
-		"\7,\2\2\u00e4\66\3\2\2\2\u00e5\u00e6\7}\2\2\u00e68\3\2\2\2\u00e7\u00e8"+
-		"\7\177\2\2\u00e8:\3\2\2\2\u00e9\u00ea\7]\2\2\u00ea<\3\2\2\2\u00eb\u00ec"+
-		"\7_\2\2\u00ec>\3\2\2\2\u00ed\u00ee\7*\2\2\u00ee@\3\2\2\2\u00ef\u00f0\7"+
-		"+\2\2\u00f0B\3\2\2\2\u00f1\u00f2\7k\2\2\u00f2\u00f6\7p\2\2\u00f3\u00f4"+
-		"\7K\2\2\u00f4\u00f6\7P\2\2\u00f5\u00f1\3\2\2\2\u00f5\u00f3\3\2\2\2\u00f6"+
-		"D\3\2\2\2\u00f7\u00f8\7p\2\2\u00f8\u00f9\7q\2\2\u00f9\u00fa\7v\2\2\u00fa"+
-		"\u00fb\7\"\2\2\u00fb\u00fc\7k\2\2\u00fc\u0104\7p\2\2\u00fd\u00fe\7P\2"+
-		"\2\u00fe\u00ff\7Q\2\2\u00ff\u0100\7V\2\2\u0100\u0101\7\"\2\2\u0101\u0102"+
-		"\7K\2\2\u0102\u0104\7P\2\2\u0103\u00f7\3\2\2\2\u0103\u00fd\3\2\2\2\u0104"+
-		"F\3\2\2\2\u0105\u0106\7g\2\2\u0106\u0107\7z\2\2\u0107\u0108\7k\2\2\u0108"+
-		"\u0109\7u\2\2\u0109\u010a\7v\2\2\u010a\u0112\7u\2\2\u010b\u010c\7G\2\2"+
-		"\u010c\u010d\7Z\2\2\u010d\u010e\7K\2\2\u010e\u010f\7U\2\2\u010f\u0110"+
-		"\7V\2\2\u0110\u0112\7U\2\2\u0111\u0105\3\2\2\2\u0111\u010b\3\2\2\2\u0112"+
-		"H\3\2\2\2\u0113\u0116\5e\63\2\u0114\u0117\5\61\31\2\u0115\u0117\5/\30"+
-		"\2\u0116\u0114\3\2\2\2\u0116\u0115\3\2\2\2\u0116\u0117\3\2\2\2\u0117\u0119"+
-		"\3\2\2\2\u0118\u011a\5_\60\2\u0119\u0118\3\2\2\2\u011a\u011b\3\2\2\2\u011b"+
-		"\u0119\3\2\2\2\u011b\u011c\3\2\2\2\u011cJ\3\2\2\2\u011d\u011f\5/\30\2"+
-		"\u011e\u011d\3\2\2\2\u011e\u011f\3\2\2\2\u011f\u0120\3\2\2\2\u0120\u012c"+
-		"\5[.\2\u0121\u0123\5/\30\2\u0122\u0121\3\2\2\2\u0122\u0123\3\2\2\2\u0123"+
-		"\u0124\3\2\2\2\u0124\u0128\5]/\2\u0125\u0127\5_\60\2\u0126\u0125\3\2\2"+
-		"\2\u0127\u012a\3\2\2\2\u0128\u0126\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012c"+
-		"\3\2\2\2\u012a\u0128\3\2\2\2\u012b\u011e\3\2\2\2\u012b\u0122\3\2\2\2\u012c"+
-		"L\3\2\2\2\u012d\u012e\5K&\2\u012e\u0132\5\t\5\2\u012f\u0131\5_\60\2\u0130"+
-		"\u012f\3\2\2\2\u0131\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0132\u0133\3\2"+
-		"\2\2\u0133\u0136\3\2\2\2\u0134\u0132\3\2\2\2\u0135\u0137\5I%\2\u0136\u0135"+
-		"\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u0139\3\2\2\2\u0138\u013a\5c\62\2\u0139"+
-		"\u0138\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u0153\3\2\2\2\u013b\u013d\5\t"+
-		"\5\2\u013c\u013e\5_\60\2\u013d\u013c\3\2\2\2\u013e\u013f\3\2\2\2\u013f"+
-		"\u013d\3\2\2\2\u013f\u0140\3\2\2\2\u0140\u0142\3\2\2\2\u0141\u0143\5I"+
-		"%\2\u0142\u0141\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u0145\3\2\2\2\u0144"+
-		"\u0146\5c\62\2\u0145\u0144\3\2\2\2\u0145\u0146\3\2\2\2\u0146\u0153\3\2"+
-		"\2\2\u0147\u0148\5K&\2\u0148\u014a\5I%\2\u0149\u014b\5c\62\2\u014a\u0149"+
-		"\3\2\2\2\u014a\u014b\3\2\2\2\u014b\u0153\3\2\2\2\u014c\u014e\5K&\2\u014d"+
-		"\u014f\5I%\2\u014e\u014d\3\2\2\2\u014e\u014f\3\2\2\2\u014f\u0150\3\2\2"+
-		"\2\u0150\u0151\5c\62\2\u0151\u0153\3\2\2\2\u0152\u012d\3\2\2\2\u0152\u013b"+
-		"\3\2\2\2\u0152\u0147\3\2\2\2\u0152\u014c\3\2\2\2\u0153N\3\2\2\2\u0154"+
-		"\u0155\5K&\2\u0155\u0159\5\t\5\2\u0156\u0158\5_\60\2\u0157\u0156\3\2\2"+
-		"\2\u0158\u015b\3\2\2\2\u0159\u0157\3\2\2\2\u0159\u015a\3\2\2\2\u015a\u015d"+
-		"\3\2\2\2\u015b\u0159\3\2\2\2\u015c\u015e\5I%\2\u015d\u015c\3\2\2\2\u015d"+
-		"\u015e\3\2\2\2\u015e\u015f\3\2\2\2\u015f\u0160\5g\64\2\u0160\u0176\3\2"+
-		"\2\2\u0161\u0163\5/\30\2\u0162\u0161\3\2\2\2\u0162\u0163\3\2\2\2\u0163"+
-		"\u0164\3\2\2\2\u0164\u0166\5\t\5\2\u0165\u0167\5_\60\2\u0166\u0165\3\2"+
-		"\2\2\u0167\u0168\3\2\2\2\u0168\u0166\3\2\2\2\u0168\u0169\3\2\2\2\u0169"+
-		"\u016b\3\2\2\2\u016a\u016c\5I%\2\u016b\u016a\3\2\2\2\u016b\u016c\3\2\2"+
-		"\2\u016c\u016d\3\2\2\2\u016d\u016e\5g\64\2\u016e\u0176\3\2\2\2\u016f\u0171"+
-		"\5K&\2\u0170\u0172\5I%\2\u0171\u0170\3\2\2\2\u0171\u0172\3\2\2\2\u0172"+
-		"\u0173\3\2\2\2\u0173\u0174\5g\64\2\u0174\u0176\3\2\2\2\u0175\u0154\3\2"+
-		"\2\2\u0175\u0162\3\2\2\2\u0175\u016f\3\2\2\2\u0176P\3\2\2\2\u0177\u0178"+
-		"\5K&\2\u0178\u0179\5i\65\2\u0179R\3\2\2\2\u017a\u017e\t\2\2\2\u017b\u017d"+
-		"\t\3\2\2\u017c\u017b\3\2\2\2\u017d\u0180\3\2\2\2\u017e\u017c\3\2\2\2\u017e"+
-		"\u017f\3\2\2\2\u017fT\3\2\2\2\u0180\u017e\3\2\2\2\u0181\u0185\5\3\2\2"+
-		"\u0182\u0184\5a\61\2\u0183\u0182\3\2\2\2\u0184\u0187\3\2\2\2\u0185\u0183"+
-		"\3\2\2\2\u0185\u0186\3\2\2\2\u0186\u0188\3\2\2\2\u0187\u0185\3\2\2\2\u0188"+
-		"\u0189\5\3\2\2\u0189\u0194\3\2\2\2\u018a\u018e\5\5\3\2\u018b\u018d\5a"+
-		"\61\2\u018c\u018b\3\2\2\2\u018d\u0190\3\2\2\2\u018e\u018c\3\2\2\2\u018e"+
-		"\u018f\3\2\2\2\u018f\u0191\3\2\2\2\u0190\u018e\3\2\2\2\u0191\u0192\5\5"+
-		"\3\2\u0192\u0194\3\2\2\2\u0193\u0181\3\2\2\2\u0193\u018a\3\2\2\2\u0194"+
-		"V\3\2\2\2\u0195\u0196\7\61\2\2\u0196\u0197\7\61\2\2\u0197\u0199\3\2\2"+
-		"\2\u0198\u019a\13\2\2\2\u0199\u0198\3\2\2\2\u019a\u019b\3\2\2\2\u019b"+
-		"\u019c\3\2\2\2\u019b\u0199\3\2\2\2\u019c\u019f\3\2\2\2\u019d\u01a0\5\13"+
-		"\6\2\u019e\u01a0\7\2\2\3\u019f\u019d\3\2\2\2\u019f\u019e\3\2\2\2\u01a0"+
-		"\u01a1\3\2\2\2\u01a1\u01a2\b,\2\2\u01a2X\3\2\2\2\u01a3\u01a5\t\4\2\2\u01a4"+
-		"\u01a3\3\2\2\2\u01a5\u01a6\3\2\2\2\u01a6\u01a4\3\2\2\2\u01a6\u01a7\3\2"+
-		"\2\2\u01a7\u01a8\3\2\2\2\u01a8\u01a9\b-\2\2\u01a9Z\3\2\2\2\u01aa\u01ab"+
-		"\7\62\2\2\u01ab\\\3\2\2\2\u01ac\u01ad\4\63;\2\u01ad^\3\2\2\2\u01ae\u01af"+
-		"\4\62;\2\u01af`\3\2\2\2\u01b0\u01b1\n\5\2\2\u01b1b\3\2\2\2\u01b2\u01b3"+
-		"\t\6\2\2\u01b3d\3\2\2\2\u01b4\u01b5\t\7\2\2\u01b5f\3\2\2\2\u01b6\u01b7"+
-		"\t\b\2\2\u01b7h\3\2\2\2\u01b8\u01b9\t\t\2\2\u01b9j\3\2\2\2,\2}\u0085\u008d"+
-		"\u0097\u00a3\u00bd\u00c7\u00d1\u00db\u00f5\u0103\u0111\u0116\u011b\u011e"+
-		"\u0122\u0128\u012b\u0132\u0136\u0139\u013f\u0142\u0145\u014a\u014e\u0152"+
-		"\u0159\u015d\u0162\u0168\u016b\u0171\u0175\u017e\u0185\u018e\u0193\u019b"+
-		"\u019f\u01a6\3\b\2\2";
+		"\64\4\65\t\65\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6"+
+		"\3\6\3\6\5\6|\n\6\3\7\3\7\3\7\3\7\3\7\3\7\5\7\u0084\n\7\3\b\3\b\3\b\3"+
+		"\b\3\b\3\b\5\b\u008c\n\b\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\t\u0096\n\t"+
+		"\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u00a2\n\n\3\13\3\13\3\13"+
+		"\3\f\3\f\3\f\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\20\3\20\3\20\3\21\3\21"+
+		"\3\22\3\22\3\23\3\23\3\23\3\23\5\23\u00bc\n\23\3\24\3\24\3\24\3\24\3\24"+
+		"\3\24\3\24\3\24\5\24\u00c6\n\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+
+		"\5\25\u00d0\n\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\5\26\u00da\n"+
+		"\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35\3"+
+		"\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3!\3!\5!\u00f4\n!\3\"\3\"\3\"\3\""+
+		"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3\"\5\"\u0102\n\"\3#\3#\3#\3#\3#\3#\3#\3"+
+		"#\3#\3#\3#\3#\5#\u0110\n#\3$\3$\3$\5$\u0115\n$\3$\6$\u0118\n$\r$\16$\u0119"+
+		"\3%\5%\u011d\n%\3%\3%\5%\u0121\n%\3%\3%\7%\u0125\n%\f%\16%\u0128\13%\5"+
+		"%\u012a\n%\3&\3&\3&\7&\u012f\n&\f&\16&\u0132\13&\3&\5&\u0135\n&\3&\5&"+
+		"\u0138\n&\3&\3&\6&\u013c\n&\r&\16&\u013d\3&\5&\u0141\n&\3&\5&\u0144\n"+
+		"&\3&\3&\3&\5&\u0149\n&\3&\3&\5&\u014d\n&\3&\3&\5&\u0151\n&\3\'\3\'\3\'"+
+		"\7\'\u0156\n\'\f\'\16\'\u0159\13\'\3\'\5\'\u015c\n\'\3\'\3\'\3\'\5\'\u0161"+
+		"\n\'\3\'\3\'\6\'\u0165\n\'\r\'\16\'\u0166\3\'\5\'\u016a\n\'\3\'\3\'\3"+
+		"\'\3\'\5\'\u0170\n\'\3\'\3\'\5\'\u0174\n\'\3(\3(\3(\3)\3)\7)\u017b\n)"+
+		"\f)\16)\u017e\13)\3*\3*\7*\u0182\n*\f*\16*\u0185\13*\3*\3*\3*\3*\7*\u018b"+
+		"\n*\f*\16*\u018e\13*\3*\3*\5*\u0192\n*\3+\3+\3+\3+\6+\u0198\n+\r+\16+"+
+		"\u0199\3+\3+\5+\u019e\n+\3+\3+\3,\6,\u01a3\n,\r,\16,\u01a4\3,\3,\3-\3"+
+		"-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\62\3\62\3\63\3\63\3\64\3\64\3\65\3"+
+		"\65\3\u0199\2\66\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31"+
+		"\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65"+
+		"\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y\2[\2]\2_\2a\2c\2e\2g"+
+		"\2i\2\3\2\n\5\2C\\aac|\b\2\60\60\62<C\\^^aac|\5\2\13\f\16\17\"\"\7\2\f"+
+		"\f\17\17$$))^^\4\2FFff\4\2GGgg\4\2HHhh\4\2NNnn\u01df\2\3\3\2\2\2\2\5\3"+
+		"\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2"+
+		"\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3\2\2\2\2\33\3"+
+		"\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'"+
+		"\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63"+
+		"\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2\2=\3\2\2\2\2"+
+		"?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3"+
+		"\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2"+
+		"\2\3k\3\2\2\2\5m\3\2\2\2\7o\3\2\2\2\tq\3\2\2\2\13{\3\2\2\2\r\u0083\3\2"+
+		"\2\2\17\u008b\3\2\2\2\21\u0095\3\2\2\2\23\u00a1\3\2\2\2\25\u00a3\3\2\2"+
+		"\2\27\u00a6\3\2\2\2\31\u00a9\3\2\2\2\33\u00ab\3\2\2\2\35\u00ae\3\2\2\2"+
+		"\37\u00b0\3\2\2\2!\u00b3\3\2\2\2#\u00b5\3\2\2\2%\u00bb\3\2\2\2\'\u00c5"+
+		"\3\2\2\2)\u00cf\3\2\2\2+\u00d9\3\2\2\2-\u00db\3\2\2\2/\u00dd\3\2\2\2\61"+
+		"\u00df\3\2\2\2\63\u00e1\3\2\2\2\65\u00e3\3\2\2\2\67\u00e5\3\2\2\29\u00e7"+
+		"\3\2\2\2;\u00e9\3\2\2\2=\u00eb\3\2\2\2?\u00ed\3\2\2\2A\u00f3\3\2\2\2C"+
+		"\u0101\3\2\2\2E\u010f\3\2\2\2G\u0111\3\2\2\2I\u0129\3\2\2\2K\u0150\3\2"+
+		"\2\2M\u0173\3\2\2\2O\u0175\3\2\2\2Q\u0178\3\2\2\2S\u0191\3\2\2\2U\u0193"+
+		"\3\2\2\2W\u01a2\3\2\2\2Y\u01a8\3\2\2\2[\u01aa\3\2\2\2]\u01ac\3\2\2\2_"+
+		"\u01ae\3\2\2\2a\u01b0\3\2\2\2c\u01b2\3\2\2\2e\u01b4\3\2\2\2g\u01b6\3\2"+
+		"\2\2i\u01b8\3\2\2\2kl\7$\2\2l\4\3\2\2\2mn\7)\2\2n\6\3\2\2\2op\7.\2\2p"+
+		"\b\3\2\2\2qr\7\60\2\2r\n\3\2\2\2st\7c\2\2tu\7p\2\2u|\7f\2\2vw\7(\2\2w"+
+		"|\7(\2\2xy\7C\2\2yz\7P\2\2z|\7F\2\2{s\3\2\2\2{v\3\2\2\2{x\3\2\2\2|\f\3"+
+		"\2\2\2}~\7q\2\2~\u0084\7t\2\2\177\u0080\7~\2\2\u0080\u0084\7~\2\2\u0081"+
+		"\u0082\7Q\2\2\u0082\u0084\7T\2\2\u0083}\3\2\2\2\u0083\177\3\2\2\2\u0083"+
+		"\u0081\3\2\2\2\u0084\16\3\2\2\2\u0085\u0086\7p\2\2\u0086\u0087\7q\2\2"+
+		"\u0087\u008c\7v\2\2\u0088\u0089\7P\2\2\u0089\u008a\7Q\2\2\u008a\u008c"+
+		"\7V\2\2\u008b\u0085\3\2\2\2\u008b\u0088\3\2\2\2\u008c\20\3\2\2\2\u008d"+
+		"\u008e\7v\2\2\u008e\u008f\7t\2\2\u008f\u0090\7w\2\2\u0090\u0096\7g\2\2"+
+		"\u0091\u0092\7V\2\2\u0092\u0093\7T\2\2\u0093\u0094\7W\2\2\u0094\u0096"+
+		"\7G\2\2\u0095\u008d\3\2\2\2\u0095\u0091\3\2\2\2\u0096\22\3\2\2\2\u0097"+
+		"\u0098\7h\2\2\u0098\u0099\7c\2\2\u0099\u009a\7n\2\2\u009a\u009b\7u\2\2"+
+		"\u009b\u00a2\7g\2\2\u009c\u009d\7H\2\2\u009d\u009e\7C\2\2\u009e\u009f"+
+		"\7N\2\2\u009f\u00a0\7U\2\2\u00a0\u00a2\7G\2\2\u00a1\u0097\3\2\2\2\u00a1"+
+		"\u009c\3\2\2\2\u00a2\24\3\2\2\2\u00a3\u00a4\7?\2\2\u00a4\u00a5\7?\2\2"+
+		"\u00a5\26\3\2\2\2\u00a6\u00a7\7#\2\2\u00a7\u00a8\7?\2\2\u00a8\30\3\2\2"+
+		"\2\u00a9\u00aa\7>\2\2\u00aa\32\3\2\2\2\u00ab\u00ac\7>\2\2\u00ac\u00ad"+
+		"\7?\2\2\u00ad\34\3\2\2\2\u00ae\u00af\7@\2\2\u00af\36\3\2\2\2\u00b0\u00b1"+
+		"\7@\2\2\u00b1\u00b2\7?\2\2\u00b2 \3\2\2\2\u00b3\u00b4\7A\2\2\u00b4\"\3"+
+		"\2\2\2\u00b5\u00b6\7<\2\2\u00b6$\3\2\2\2\u00b7\u00b8\7K\2\2\u00b8\u00bc"+
+		"\7H\2\2\u00b9\u00ba\7k\2\2\u00ba\u00bc\7h\2\2\u00bb\u00b7\3\2\2\2\u00bb"+
+		"\u00b9\3\2\2\2\u00bc&\3\2\2\2\u00bd\u00be\7V\2\2\u00be\u00bf\7J\2\2\u00bf"+
+		"\u00c0\7G\2\2\u00c0\u00c6\7P\2\2\u00c1\u00c2\7v\2\2\u00c2\u00c3\7j\2\2"+
+		"\u00c3\u00c4\7g\2\2\u00c4\u00c6\7p\2\2\u00c5\u00bd\3\2\2\2\u00c5\u00c1"+
+		"\3\2\2\2\u00c6(\3\2\2\2\u00c7\u00c8\7G\2\2\u00c8\u00c9\7N\2\2\u00c9\u00ca"+
+		"\7U\2\2\u00ca\u00d0\7G\2\2\u00cb\u00cc\7g\2\2\u00cc\u00cd\7n\2\2\u00cd"+
+		"\u00ce\7u\2\2\u00ce\u00d0\7g\2\2\u00cf\u00c7\3\2\2\2\u00cf\u00cb\3\2\2"+
+		"\2\u00d0*\3\2\2\2\u00d1\u00d2\7p\2\2\u00d2\u00d3\7w\2\2\u00d3\u00d4\7"+
+		"n\2\2\u00d4\u00da\7n\2\2\u00d5\u00d6\7P\2\2\u00d6\u00d7\7W\2\2\u00d7\u00d8"+
+		"\7N\2\2\u00d8\u00da\7N\2\2\u00d9\u00d1\3\2\2\2\u00d9\u00d5\3\2\2\2\u00da"+
+		",\3\2\2\2\u00db\u00dc\7/\2\2\u00dc.\3\2\2\2\u00dd\u00de\7-\2\2\u00de\60"+
+		"\3\2\2\2\u00df\u00e0\7\61\2\2\u00e0\62\3\2\2\2\u00e1\u00e2\7,\2\2\u00e2"+
+		"\64\3\2\2\2\u00e3\u00e4\7}\2\2\u00e4\66\3\2\2\2\u00e5\u00e6\7\177\2\2"+
+		"\u00e68\3\2\2\2\u00e7\u00e8\7]\2\2\u00e8:\3\2\2\2\u00e9\u00ea\7_\2\2\u00ea"+
+		"<\3\2\2\2\u00eb\u00ec\7*\2\2\u00ec>\3\2\2\2\u00ed\u00ee\7+\2\2\u00ee@"+
+		"\3\2\2\2\u00ef\u00f0\7k\2\2\u00f0\u00f4\7p\2\2\u00f1\u00f2\7K\2\2\u00f2"+
+		"\u00f4\7P\2\2\u00f3\u00ef\3\2\2\2\u00f3\u00f1\3\2\2\2\u00f4B\3\2\2\2\u00f5"+
+		"\u00f6\7p\2\2\u00f6\u00f7\7q\2\2\u00f7\u00f8\7v\2\2\u00f8\u00f9\7\"\2"+
+		"\2\u00f9\u00fa\7k\2\2\u00fa\u0102\7p\2\2\u00fb\u00fc\7P\2\2\u00fc\u00fd"+
+		"\7Q\2\2\u00fd\u00fe\7V\2\2\u00fe\u00ff\7\"\2\2\u00ff\u0100\7K\2\2\u0100"+
+		"\u0102\7P\2\2\u0101\u00f5\3\2\2\2\u0101\u00fb\3\2\2\2\u0102D\3\2\2\2\u0103"+
+		"\u0104\7g\2\2\u0104\u0105\7z\2\2\u0105\u0106\7k\2\2\u0106\u0107\7u\2\2"+
+		"\u0107\u0108\7v\2\2\u0108\u0110\7u\2\2\u0109\u010a\7G\2\2\u010a\u010b"+
+		"\7Z\2\2\u010b\u010c\7K\2\2\u010c\u010d\7U\2\2\u010d\u010e\7V\2\2\u010e"+
+		"\u0110\7U\2\2\u010f\u0103\3\2\2\2\u010f\u0109\3\2\2\2\u0110F\3\2\2\2\u0111"+
+		"\u0114\5c\62\2\u0112\u0115\5/\30\2\u0113\u0115\5-\27\2\u0114\u0112\3\2"+
+		"\2\2\u0114\u0113\3\2\2\2\u0114\u0115\3\2\2\2\u0115\u0117\3\2\2\2\u0116"+
+		"\u0118\5]/\2\u0117\u0116\3\2\2\2\u0118\u0119\3\2\2\2\u0119\u0117\3\2\2"+
+		"\2\u0119\u011a\3\2\2\2\u011aH\3\2\2\2\u011b\u011d\5-\27\2\u011c\u011b"+
+		"\3\2\2\2\u011c\u011d\3\2\2\2\u011d\u011e\3\2\2\2\u011e\u012a\5Y-\2\u011f"+
+		"\u0121\5-\27\2\u0120\u011f\3\2\2\2\u0120\u0121\3\2\2\2\u0121\u0122\3\2"+
+		"\2\2\u0122\u0126\5[.\2\u0123\u0125\5]/\2\u0124\u0123\3\2\2\2\u0125\u0128"+
+		"\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u012a\3\2\2\2\u0128"+
+		"\u0126\3\2\2\2\u0129\u011c\3\2\2\2\u0129\u0120\3\2\2\2\u012aJ\3\2\2\2"+
+		"\u012b\u012c\5I%\2\u012c\u0130\5\t\5\2\u012d\u012f\5]/\2\u012e\u012d\3"+
+		"\2\2\2\u012f\u0132\3\2\2\2\u0130\u012e\3\2\2\2\u0130\u0131\3\2\2\2\u0131"+
+		"\u0134\3\2\2\2\u0132\u0130\3\2\2\2\u0133\u0135\5G$\2\u0134\u0133\3\2\2"+
+		"\2\u0134\u0135\3\2\2\2\u0135\u0137\3\2\2\2\u0136\u0138\5a\61\2\u0137\u0136"+
+		"\3\2\2\2\u0137\u0138\3\2\2\2\u0138\u0151\3\2\2\2\u0139\u013b\5\t\5\2\u013a"+
+		"\u013c\5]/\2\u013b\u013a\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u013b\3\2\2"+
+		"\2\u013d\u013e\3\2\2\2\u013e\u0140\3\2\2\2\u013f\u0141\5G$\2\u0140\u013f"+
+		"\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0143\3\2\2\2\u0142\u0144\5a\61\2\u0143"+
+		"\u0142\3\2\2\2\u0143\u0144\3\2\2\2\u0144\u0151\3\2\2\2\u0145\u0146\5I"+
+		"%\2\u0146\u0148\5G$\2\u0147\u0149\5a\61\2\u0148\u0147\3\2\2\2\u0148\u0149"+
+		"\3\2\2\2\u0149\u0151\3\2\2\2\u014a\u014c\5I%\2\u014b\u014d\5G$\2\u014c"+
+		"\u014b\3\2\2\2\u014c\u014d\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u014f\5a"+
+		"\61\2\u014f\u0151\3\2\2\2\u0150\u012b\3\2\2\2\u0150\u0139\3\2\2\2\u0150"+
+		"\u0145\3\2\2\2\u0150\u014a\3\2\2\2\u0151L\3\2\2\2\u0152\u0153\5I%\2\u0153"+
+		"\u0157\5\t\5\2\u0154\u0156\5]/\2\u0155\u0154\3\2\2\2\u0156\u0159\3\2\2"+
+		"\2\u0157\u0155\3\2\2\2\u0157\u0158\3\2\2\2\u0158\u015b\3\2\2\2\u0159\u0157"+
+		"\3\2\2\2\u015a\u015c\5G$\2\u015b\u015a\3\2\2\2\u015b\u015c\3\2\2\2\u015c"+
+		"\u015d\3\2\2\2\u015d\u015e\5e\63\2\u015e\u0174\3\2\2\2\u015f\u0161\5-"+
+		"\27\2\u0160\u015f\3\2\2\2\u0160\u0161\3\2\2\2\u0161\u0162\3\2\2\2\u0162"+
+		"\u0164\5\t\5\2\u0163\u0165\5]/\2\u0164\u0163\3\2\2\2\u0165\u0166\3\2\2"+
+		"\2\u0166\u0164\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0169\3\2\2\2\u0168\u016a"+
+		"\5G$\2\u0169\u0168\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016b\3\2\2\2\u016b"+
+		"\u016c\5e\63\2\u016c\u0174\3\2\2\2\u016d\u016f\5I%\2\u016e\u0170\5G$\2"+
+		"\u016f\u016e\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u0171\3\2\2\2\u0171\u0172"+
+		"\5e\63\2\u0172\u0174\3\2\2\2\u0173\u0152\3\2\2\2\u0173\u0160\3\2\2\2\u0173"+
+		"\u016d\3\2\2\2\u0174N\3\2\2\2\u0175\u0176\5I%\2\u0176\u0177\5g\64\2\u0177"+
+		"P\3\2\2\2\u0178\u017c\t\2\2\2\u0179\u017b\t\3\2\2\u017a\u0179\3\2\2\2"+
+		"\u017b\u017e\3\2\2\2\u017c\u017a\3\2\2\2\u017c\u017d\3\2\2\2\u017dR\3"+
+		"\2\2\2\u017e\u017c\3\2\2\2\u017f\u0183\5\3\2\2\u0180\u0182\5_\60\2\u0181"+
+		"\u0180\3\2\2\2\u0182\u0185\3\2\2\2\u0183\u0181\3\2\2\2\u0183\u0184\3\2"+
+		"\2\2\u0184\u0186\3\2\2\2\u0185\u0183\3\2\2\2\u0186\u0187\5\3\2\2\u0187"+
+		"\u0192\3\2\2\2\u0188\u018c\5\5\3\2\u0189\u018b\5_\60\2\u018a\u0189\3\2"+
+		"\2\2\u018b\u018e\3\2\2\2\u018c\u018a\3\2\2\2\u018c\u018d\3\2\2\2\u018d"+
+		"\u018f\3\2\2\2\u018e\u018c\3\2\2\2\u018f\u0190\5\5\3\2\u0190\u0192\3\2"+
+		"\2\2\u0191\u017f\3\2\2\2\u0191\u0188\3\2\2\2\u0192T\3\2\2\2\u0193\u0194"+
+		"\7\61\2\2\u0194\u0195\7\61\2\2\u0195\u0197\3\2\2\2\u0196\u0198\13\2\2"+
+		"\2\u0197\u0196\3\2\2\2\u0198\u0199\3\2\2\2\u0199\u019a\3\2\2\2\u0199\u0197"+
+		"\3\2\2\2\u019a\u019d\3\2\2\2\u019b\u019e\5i\65\2\u019c\u019e\7\2\2\3\u019d"+
+		"\u019b\3\2\2\2\u019d\u019c\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a0\b+"+
+		"\2\2\u01a0V\3\2\2\2\u01a1\u01a3\t\4\2\2\u01a2\u01a1\3\2\2\2\u01a3\u01a4"+
+		"\3\2\2\2\u01a4\u01a2\3\2\2\2\u01a4\u01a5\3\2\2\2\u01a5\u01a6\3\2\2\2\u01a6"+
+		"\u01a7\b,\2\2\u01a7X\3\2\2\2\u01a8\u01a9\7\62\2\2\u01a9Z\3\2\2\2\u01aa"+
+		"\u01ab\4\63;\2\u01ab\\\3\2\2\2\u01ac\u01ad\4\62;\2\u01ad^\3\2\2\2\u01ae"+
+		"\u01af\n\5\2\2\u01af`\3\2\2\2\u01b0\u01b1\t\6\2\2\u01b1b\3\2\2\2\u01b2"+
+		"\u01b3\t\7\2\2\u01b3d\3\2\2\2\u01b4\u01b5\t\b\2\2\u01b5f\3\2\2\2\u01b6"+
+		"\u01b7\t\t\2\2\u01b7h\3\2\2\2\u01b8\u01b9\7\f\2\2\u01b9j\3\2\2\2,\2{\u0083"+
+		"\u008b\u0095\u00a1\u00bb\u00c5\u00cf\u00d9\u00f3\u0101\u010f\u0114\u0119"+
+		"\u011c\u0120\u0126\u0129\u0130\u0134\u0137\u013d\u0140\u0143\u0148\u014c"+
+		"\u0150\u0157\u015b\u0160\u0166\u0169\u016f\u0173\u017c\u0183\u018c\u0191"+
+		"\u0199\u019d\u01a4\3\b\2\2";
 	public static final ATN _ATN =
 		new ATNDeserializer().deserialize(_serializedATN.toCharArray());
 	static {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarListener.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarListener.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarListener.java
index 8755e2a..bf3c272 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarListener.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarListener.java
@@ -2,7 +2,7 @@
 package org.apache.metron.common.stellar.generated;
 
 //CHECKSTYLE:OFF
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -99,6 +99,30 @@ public interface StellarListener extends ParseTreeListener {
 	 */
 	void exitComparisonExpression(StellarParser.ComparisonExpressionContext ctx);
 	/**
+	 * Enter a parse tree produced by the {@code LogicalExpression}
+	 * labeled alternative in {@link StellarParser#transformation_expr}.
+	 * @param ctx the parse tree
+	 */
+	void enterLogicalExpression(StellarParser.LogicalExpressionContext ctx);
+	/**
+	 * Exit a parse tree produced by the {@code LogicalExpression}
+	 * labeled alternative in {@link StellarParser#transformation_expr}.
+	 * @param ctx the parse tree
+	 */
+	void exitLogicalExpression(StellarParser.LogicalExpressionContext ctx);
+	/**
+	 * Enter a parse tree produced by the {@code InExpression}
+	 * labeled alternative in {@link StellarParser#transformation_expr}.
+	 * @param ctx the parse tree
+	 */
+	void enterInExpression(StellarParser.InExpressionContext ctx);
+	/**
+	 * Exit a parse tree produced by the {@code InExpression}
+	 * labeled alternative in {@link StellarParser#transformation_expr}.
+	 * @param ctx the parse tree
+	 */
+	void exitInExpression(StellarParser.InExpressionContext ctx);
+	/**
 	 * Enter a parse tree produced by the {@code TernaryFuncWithoutIf}
 	 * labeled alternative in {@link StellarParser#conditional_expr}.
 	 * @param ctx the parse tree
@@ -123,89 +147,111 @@ public interface StellarListener extends ParseTreeListener {
 	 */
 	void exitTernaryFuncWithIf(StellarParser.TernaryFuncWithIfContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code NotFunc}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Enter a parse tree produced by the {@code LogicalExpressionAnd}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterNotFunc(StellarParser.NotFuncContext ctx);
+	void enterLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code NotFunc}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Exit a parse tree produced by the {@code LogicalExpressionAnd}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitNotFunc(StellarParser.NotFuncContext ctx);
+	void exitLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code ComparisonExpressionParens}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Enter a parse tree produced by the {@code LogicalExpressionOr}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx);
+	void enterLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code ComparisonExpressionParens}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Exit a parse tree produced by the {@code LogicalExpressionOr}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx);
+	void exitLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code InExpression}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Enter a parse tree produced by the {@code BoleanExpression}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterInExpression(StellarParser.InExpressionContext ctx);
+	void enterBoleanExpression(StellarParser.BoleanExpressionContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code InExpression}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Exit a parse tree produced by the {@code BoleanExpression}
+	 * labeled alternative in {@link StellarParser#logical_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitInExpression(StellarParser.InExpressionContext ctx);
+	void exitBoleanExpression(StellarParser.BoleanExpressionContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code ComparisonExpressionWithOperator}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Enter a parse tree produced by {@link StellarParser#b_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx);
+	void enterB_expr(StellarParser.B_exprContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code ComparisonExpressionWithOperator}
-	 * labeled alternative in {@link StellarParser#comparison_expr}.
+	 * Exit a parse tree produced by {@link StellarParser#b_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx);
+	void exitB_expr(StellarParser.B_exprContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code LogicalExpressionAnd}
+	 * Enter a parse tree produced by the {@code InExpressionStatement}
+	 * labeled alternative in {@link StellarParser#in_expr}.
+	 * @param ctx the parse tree
+	 */
+	void enterInExpressionStatement(StellarParser.InExpressionStatementContext ctx);
+	/**
+	 * Exit a parse tree produced by the {@code InExpressionStatement}
+	 * labeled alternative in {@link StellarParser#in_expr}.
+	 * @param ctx the parse tree
+	 */
+	void exitInExpressionStatement(StellarParser.InExpressionStatementContext ctx);
+	/**
+	 * Enter a parse tree produced by the {@code NInExpressionStatement}
+	 * labeled alternative in {@link StellarParser#in_expr}.
+	 * @param ctx the parse tree
+	 */
+	void enterNInExpressionStatement(StellarParser.NInExpressionStatementContext ctx);
+	/**
+	 * Exit a parse tree produced by the {@code NInExpressionStatement}
+	 * labeled alternative in {@link StellarParser#in_expr}.
+	 * @param ctx the parse tree
+	 */
+	void exitNInExpressionStatement(StellarParser.NInExpressionStatementContext ctx);
+	/**
+	 * Enter a parse tree produced by the {@code NotFunc}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx);
+	void enterNotFunc(StellarParser.NotFuncContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code LogicalExpressionAnd}
+	 * Exit a parse tree produced by the {@code NotFunc}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitLogicalExpressionAnd(StellarParser.LogicalExpressionAndContext ctx);
+	void exitNotFunc(StellarParser.NotFuncContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code NInExpression}
+	 * Enter a parse tree produced by the {@code ComparisonExpressionParens}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterNInExpression(StellarParser.NInExpressionContext ctx);
+	void enterComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code NInExpression}
+	 * Exit a parse tree produced by the {@code ComparisonExpressionParens}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitNInExpression(StellarParser.NInExpressionContext ctx);
+	void exitComparisonExpressionParens(StellarParser.ComparisonExpressionParensContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code LogicalExpressionOr}
+	 * Enter a parse tree produced by the {@code ComparisonExpressionWithOperator}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void enterLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx);
+	void enterComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx);
 	/**
-	 * Exit a parse tree produced by the {@code LogicalExpressionOr}
+	 * Exit a parse tree produced by the {@code ComparisonExpressionWithOperator}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
 	 * @param ctx the parse tree
 	 */
-	void exitLogicalExpressionOr(StellarParser.LogicalExpressionOrContext ctx);
+	void exitComparisonExpressionWithOperator(StellarParser.ComparisonExpressionWithOperatorContext ctx);
 	/**
 	 * Enter a parse tree produced by the {@code operand}
 	 * labeled alternative in {@link StellarParser#comparison_expr}.
@@ -241,30 +287,6 @@ public interface StellarListener extends ParseTreeListener {
 	 */
 	void exitComparisonOp(StellarParser.ComparisonOpContext ctx);
 	/**
-	 * Enter a parse tree produced by the {@code ArithOp_plus}
-	 * labeled alternative in {@link StellarParser#arith_operator_addition}.
-	 * @param ctx the parse tree
-	 */
-	void enterArithOp_plus(StellarParser.ArithOp_plusContext ctx);
-	/**
-	 * Exit a parse tree produced by the {@code ArithOp_plus}
-	 * labeled alternative in {@link StellarParser#arith_operator_addition}.
-	 * @param ctx the parse tree
-	 */
-	void exitArithOp_plus(StellarParser.ArithOp_plusContext ctx);
-	/**
-	 * Enter a parse tree produced by the {@code ArithOp_mul}
-	 * labeled alternative in {@link StellarParser#arith_operator_mul}.
-	 * @param ctx the parse tree
-	 */
-	void enterArithOp_mul(StellarParser.ArithOp_mulContext ctx);
-	/**
-	 * Exit a parse tree produced by the {@code ArithOp_mul}
-	 * labeled alternative in {@link StellarParser#arith_operator_mul}.
-	 * @param ctx the parse tree
-	 */
-	void exitArithOp_mul(StellarParser.ArithOp_mulContext ctx);
-	/**
 	 * Enter a parse tree produced by {@link StellarParser#func_args}.
 	 * @param ctx the parse tree
 	 */


[13/17] incubator-metron git commit: METRON-682: Unify and Improve the Flat File Loader closes apache/incubator-metron#432

Posted by ce...@apache.org.
METRON-682: Unify and Improve the Flat File Loader closes apache/incubator-metron#432


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/1be4fcb0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/1be4fcb0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/1be4fcb0

Branch: refs/heads/Metron_0.3.1
Commit: 1be4fcb0243453863b6aefe0213fe9f0afed5718
Parents: a11e85c
Author: cstella <ce...@gmail.com>
Authored: Mon Feb 6 11:04:32 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Mon Feb 6 11:04:32 2017 -0500

----------------------------------------------------------------------
 metron-analytics/metron-statistics/README.md    |   2 +-
 .../docker/rpm-docker/SPECS/metron.spec         |   1 -
 .../metron/common/utils/cli/OptionHandler.java  |  31 ++
 .../metron-data-management/README.md            |  22 +-
 .../dataloads/bulk/ThreatIntelBulkLoader.java   | 260 --------------
 .../dataloads/extractor/ExtractorHandler.java   |  10 +-
 .../extractor/inputformat/Formats.java          |  50 +--
 .../inputformat/InputFormatHandler.java         |   7 +-
 .../extractor/inputformat/WholeFileFormat.java  | 123 +++----
 .../nonbulk/flatfile/ExtractorState.java        |  16 +-
 .../dataloads/nonbulk/flatfile/LoadOptions.java | 261 ++++++++++++++
 .../SimpleEnrichmentFlatFileLoader.java         | 290 +--------------
 .../flatfile/importer/ImportStrategy.java       |  47 +++
 .../nonbulk/flatfile/importer/Importer.java     |  34 ++
 .../flatfile/importer/LocalImporter.java        | 177 ++++++++++
 .../flatfile/importer/MapReduceImporter.java    |  75 ++++
 .../nonbulk/flatfile/location/FileLocation.java |  57 +++
 .../nonbulk/flatfile/location/HDFSLocation.java |  75 ++++
 .../nonbulk/flatfile/location/Location.java     |  99 ++++++
 .../flatfile/location/LocationStrategy.java     |  67 ++++
 .../nonbulk/flatfile/location/RawLocation.java  |  57 +++
 .../nonbulk/flatfile/location/URLLocation.java  |  63 ++++
 .../src/main/scripts/flatfile_loader.sh         |  22 +-
 .../src/main/scripts/threatintel_bulk_load.sh   |  41 ---
 .../hbase/mr/BulkLoadMapperIntegrationTest.java | 140 --------
 .../LeastRecentlyUsedPrunerIntegrationTest.java |  35 +-
 ...EnrichmentFlatFileLoaderIntegrationTest.java | 349 +++++++++++++++++++
 .../SimpleEnrichmentFlatFileLoaderTest.java     | 164 ---------
 .../nonbulk/taxii/TaxiiIntegrationTest.java     |  13 +-
 .../integration/IndexingIntegrationTest.java    |   4 +-
 30 files changed, 1567 insertions(+), 1025 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-analytics/metron-statistics/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-statistics/README.md b/metron-analytics/metron-statistics/README.md
index 4d78839..cfd44f2 100644
--- a/metron-analytics/metron-statistics/README.md
+++ b/metron-analytics/metron-statistics/README.md
@@ -45,7 +45,7 @@ functions can be used from everywhere where Stellar is used.
 * Input:
   * number - The number to take the absolute value of
 * Returns: The absolute value of the number passed in.
-*
+
 #### `BIN`
 * Description: Computes the bin that the value is in given a set of bounds.
 * Input:

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
----------------------------------------------------------------------
diff --git a/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec b/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
index 5c5881c..9466b68 100644
--- a/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
+++ b/metron-deployment/packaging/docker/rpm-docker/SPECS/metron.spec
@@ -181,7 +181,6 @@ This package installs the Metron Parser files
 %{metron_home}/bin/flatfile_loader.sh
 %{metron_home}/bin/prune_elasticsearch_indices.sh
 %{metron_home}/bin/prune_hdfs_files.sh
-%{metron_home}/bin/threatintel_bulk_load.sh
 %{metron_home}/bin/threatintel_bulk_prune.sh
 %{metron_home}/bin/threatintel_taxii_load.sh
 %attr(0644,root,root) %{metron_home}/lib/metron-data-management-%{full_version}.jar

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/cli/OptionHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/cli/OptionHandler.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/cli/OptionHandler.java
new file mode 100644
index 0000000..85e7520
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/utils/cli/OptionHandler.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.utils.cli;
+
+import com.google.common.base.Function;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.Option;
+
+import java.util.Optional;
+
+public abstract class OptionHandler<OPT_T extends Enum<OPT_T>> implements Function<String, Option>
+{
+  public Optional<Object> getValue(OPT_T option, CommandLine cli) {
+    return Optional.empty();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/README.md b/metron-platform/metron-data-management/README.md
index 26dd472..eaafda4 100644
--- a/metron-platform/metron-data-management/README.md
+++ b/metron-platform/metron-data-management/README.md
@@ -206,32 +206,16 @@ The parameters for the utility are as follows:
 | -n         | --enrichment_config       | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified.                                  |
 
 
-### Bulk Load from HDFS
-
-The shell script `$METRON_HOME/bin/threatintel_bulk_load.sh` will kick off a MR job to load data staged in HDFS into an HBase table.  Note: despite what
-the naming may suggest, this utility works for enrichment as well as threat intel due to the underlying infrastructure being the same.
-
-The parameters for the utility are as follows:
-
-| Short Code | Long Code           | Is Required? | Description                                                                                                       |
-|------------|---------------------|--------------|-------------------------------------------------------------------------------------------------------------------|
-| -h         |                     | No           | Generate the help screen/set of options                                                                           |
-| -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                 |
-| -t         | --table             | Yes          | The HBase table to import into                                                                                    |
-| -f         | --column_family     | Yes          | The HBase table column family to import into                                                                      |
-| -i         | --input             | Yes          | The input data location on HDFS                                                                                   |
-| -n         | --enrichment_config | No           | The JSON document describing the enrichments to configure.  Unlike other loaders, this is run first if specified. |
-or threat intel.
 
 ### Flatfile Loader
 
-The shell script `$METRON_HOME/bin/flatfile_loader.sh` will read data from local disk and load the enrichment or threat intel data into an HBase table.  
+The shell script `$METRON_HOME/bin/flatfile_loader.sh` will read data from local disk, HDFS or URLs and load the enrichment or threat intel data into an HBase table.  
 Note: This utility works for enrichment as well as threat intel due to the underlying infrastructure being the same.
 
 One special thing to note here is that there is a special configuration
 parameter to the Extractor config that is only considered during this
 loader:
-* inputFormatHandler : This specifies how to consider the data.  The two implementations are `BY_LINE` and `org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat`.
+* inputFormat : This specifies how to consider the data.  The two implementations are `BY_LINE` and `WHOLE_FILE`.
 
 The default is `BY_LINE`, which makes sense for a list of CSVs where
 each line indicates a unit of information which can be imported.
@@ -243,7 +227,9 @@ The parameters for the utility are as follows:
 | Short Code | Long Code           | Is Required? | Description                                                                                                                                                                         |   |
 |------------|---------------------|--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---|
 | -h         |                     | No           | Generate the help screen/set of options                                                                                                                                             |   |
+| -q         | --quiet             | No           | Do not update progress
 | -e         | --extractor_config  | Yes          | JSON Document describing the extractor for this input data source                                                                                                                   |   |
+| -m         | --import_mode       | No           | The Import mode to use: LOCAL, MR.  Default: LOCAL                                                                                                                  |   |
 | -t         | --hbase_table       | Yes          | The HBase table to import into                                                                                                                                                      |   |
 | -c         | --hbase_cf          | Yes          | The HBase table column family to import into                                                                                                                                        |   |
 | -i         | --input             | Yes          | The input data location on local disk.  If this is a file, then that file will be loaded.  If this is a directory, then the files will be loaded recursively under that directory. |   |

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
deleted file mode 100644
index 5ba0a91..0000000
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/bulk/ThreatIntelBulkLoader.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.bulk;
-
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.io.Files;
-import org.apache.commons.cli.*;
-import org.apache.commons.cli.ParseException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.metron.dataloads.extractor.ExtractorHandler;
-import org.apache.metron.dataloads.hbase.mr.BulkLoadMapper;
-import org.apache.metron.common.configuration.enrichment.SensorEnrichmentUpdateConfig;
-import org.apache.metron.enrichment.converter.HbaseConverter;
-import org.apache.metron.enrichment.converter.EnrichmentConverter;
-import org.apache.metron.common.utils.JSONUtils;
-
-import javax.annotation.Nullable;
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.text.*;
-import java.util.Date;
-
-public class ThreatIntelBulkLoader  {
-  private static abstract class OptionHandler implements Function<String, Option> {}
-  public enum BulkLoadOptions {
-    HELP("h", new OptionHandler() {
-
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        return new Option(s, "help", false, "Generate Help screen");
-      }
-    })
-    ,TABLE("t", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "table", true, "HBase table to import data into");
-        o.setRequired(true);
-        o.setArgName("HBASE_TABLE");
-        return o;
-      }
-    })
-    ,COLUMN_FAMILY("f", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "column_family", true, "Column family of the HBase table to import into");
-        o.setRequired(true);
-        o.setArgName("CF_NAME");
-        return o;
-      }
-    })
-    ,EXTRACTOR_CONFIG("e", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
-        o.setArgName("JSON_FILE");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ,INPUT_DATA("i", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "input", true, "Input directory in HDFS for the data to import into HBase");
-        o.setArgName("DIR");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ,AS_OF_TIME("a", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "as_of", true, "The last read timestamp to mark the records with (omit for time of execution)");
-        o.setArgName("datetime");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,AS_OF_TIME_FORMAT("z", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "as_of_format", true, "The format of the as_of time (only used in conjunction with the as_of option)");
-        o.setArgName("format");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,CONVERTER("c", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "converter", true, "The HBase converter class to use (Default is threat intel)");
-        o.setArgName("class");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,ENRICHMENT_CONFIG("n", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "enrichment_config", true
-                , "JSON Document describing the enrichment configuration details." +
-                "  This is used to associate an enrichment type with a field type in zookeeper."
-        );
-        o.setArgName("JSON_FILE");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ;
-    Option option;
-    String shortCode;
-    BulkLoadOptions(String shortCode, OptionHandler optionHandler) {
-      this.shortCode = shortCode;
-      this.option = optionHandler.apply(shortCode);
-    }
-
-    public boolean has(CommandLine cli) {
-      return cli.hasOption(shortCode);
-    }
-
-    public String get(CommandLine cli) {
-      return cli.getOptionValue(shortCode);
-    }
-
-    public static CommandLine parse(CommandLineParser parser, String[] args) {
-      try {
-        CommandLine cli = parser.parse(getOptions(), args);
-        if(ThreatIntelBulkLoader.BulkLoadOptions.HELP.has(cli)) {
-          printHelp();
-          System.exit(0);
-        }
-        return cli;
-      } catch (ParseException e) {
-        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
-        e.printStackTrace(System.err);
-        printHelp();
-        System.exit(-1);
-        return null;
-      }
-    }
-
-    public static void printHelp() {
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp( "ThreatIntelBulkLoader", getOptions());
-    }
-
-    public static Options getOptions() {
-      Options ret = new Options();
-      for(BulkLoadOptions o : BulkLoadOptions.values()) {
-        ret.addOption(o.option);
-      }
-      return ret;
-    }
-  }
-
-  private static long getTimestamp(CommandLine cli) throws java.text.ParseException {
-    if(BulkLoadOptions.AS_OF_TIME.has(cli)) {
-      if(!BulkLoadOptions.AS_OF_TIME_FORMAT.has(cli)) {
-        throw new IllegalStateException("Unable to proceed: Specified as_of_time without an associated format.");
-      }
-      else {
-        DateFormat format = new SimpleDateFormat(BulkLoadOptions.AS_OF_TIME_FORMAT.get(cli));
-        Date d = format.parse(BulkLoadOptions.AS_OF_TIME.get(cli));
-        return d.getTime();
-      }
-    }
-    else {
-      return System.currentTimeMillis();
-    }
-  }
-  private static String readExtractorConfig(File configFile) throws IOException {
-    return Joiner.on("\n").join(Files.readLines(configFile, Charset.defaultCharset()));
-  }
-
-  public static Job createJob(Configuration conf, String input, String table, String cf, String extractorConfigContents, long ts, HbaseConverter converter) throws IOException {
-    Job job = new Job(conf);
-    job.setJobName("ThreatIntelBulkLoader: " + input + " => " +  table + ":" + cf);
-    System.out.println("Configuring " + job.getJobName());
-    job.setJarByClass(ThreatIntelBulkLoader.class);
-    job.setMapperClass(org.apache.metron.dataloads.hbase.mr.BulkLoadMapper.class);
-    job.setOutputFormatClass(TableOutputFormat.class);
-    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
-    job.getConfiguration().set(BulkLoadMapper.COLUMN_FAMILY_KEY, cf);
-    job.getConfiguration().set(BulkLoadMapper.CONFIG_KEY, extractorConfigContents);
-    job.getConfiguration().set(BulkLoadMapper.LAST_SEEN_KEY, "" + ts);
-    job.getConfiguration().set(BulkLoadMapper.CONVERTER_KEY, converter.getClass().getName());
-    job.setOutputKeyClass(ImmutableBytesWritable.class);
-    job.setOutputValueClass(Put.class);
-    job.setNumReduceTasks(0);
-    ExtractorHandler handler = ExtractorHandler.load(extractorConfigContents);
-    handler.getInputFormatHandler().set(job, new Path(input), handler.getConfig());
-    return job;
-  }
-
-  public static void main(String... argv) throws Exception {
-    Configuration conf = HBaseConfiguration.create();
-    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
-
-    CommandLine cli = BulkLoadOptions.parse(new PosixParser(), otherArgs);
-    Long ts = getTimestamp(cli);
-    String input = BulkLoadOptions.INPUT_DATA.get(cli);
-    String table = BulkLoadOptions.TABLE.get(cli);
-    String cf = BulkLoadOptions.COLUMN_FAMILY.get(cli);
-    String extractorConfigContents = readExtractorConfig(new File(BulkLoadOptions.EXTRACTOR_CONFIG.get(cli)));
-    String converterClass = EnrichmentConverter.class.getName();
-    if(BulkLoadOptions.CONVERTER.has(cli)) {
-      converterClass = BulkLoadOptions.CONVERTER.get(cli);
-    }
-    SensorEnrichmentUpdateConfig sensorEnrichmentUpdateConfig = null;
-    if(BulkLoadOptions.ENRICHMENT_CONFIG.has(cli)) {
-      sensorEnrichmentUpdateConfig = JSONUtils.INSTANCE.load( new File(BulkLoadOptions.ENRICHMENT_CONFIG.get(cli))
-              , SensorEnrichmentUpdateConfig.class
-      );
-    }
-
-    HbaseConverter converter = (HbaseConverter) Class.forName(converterClass).getConstructor().newInstance();
-    Job job = createJob(conf, input, table, cf, extractorConfigContents, ts, converter);
-    System.out.println(conf);
-    boolean jobRet = job.waitForCompletion(true);
-    if(!jobRet) {
-      System.exit(1);
-    }
-    if(sensorEnrichmentUpdateConfig != null) {
-        sensorEnrichmentUpdateConfig.updateSensorConfigs();
-    }
-    System.exit(0);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
index 89477d8..2e2f799 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/ExtractorHandler.java
@@ -33,7 +33,7 @@ public class ExtractorHandler {
     final static ObjectMapper _mapper = new ObjectMapper();
     private Map<String, Object> config;
     private Extractor extractor;
-    private InputFormatHandler inputFormatHandler = Formats.BY_LINE;
+    private InputFormatHandler inputFormat = Formats.BY_LINE;
 
     public Map<String, Object> getConfig() {
         return config;
@@ -43,13 +43,13 @@ public class ExtractorHandler {
         this.config = config;
     }
 
-    public InputFormatHandler getInputFormatHandler() {
-        return inputFormatHandler;
+    public InputFormatHandler getInputFormat() {
+        return inputFormat;
     }
 
-    public void setInputFormatHandler(String handler) {
+    public void setInputFormat(String handler) {
         try {
-            this.inputFormatHandler= Formats.create(handler);
+            this.inputFormat= Formats.create(handler);
         } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) {
             throw new IllegalStateException("Unable to create an inputformathandler", e);
         }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
index b8be233..961e7d3 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/Formats.java
@@ -23,34 +23,34 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 
 import java.io.IOException;
 import java.lang.reflect.InvocationTargetException;
+import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
-public enum Formats implements InputFormatHandler{
-    BY_LINE(new InputFormatHandler() {
-        @Override
-        public void set(Job job, Path input, Map<String, Object> config) throws IOException {
+public enum Formats implements InputFormatHandler {
+  BY_LINE( (job, inputs, config) -> {
+      for(Path input : inputs) {
+        FileInputFormat.addInputPath(job, input);
+      }
+  }),
+  WHOLE_FILE( new WholeFileFormat());
+  InputFormatHandler _handler = null;
+  Formats(InputFormatHandler handler) {
+    this._handler = handler;
+  }
+  @Override
+  public void set(Job job, List<Path> path, Map<String, Object> config) throws IOException {
+    _handler.set(job, path, config);
+  }
 
-            FileInputFormat.addInputPath(job, input);
-        }
-    })
-    ;
-    InputFormatHandler _handler = null;
-    Formats(InputFormatHandler handler) {
-        this._handler = handler;
+  public static InputFormatHandler create(String handlerName) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException {
+    try {
+      InputFormatHandler ec = Formats.valueOf(handlerName)._handler;
+      return ec;
     }
-    @Override
-    public void set(Job job, Path path, Map<String, Object> config) throws IOException {
-        _handler.set(job, path, config);
-    }
-
-    public static InputFormatHandler create(String handlerName) throws ClassNotFoundException, IllegalAccessException, InstantiationException, NoSuchMethodException, InvocationTargetException {
-        try {
-            InputFormatHandler ec = Formats.valueOf(handlerName);
-            return ec;
-        }
-        catch(IllegalArgumentException iae) {
-            InputFormatHandler ex = (InputFormatHandler) Class.forName(handlerName).getConstructor().newInstance();
-            return ex;
-        }
+    catch(IllegalArgumentException iae) {
+      InputFormatHandler ex = (InputFormatHandler) Class.forName(handlerName).getConstructor().newInstance();
+      return ex;
     }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
index 2287969..00e89c0 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/InputFormatHandler.java
@@ -17,12 +17,17 @@
  */
 package org.apache.metron.dataloads.extractor.inputformat;
 
+import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.Job;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 
 public interface InputFormatHandler {
-    void set(Job job, Path input, Map<String, Object> config) throws IOException;
+  void set(Job job, List<Path> input, Map<String, Object> config) throws IOException;
+  default void set(Job job, Path input, Map<String, Object> config) throws IOException {
+    set(job, ImmutableList.of(input), config);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
index e0a58ef..5dc8b53 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/extractor/inputformat/WholeFileFormat.java
@@ -30,80 +30,83 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;
 
 import java.io.IOException;
+import java.util.List;
 import java.util.Map;
 
 public class WholeFileFormat implements InputFormatHandler {
 
-    public static class WholeFileRecordReader extends RecordReader<NullWritable, Text> {
-        private FileSplit fileSplit;
-        private Configuration conf;
-        private Text value = new Text();
-        private boolean processed = false;
+  public static class WholeFileRecordReader extends RecordReader<NullWritable, Text> {
+    private FileSplit fileSplit;
+    private Configuration conf;
+    private Text value = new Text();
+    private boolean processed = false;
 
-        @Override
-        public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
-            this.fileSplit = (FileSplit) split;
-            this.conf = context.getConfiguration();
-        }
-
-        @Override
-        public boolean nextKeyValue() throws IOException, InterruptedException {
-            if (!processed) {
-                byte[] contents = new byte[(int) fileSplit.getLength()];
-                Path file = fileSplit.getPath();
-                FileSystem fs = file.getFileSystem(conf);
-                FSDataInputStream in = null;
-                try {
-                    in = fs.open(file);
-                    IOUtils.readFully(in, contents, 0, contents.length);
-                    value.set(contents, 0, contents.length);
-                } finally {
-                    IOUtils.closeStream(in);
-                }
-                processed = true;
-                return true;
-            }
-            return false;
-        }
+    @Override
+    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
+      this.fileSplit = (FileSplit) split;
+      this.conf = context.getConfiguration();
+    }
 
-        @Override
-        public NullWritable getCurrentKey() throws IOException, InterruptedException {
-            return NullWritable.get();
-        }
-        @Override
-        public Text getCurrentValue() throws IOException, InterruptedException{
-            return value;
+    @Override
+    public boolean nextKeyValue() throws IOException, InterruptedException {
+      if (!processed) {
+        byte[] contents = new byte[(int) fileSplit.getLength()];
+        Path file = fileSplit.getPath();
+        FileSystem fs = file.getFileSystem(conf);
+        FSDataInputStream in = null;
+        try {
+          in = fs.open(file);
+          IOUtils.readFully(in, contents, 0, contents.length);
+          value.set(contents, 0, contents.length);
+        } finally {
+          IOUtils.closeStream(in);
         }
+        processed = true;
+        return true;
+      }
+      return false;
+    }
 
-        @Override
-        public float getProgress() throws IOException {
-            return processed ? 1.0f : 0.0f;
-        }
+    @Override
+    public NullWritable getCurrentKey() throws IOException, InterruptedException {
+      return NullWritable.get();
+    }
+    @Override
+    public Text getCurrentValue() throws IOException, InterruptedException{
+      return value;
+    }
 
-        @Override
-        public void close() throws IOException{
-            //do nothing :)
-        }
+    @Override
+    public float getProgress() throws IOException {
+      return processed ? 1.0f : 0.0f;
     }
 
-    public static class WholeFileInputFormat extends FileInputFormat<NullWritable, Text> {
+    @Override
+    public void close() throws IOException{
+      //do nothing :)
+    }
+  }
 
-        @Override
-        protected boolean isSplitable(JobContext context, Path file) {
-            return false;
-        }
+  public static class WholeFileInputFormat extends FileInputFormat<NullWritable, Text> {
 
-        @Override
-        public RecordReader<NullWritable, Text> createRecordReader(
-                InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
-            WholeFileRecordReader reader = new WholeFileRecordReader();
-            reader.initialize(split, context);
-            return reader;
-        }
+    @Override
+    protected boolean isSplitable(JobContext context, Path file) {
+      return false;
     }
+
     @Override
-    public void set(Job job, Path input, Map<String, Object> config) throws IOException {
-        WholeFileInputFormat.setInputPaths(job, input);
-        job.setInputFormatClass(WholeFileInputFormat.class);
+    public RecordReader<NullWritable, Text> createRecordReader(
+            InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
+      WholeFileRecordReader reader = new WholeFileRecordReader();
+      reader.initialize(split, context);
+      return reader;
+    }
+  }
+  @Override
+  public void set(Job job, List<Path> inputs, Map<String, Object> config) throws IOException {
+    for(Path input : inputs) {
+      WholeFileInputFormat.addInputPath(job, input);
     }
+    job.setInputFormatClass(WholeFileInputFormat.class);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
index e44eb27..168d251 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/ExtractorState.java
@@ -17,19 +17,29 @@
  */
 package org.apache.metron.dataloads.nonbulk.flatfile;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.metron.dataloads.extractor.Extractor;
 import org.apache.metron.enrichment.converter.HbaseConverter;
 
+import java.io.IOException;
+
 public class ExtractorState {
   private HTableInterface table;
   private Extractor extractor;
   private HbaseConverter converter;
+  private FileSystem fs;
 
-  public ExtractorState(HTableInterface table, Extractor extractor, HbaseConverter converter) {
+  public ExtractorState(HTableInterface table, Extractor extractor, HbaseConverter converter, Configuration config) {
     this.table = table;
     this.extractor = extractor;
     this.converter = converter;
+    try {
+      this.fs = FileSystem.get(config);
+    } catch (IOException e) {
+      throw new IllegalStateException("Unable to retrieve hadoop file system: " + e.getMessage(), e);
+    }
   }
 
   public HTableInterface getTable() {
@@ -43,4 +53,8 @@ public class ExtractorState {
   public HbaseConverter getConverter() {
     return converter;
   }
+
+  public FileSystem getFileSystem() {
+    return fs;
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/LoadOptions.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/LoadOptions.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/LoadOptions.java
new file mode 100644
index 0000000..ddaf6a6
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/LoadOptions.java
@@ -0,0 +1,261 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile;
+
+import com.google.common.base.Joiner;
+import com.google.common.base.Splitter;
+import org.apache.commons.cli.*;
+import org.apache.commons.io.FileUtils;
+import org.apache.metron.common.utils.ConversionUtils;
+import org.apache.metron.common.utils.cli.OptionHandler;
+import org.apache.metron.dataloads.nonbulk.flatfile.importer.ImportStrategy;
+
+import javax.annotation.Nullable;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Optional;
+
+public enum LoadOptions {
+  HELP("h", new OptionHandler<LoadOptions>() {
+
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      return new Option(s, "help", false, "Generate Help screen");
+    }
+  })
+  ,QUIET("q", new OptionHandler<LoadOptions>() {
+
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      return new Option(s, "quiet", false, "Do not update progress");
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      return Optional.of(option.has(cli));
+    }
+  })
+  , IMPORT_MODE("m", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "import_mode", true
+                           , "The Import mode to use: " + Joiner.on(",").join(ImportStrategy.values())
+                           + ".  Default: " + ImportStrategy.LOCAL
+                           );
+      o.setArgName("MODE");
+      o.setRequired(false);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      String mode = option.get(cli);
+      return Optional.of(ImportStrategy.getStrategy(mode).orElse(ImportStrategy.LOCAL));
+    }
+  })
+  ,HBASE_TABLE("t", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "hbase_table", true, "HBase table to ingest the data into.");
+      o.setArgName("TABLE");
+      o.setRequired(true);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      return Optional.ofNullable(option.get(cli).trim());
+    }
+  })
+  ,HBASE_CF("c", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "hbase_cf", true, "HBase column family to ingest the data into.");
+      o.setArgName("CF");
+      o.setRequired(true);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      return Optional.ofNullable(option.get(cli).trim());
+    }
+  })
+  ,EXTRACTOR_CONFIG("e", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
+      o.setArgName("JSON_FILE");
+      o.setRequired(true);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      try {
+        return Optional.ofNullable(FileUtils.readFileToString(new File(option.get(cli).trim())));
+      } catch (IOException e) {
+        throw new IllegalStateException("Unable to retrieve extractor config from " + option.get(cli) + ": " + e.getMessage(), e);
+      }
+    }
+  })
+  ,ENRICHMENT_CONFIG("n", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "enrichment_config", true
+              , "JSON Document describing the enrichment configuration details." +
+              "  This is used to associate an enrichment type with a field type in zookeeper."
+      );
+      o.setArgName("JSON_FILE");
+      o.setRequired(false);
+      return o;
+    }
+  })
+  ,LOG4J_PROPERTIES("l", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "log4j", true, "The log4j properties file to load");
+      o.setArgName("FILE");
+      o.setRequired(false);
+      return o;
+    }
+  })
+  ,NUM_THREADS("p", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "threads", true, "The number of threads to use when extracting data.  The default is the number of cores of your machine.");
+      o.setArgName("NUM_THREADS");
+      o.setRequired(false);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      int numThreads = Runtime.getRuntime().availableProcessors();
+      if(option.has(cli)) {
+        numThreads = ConversionUtils.convert(option.get(cli), Integer.class);
+      }
+      return Optional.of(numThreads);
+    }
+  })
+  ,BATCH_SIZE("b", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "batchSize", true, "The batch size to use for HBase puts");
+      o.setArgName("SIZE");
+      o.setRequired(false);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      int batchSize = 128;
+      if(option.has(cli)) {
+        batchSize = ConversionUtils.convert(option.get(cli), Integer.class);
+      }
+      return Optional.of(batchSize);
+    }
+  })
+  ,INPUT("i", new OptionHandler<LoadOptions>() {
+    @Nullable
+    @Override
+    public Option apply(@Nullable String s) {
+      Option o = new Option(s, "input", true, "The CSV File to load");
+      o.setArgName("FILE");
+      o.setRequired(true);
+      return o;
+    }
+
+    @Override
+    public Optional<Object> getValue(LoadOptions option, CommandLine cli) {
+      List<String> inputs = new ArrayList<>();
+      for(String input : Splitter.on(",").split(Optional.ofNullable(option.get(cli)).orElse(""))) {
+        inputs.add(input.trim());
+      }
+      return Optional.of(inputs);
+    }
+  })
+  ;
+  Option option;
+  String shortCode;
+  OptionHandler<LoadOptions> handler;
+  LoadOptions(String shortCode, OptionHandler<LoadOptions> optionHandler) {
+    this.shortCode = shortCode;
+    this.handler = optionHandler;
+    this.option = optionHandler.apply(shortCode);
+  }
+
+  public boolean has(CommandLine cli) {
+    return cli.hasOption(shortCode);
+  }
+
+  public String get(CommandLine cli) {
+    return cli.getOptionValue(shortCode);
+  }
+
+  public static CommandLine parse(CommandLineParser parser, String[] args) {
+    try {
+      CommandLine cli = parser.parse(getOptions(), args);
+      if(HELP.has(cli)) {
+        printHelp();
+        System.exit(0);
+      }
+      return cli;
+    } catch (ParseException e) {
+      System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+      e.printStackTrace(System.err);
+      printHelp();
+      System.exit(-1);
+      return null;
+    }
+  }
+
+  public static EnumMap<LoadOptions, Optional<Object> > createConfig(CommandLine cli) {
+    EnumMap<LoadOptions, Optional<Object> > ret = new EnumMap<>(LoadOptions.class);
+    for(LoadOptions option : values()) {
+      ret.put(option, option.handler.getValue(option, cli));
+    }
+    return ret;
+  }
+
+  public static void printHelp() {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp( "SimpleEnrichmentFlatFileLoader", getOptions());
+  }
+
+  public static Options getOptions() {
+    Options ret = new Options();
+    for(LoadOptions o : LoadOptions.values()) {
+      ret.addOption(o.option);
+    }
+    return ret;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
index 9992422..8ee11aa 100644
--- a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoader.java
@@ -17,312 +17,48 @@
  */
 package org.apache.metron.dataloads.nonbulk.flatfile;
 
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Iterables;
 import org.apache.commons.cli.*;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.log4j.PropertyConfigurator;
-import org.apache.metron.common.utils.ConversionUtils;
-import org.apache.metron.common.utils.file.ReaderSpliterator;
-import org.apache.metron.dataloads.extractor.Extractor;
 import org.apache.metron.dataloads.extractor.ExtractorHandler;
-import org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat;
 import org.apache.metron.common.configuration.enrichment.SensorEnrichmentUpdateConfig;
-import org.apache.metron.hbase.HTableProvider;
-import org.apache.metron.enrichment.converter.HbaseConverter;
-import org.apache.metron.enrichment.converter.EnrichmentConverter;
-import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.dataloads.nonbulk.flatfile.importer.ImportStrategy;
 import org.apache.metron.common.utils.JSONUtils;
 
-import javax.annotation.Nullable;
 import java.io.*;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Stack;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ForkJoinPool;
-import java.util.stream.Stream;
+import java.util.*;
 
 public class SimpleEnrichmentFlatFileLoader {
-  private static abstract class OptionHandler implements Function<String, Option> {}
-  public static enum LoadOptions {
-    HELP("h", new OptionHandler() {
 
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        return new Option(s, "help", false, "Generate Help screen");
-      }
-    })
-    ,HBASE_TABLE("t", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "hbase_table", true, "HBase table to ingest the data into.");
-        o.setArgName("TABLE");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ,HBASE_CF("c", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "hbase_cf", true, "HBase column family to ingest the data into.");
-        o.setArgName("CF");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ,EXTRACTOR_CONFIG("e", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "extractor_config", true, "JSON Document describing the extractor for this input data source");
-        o.setArgName("JSON_FILE");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ,ENRICHMENT_CONFIG("n", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "enrichment_config", true
-                , "JSON Document describing the enrichment configuration details." +
-                "  This is used to associate an enrichment type with a field type in zookeeper."
-        );
-        o.setArgName("JSON_FILE");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,LOG4J_PROPERTIES("l", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "log4j", true, "The log4j properties file to load");
-        o.setArgName("FILE");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,NUM_THREADS("p", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "threads", true, "The number of threads to use when extracting data.  The default is the number of cores of your machine.");
-        o.setArgName("NUM_THREADS");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,BATCH_SIZE("b", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "batchSize", true, "The batch size to use for HBase puts");
-        o.setArgName("SIZE");
-        o.setRequired(false);
-        return o;
-      }
-    })
-    ,INPUT("i", new OptionHandler() {
-      @Nullable
-      @Override
-      public Option apply(@Nullable String s) {
-        Option o = new Option(s, "input", true, "The CSV File to load");
-        o.setArgName("FILE");
-        o.setRequired(true);
-        return o;
-      }
-    })
-    ;
-    Option option;
-    String shortCode;
-    LoadOptions(String shortCode, OptionHandler optionHandler) {
-      this.shortCode = shortCode;
-      this.option = optionHandler.apply(shortCode);
-    }
-
-    public boolean has(CommandLine cli) {
-      return cli.hasOption(shortCode);
-    }
-
-    public String get(CommandLine cli) {
-      return cli.getOptionValue(shortCode);
-    }
-
-    public static CommandLine parse(CommandLineParser parser, String[] args) {
-      try {
-        CommandLine cli = parser.parse(getOptions(), args);
-        if(HELP.has(cli)) {
-          printHelp();
-          System.exit(0);
-        }
-        return cli;
-      } catch (ParseException e) {
-        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
-        e.printStackTrace(System.err);
-        printHelp();
-        System.exit(-1);
-        return null;
-      }
-    }
-
-    public static void printHelp() {
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp( "SimpleEnrichmentFlatFileLoader", getOptions());
-    }
-
-    public static Options getOptions() {
-      Options ret = new Options();
-      for(LoadOptions o : LoadOptions.values()) {
-        ret.addOption(o.option);
-      }
-      return ret;
-    }
-  }
-  public static List<File> getFiles(File root) {
-    if(!root.isDirectory())  {
-      return ImmutableList.of(root);
-    }
-    List<File> ret = new ArrayList<>();
-    Stack<File> stack = new Stack<File>();
-    stack.push(root);
-    while(!stack.isEmpty()) {
-      File f = stack.pop();
-      if(f.isDirectory()) {
-        for(File child : f.listFiles()) {
-          stack.push(child);
-        }
-      }
-      else {
-        ret.add(f);
-      }
-    }
-    return ret;
-  }
 
-  public HTableProvider getProvider() {
-    return new HTableProvider();
-  }
-
-  public List<Put> extract( String line
-                     , Extractor extractor
-                     , String cf
-                     , HbaseConverter converter
-                     ) throws IOException
-  {
-    List<Put> ret = new ArrayList<>();
-    Iterable<LookupKV> kvs = extractor.extract(line);
-    for(LookupKV kv : kvs) {
-      Put put = converter.toPut(cf, kv.getKey(), kv.getValue());
-      ret.add(put);
-    }
-    return ret;
-  }
-
-  public void load( final Iterable<Stream<String>> streams
-                  , final ThreadLocal<ExtractorState> state
-                  , final String cf
-                  , int numThreads
-                  )
-  {
-    for(Stream<String> stream : streams) {
-      try {
-        ForkJoinPool forkJoinPool = new ForkJoinPool(numThreads);
-        forkJoinPool.submit(() ->
-          stream.parallel().forEach(input -> {
-            ExtractorState es = state.get();
-            try {
-              es.getTable().put(extract(input, es.getExtractor(), cf, es.getConverter()));
-            } catch (IOException e) {
-              throw new IllegalStateException("Unable to continue: " + e.getMessage(), e);
-            }
-            }
-                                   )
-        ).get();
-      } catch (InterruptedException e) {
-        throw new IllegalStateException(e.getMessage(), e);
-      } catch (ExecutionException e) {
-        throw new IllegalStateException(e.getMessage(), e);
-      } finally {
-        stream.close();
-      }
-    }
-  }
-
-  private static Iterable<Stream<String>> streamify(List<File> files, int batchSize, boolean lineByLine) throws FileNotFoundException {
-    List<Stream<String>> ret = new ArrayList<>();
-    if(!lineByLine) {
-      ret.add(files.stream().map(f -> {
-        try {
-          return FileUtils.readFileToString(f);
-        } catch (IOException e) {
-          throw new IllegalStateException("File " + f.getName() + " not found.");
-        }
-      }));
-    }
-    else {
-      for(File f : files) {
-        ret.add(ReaderSpliterator.lineStream(new BufferedReader(new FileReader(f)), batchSize));
-      }
-    }
-    return ret;
+  public static void main(String... argv) throws Exception {
+    Configuration hadoopConfig = HBaseConfiguration.create();
+    String[] otherArgs = new GenericOptionsParser(hadoopConfig, argv).getRemainingArgs();
+    main(hadoopConfig, otherArgs);
   }
 
-  public static void main(String... argv) throws Exception {
-    Configuration conf = HBaseConfiguration.create();
-    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
+  public static void main(Configuration hadoopConfig, String[] argv) throws Exception {
 
-    CommandLine cli = LoadOptions.parse(new PosixParser(), otherArgs);
+    CommandLine cli = LoadOptions.parse(new PosixParser(), argv);
+    EnumMap<LoadOptions, Optional<Object>> config = LoadOptions.createConfig(cli);
     if(LoadOptions.LOG4J_PROPERTIES.has(cli)) {
       PropertyConfigurator.configure(LoadOptions.LOG4J_PROPERTIES.get(cli));
     }
     ExtractorHandler handler = ExtractorHandler.load(
-            FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli)))
+            FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli).trim()))
     );
-    int batchSize = 128;
-    if(LoadOptions.BATCH_SIZE.has(cli)) {
-      batchSize = ConversionUtils.convert(LoadOptions.BATCH_SIZE.get(cli), Integer.class);
-    }
-    int numThreads = Runtime.getRuntime().availableProcessors();
-    if(LoadOptions.NUM_THREADS.has(cli)) {
-      numThreads = ConversionUtils.convert(LoadOptions.NUM_THREADS.get(cli), Integer.class);
-    }
-    boolean lineByLine = !handler.getInputFormatHandler().getClass().equals(WholeFileFormat.class);
+    ImportStrategy strategy = (ImportStrategy) config.get(LoadOptions.IMPORT_MODE).get();
+    strategy.getImporter().importData(config, handler, hadoopConfig);
+
     SensorEnrichmentUpdateConfig sensorEnrichmentUpdateConfig = null;
     if(LoadOptions.ENRICHMENT_CONFIG.has(cli)) {
       sensorEnrichmentUpdateConfig = JSONUtils.INSTANCE.load( new File(LoadOptions.ENRICHMENT_CONFIG.get(cli))
               , SensorEnrichmentUpdateConfig.class
       );
     }
-    List<File> inputFiles = getFiles(new File(LoadOptions.INPUT.get(cli)));
-    SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
-    ThreadLocal<ExtractorState> state = new ThreadLocal<ExtractorState>() {
-      @Override
-      protected ExtractorState initialValue() {
-        try {
-          ExtractorHandler handler = ExtractorHandler.load(
-            FileUtils.readFileToString(new File(LoadOptions.EXTRACTOR_CONFIG.get(cli)))
-          );
-          HTableInterface table = loader.getProvider().getTable(conf, LoadOptions.HBASE_TABLE.get(cli));
-          return new ExtractorState(table, handler.getExtractor(), new EnrichmentConverter());
-        } catch (IOException e1) {
-          throw new IllegalStateException("Unable to get table: " + e1);
-        }
-      }
-    };
-
-    loader.load(streamify(inputFiles, batchSize, lineByLine), state, LoadOptions.HBASE_CF.get(cli), numThreads);
 
     if(sensorEnrichmentUpdateConfig != null) {
       sensorEnrichmentUpdateConfig.updateSensorConfigs();

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/ImportStrategy.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/ImportStrategy.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/ImportStrategy.java
new file mode 100644
index 0000000..df88640
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/ImportStrategy.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.importer;
+
+import java.util.Optional;
+
+/**
+ * Enumerates the available mechanisms for importing flat-file enrichment data:
+ * LOCAL runs the extraction in-process across local threads, while MR delegates
+ * to a MapReduce job.  Each constant wraps the singleton Importer that executes it.
+ */
+public enum ImportStrategy {
+  LOCAL(LocalImporter.INSTANCE),
+  MR(MapReduceImporter.INSTANCE)
+  ;
+  private Importer importer;
+
+  ImportStrategy(Importer importer) {
+    this.importer = importer;
+  }
+
+  /** @return the Importer implementation backing this strategy. */
+  public Importer getImporter() {
+    return importer;
+  }
+
+  /**
+   * Resolves a strategy by name, ignoring case and surrounding whitespace.
+   *
+   * @param strategyName the user-supplied strategy name; may be null
+   * @return the matching strategy, or Optional.empty() if the name is null or unrecognized
+   */
+  public static Optional<ImportStrategy> getStrategy(String strategyName) {
+    if(strategyName == null) {
+      return Optional.empty();
+    }
+    for(ImportStrategy strategy : values()) {
+      if(strategy.name().equalsIgnoreCase(strategyName.trim())) {
+        return Optional.of(strategy);
+      }
+    }
+    return Optional.empty();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/Importer.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/Importer.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/Importer.java
new file mode 100644
index 0000000..81ede08
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/Importer.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.metron.dataloads.nonbulk.flatfile.importer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.nonbulk.flatfile.LoadOptions;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+
+import java.io.IOException;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Strategy interface for loading flat-file enrichment data into HBase.
+ * Implementations receive the fully-resolved option map, the configured extractor
+ * handler, and the Hadoop configuration to use for filesystem/HBase access.
+ */
+public interface Importer {
+  void importData(EnumMap<LoadOptions, Optional<Object>> config, ExtractorHandler handler , final Configuration hadoopConfig) throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/LocalImporter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/LocalImporter.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/LocalImporter.java
new file mode 100644
index 0000000..652a4c3
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/LocalImporter.java
@@ -0,0 +1,177 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.importer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.metron.common.utils.file.ReaderSpliterator;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat;
+import org.apache.metron.dataloads.nonbulk.flatfile.ExtractorState;
+import org.apache.metron.dataloads.nonbulk.flatfile.LoadOptions;
+import org.apache.metron.dataloads.nonbulk.flatfile.location.Location;
+import org.apache.metron.dataloads.nonbulk.flatfile.location.LocationStrategy;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.HbaseConverter;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.hbase.HTableProvider;
+
+import java.io.*;
+import java.util.*;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Importer that runs extraction in the local JVM, writing extracted enrichment
+ * records to HBase via per-thread ExtractorState instances.  Files are processed
+ * either line-by-line (parallelized across a ForkJoinPool) or whole-file at a time,
+ * depending on the configured input format.
+ */
+public enum LocalImporter implements Importer {
+  INSTANCE;
+
+  /** Indirection point for obtaining an HTableProvider; exists to allow test injection. */
+  public interface HTableProviderRetriever {
+    HTableProvider retrieve();
+  }
+
+
+  @Override
+  public void importData( final EnumMap<LoadOptions, Optional<Object>> config
+                        , final ExtractorHandler handler
+                        , final Configuration hadoopConfig
+                         ) throws IOException {
+    importData(config, handler, hadoopConfig, () -> new HTableProvider());
+
+  }
+  /**
+   * Imports the configured inputs into HBase.  Chooses whole-file vs. line-by-line
+   * processing based on whether the extractor's input format is WholeFileFormat.
+   * Per-thread HBase tables/extractors are lazily created via the ThreadLocal below.
+   */
+  public void importData( final EnumMap<LoadOptions, Optional<Object>> config
+                        , final ExtractorHandler handler
+                        , final Configuration hadoopConfig
+                        , final HTableProviderRetriever provider
+                         ) throws IOException {
+    // One ExtractorState per worker thread: HTableInterface is not shareable across threads.
+    ThreadLocal<ExtractorState> state = new ThreadLocal<ExtractorState>() {
+      @Override
+      protected ExtractorState initialValue() {
+        try {
+          HTableInterface table = provider.retrieve().getTable(hadoopConfig, (String) config.get(LoadOptions.HBASE_TABLE).get());
+          return new ExtractorState(table, handler.getExtractor(), new EnrichmentConverter(), hadoopConfig);
+        } catch (IOException e1) {
+          throw new IllegalStateException("Unable to get table: " + e1);
+        }
+      }
+    };
+    boolean quiet = (boolean) config.get(LoadOptions.QUIET).get();
+    boolean lineByLine = !handler.getInputFormat().getClass().equals(WholeFileFormat.class);
+    List<String> inputs = (List<String>) config.get(LoadOptions.INPUT).get();
+    String cf = (String) config.get(LoadOptions.HBASE_CF).get();
+    if(!lineByLine) {
+      extractWholeFiles(inputs, state, cf, quiet);
+    }
+    else {
+      int batchSize = (int) config.get(LoadOptions.BATCH_SIZE).get();
+      int numThreads = (int) config.get(LoadOptions.NUM_THREADS).get();
+      extractLineByLine(inputs, state, cf, batchSize, numThreads, quiet);
+    }
+
+  }
+
+  /**
+   * Streams each input location line-by-line and extracts/puts in parallel on a
+   * dedicated ForkJoinPool of numThreads workers.
+   *
+   * NOTE(review): a new ForkJoinPool is created per location and never shut down --
+   * its worker threads may linger for the life of the JVM; confirm whether this is
+   * acceptable for a short-lived CLI tool.
+   */
+  public void extractLineByLine( List<String> inputs
+                               , ThreadLocal<ExtractorState> state
+                               , String cf
+                               , int batchSize
+                               , int numThreads
+                               , boolean quiet
+                               ) throws IOException {
+    inputs.stream().map(input -> LocationStrategy.getLocation(input, state.get().getFileSystem()))
+                   .forEach( loc -> {
+                      final Progress progress = new Progress();
+                      if(!quiet) {
+                        System.out.println("\nProcessing " + loc.toString());
+                      }
+                      try (Stream<String> stream = ReaderSpliterator.lineStream(loc.openReader(), batchSize)) {
+                        ForkJoinPool forkJoinPool = new ForkJoinPool(numThreads);
+                        // submit(...).get() blocks until every line of this location is processed
+                        forkJoinPool.submit(() ->
+                          stream.parallel().forEach(input -> {
+                            ExtractorState es = state.get();
+                            try {
+                              es.getTable().put(extract(input, es.getExtractor(), cf, es.getConverter(), progress, quiet));
+                            } catch (IOException e) {
+                              throw new IllegalStateException("Unable to continue: " + e.getMessage(), e);
+                            }
+                                                             }
+                                       )
+                               ).get();
+                             } catch (Exception e) {
+                               throw new IllegalStateException(e.getMessage(), e);
+                             }
+                                  }
+                   );
+  }
+
+  /**
+   * Reads each input location in full (visiting directories recursively via
+   * Location.fileVisitor) and runs the extractor once per whole file.
+   * Note that lines are joined with no separator before extraction.
+   */
+  public void extractWholeFiles( List<String> inputs, ThreadLocal<ExtractorState> state, String cf, boolean quiet) throws IOException {
+    final Progress progress = new Progress();
+    final List<Location> locations = new ArrayList<>();
+      Location.fileVisitor(inputs, loc -> locations.add(loc), state.get().getFileSystem());
+      locations.parallelStream().forEach(loc -> {
+        try(BufferedReader br = loc.openReader()) {
+          String s = br.lines().collect(Collectors.joining());
+          state.get().getTable().put(extract( s
+                                            , state.get().getExtractor()
+                                            , cf, state.get().getConverter()
+                                            , progress
+                                            , quiet
+                                            )
+                                    );
+        } catch (IOException e) {
+          throw new IllegalStateException("Unable to read " + loc + ": " + e.getMessage(), e);
+        }
+      });
+  }
+
+
+  /**
+   * Runs the extractor over one unit of input (a line or a whole file) and converts
+   * each resulting LookupKV into an HBase Put.  Also ticks the progress spinner
+   * unless quiet mode is enabled.
+   */
+  public List<Put> extract(String line
+                     , Extractor extractor
+                     , String cf
+                     , HbaseConverter converter
+                     , final Progress progress
+                     , final boolean quiet
+                     ) throws IOException
+  {
+    List<Put> ret = new ArrayList<>();
+    Iterable<LookupKV> kvs = extractor.extract(line);
+    for(LookupKV kv : kvs) {
+      Put put = converter.toPut(cf, kv.getKey(), kv.getValue());
+      ret.add(put);
+    }
+    if(!quiet) {
+      progress.update();
+    }
+    return ret;
+  }
+
+
+  /**
+   * Console spinner shown during extraction.  update() is synchronized so the
+   * counter is consistent across worker threads, though interleaving with other
+   * stdout writers can still garble the single-line display.
+   */
+  public static class Progress {
+    private int count = 0;
+    private String anim= "|/-\\";
+
+    public synchronized void update() {
+      int currentCount = count++;
+      System.out.print("\rProcessed " + currentCount + " - " + anim.charAt(currentCount % anim.length()));
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/MapReduceImporter.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/MapReduceImporter.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/MapReduceImporter.java
new file mode 100644
index 0000000..e83bdd6
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/importer/MapReduceImporter.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.importer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.log4j.Logger;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.hbase.mr.BulkLoadMapper;
+import org.apache.metron.dataloads.nonbulk.flatfile.LoadOptions;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+
+import java.io.IOException;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+
+/**
+ * Importer that loads flat-file enrichment data via a map-only MapReduce job:
+ * BulkLoadMapper extracts records and TableOutputFormat writes the resulting
+ * Puts directly to the target HBase table.
+ */
+public enum MapReduceImporter implements Importer{
+  INSTANCE
+  ;
+
+  private static final Logger LOG = Logger.getLogger(MapReduceImporter.class);
+
+  /**
+   * Configures and synchronously runs the bulk-load job.  The extractor config
+   * contents, target table, and column family are passed to the mapper through
+   * the job configuration.
+   */
+  @Override
+  public void importData(EnumMap<LoadOptions, Optional<Object>> config
+                        , ExtractorHandler handler
+                        , Configuration hadoopConfig
+                        ) throws IOException {
+    String table = (String) config.get(LoadOptions.HBASE_TABLE).get();
+    String cf = (String) config.get(LoadOptions.HBASE_CF).get();
+    String extractorConfigContents  = (String) config.get(LoadOptions.EXTRACTOR_CONFIG).get();
+    Job job = Job.getInstance(hadoopConfig);
+    List<String> inputs = (List<String>) config.get(LoadOptions.INPUT).get();
+    job.setJobName("MapReduceImporter: " + inputs.stream().collect(Collectors.joining(",")) + " => " +  table + ":" + cf);
+    LOG.info("Configuring " + job.getJobName());
+    job.setJarByClass(MapReduceImporter.class);
+    job.setMapperClass(org.apache.metron.dataloads.hbase.mr.BulkLoadMapper.class);
+    job.setOutputFormatClass(TableOutputFormat.class);
+    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
+    job.getConfiguration().set(BulkLoadMapper.COLUMN_FAMILY_KEY, cf);
+    job.getConfiguration().set(BulkLoadMapper.CONFIG_KEY, extractorConfigContents);
+    job.getConfiguration().set(BulkLoadMapper.CONVERTER_KEY, EnrichmentConverter.class.getName());
+    job.setOutputKeyClass(ImmutableBytesWritable.class);
+    job.setOutputValueClass(Put.class);
+    // map-only job: Puts go straight to HBase, no reduce phase needed
+    job.setNumReduceTasks(0);
+    List<Path> paths = inputs.stream().map(p -> new Path(p)).collect(Collectors.toList());
+    handler.getInputFormat().set(job, paths, handler.getConfig());
+    try {
+      // NOTE(review): the boolean success flag from waitForCompletion is discarded,
+      // so a job that completes unsuccessfully will not raise an error here.
+      job.waitForCompletion(true);
+    } catch (Exception e) {
+      throw new IllegalStateException("Unable to complete job: " + e.getMessage(), e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/FileLocation.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/FileLocation.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/FileLocation.java
new file mode 100644
index 0000000..267a6fb
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/FileLocation.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import java.io.*;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * RawLocation implementation backed by the local filesystem (java.io.File).
+ */
+public class FileLocation implements RawLocation {
+  /**
+   * Lists the immediate children of a directory path.
+   * NOTE(review): File.listFiles() returns null when loc is not a directory or is
+   * unreadable, which would NPE the for-loop here -- presumably callers only invoke
+   * this after isDirectory() returns true; confirm against the Location visitor.
+   */
+  @Override
+  public Optional<List<String>> list(String loc) {
+    List<String> children = new ArrayList<>();
+    for(File f : new File(loc).listFiles()) {
+        children.add(f.getPath());
+      }
+    return Optional.of(children);
+  }
+
+  @Override
+  public boolean exists(String loc) throws IOException {
+    return new File(loc).exists();
+  }
+
+  @Override
+  public boolean isDirectory(String loc) throws IOException {
+    return new File(loc).isDirectory();
+  }
+
+  @Override
+  public InputStream openInputStream(String loc) throws IOException {
+    return new FileInputStream(loc);
+  }
+
+  /** A path "matches" the local filesystem strategy simply if it exists as a local file. */
+  @Override
+  public boolean match(String loc) {
+    return new File(loc).exists();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/HDFSLocation.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/HDFSLocation.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/HDFSLocation.java
new file mode 100644
index 0000000..bae6a82
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/HDFSLocation.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+public class HDFSLocation implements RawLocation<FileSystem> {
+
+  FileSystem fs = null;
+
+  @Override
+  public Optional<List<String>> list(String loc) throws IOException {
+    List<String> children = new ArrayList<>();
+    for(FileStatus f : fs.listStatus(new Path(loc)) ) {
+        children.add(f.getPath().toString());
+      }
+    return Optional.of(children);
+  }
+
+  @Override
+  public boolean exists(String loc) throws IOException {
+    return fs.exists(new Path(loc));
+  }
+
+  @Override
+  public boolean isDirectory(String loc) throws IOException {
+    return fs.isDirectory(new Path(loc));
+  }
+
+  @Override
+  public InputStream openInputStream(String loc) throws IOException {
+    return fs.open(new Path(loc));
+  }
+
+  @Override
+  public boolean match(String loc) {
+    try {
+      return loc.startsWith("hdfs://") && exists(loc);
+    } catch (IOException e) {
+      return false;
+    }
+  }
+
+  @Override
+  public void init(FileSystem state) {
+    this.fs = state;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/Location.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/Location.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/Location.java
new file mode 100644
index 0000000..81eada6
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/Location.java
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import org.apache.hadoop.fs.*;
+import org.apache.metron.dataloads.nonbulk.flatfile.importer.LocalImporter;
+
+import java.io.*;
+import java.util.*;
+import java.util.function.Consumer;
+
+/**
+ * Location can be either a local file or a file on HDFS.
+ */
+/**
+ * Location can be either a local file or a file on HDFS.  It pairs a raw
+ * location string with the {@link RawLocation} strategy that knows how to
+ * access it.
+ */
+public class Location {
+
+  private String loc;
+  private RawLocation<?> rawLocation;
+
+  /**
+   * @param loc the location string (local path, hdfs:// URI, or URL)
+   * @param rawLocation the access strategy for that location type
+   */
+  public Location(String loc, RawLocation rawLocation) {
+    this.loc = loc;
+    this.rawLocation = rawLocation;
+  }
+
+  public RawLocation<?> getRawLocation() {
+    return rawLocation;
+  }
+
+  /**
+   * Lists the children of this location.
+   *
+   * @return the child Locations if this exists and is a directory;
+   *         empty otherwise
+   */
+  public Optional<List<Location>> getChildren() throws IOException {
+      if(exists() && isDirectory()) {
+        List<Location> children = new ArrayList<>();
+        for(String child : rawLocation.list(loc).orElse(new ArrayList<>())) {
+          children.add(new Location(child, rawLocation));
+        }
+        return Optional.of(children);
+      }
+      else {
+        return Optional.empty();
+      }
+  }
+
+
+  public boolean exists() throws IOException {
+    return rawLocation.exists(loc);
+  }
+
+  public boolean isDirectory() throws IOException {
+    return rawLocation.isDirectory(loc);
+  }
+
+  public BufferedReader openReader() throws IOException {
+    return rawLocation.openReader(loc);
+  }
+
+  @Override
+  public String toString() {
+    return loc;
+  }
+
+  /**
+   * Depth-first traversal over the given inputs, invoking the consumer on
+   * every file (non-directory) location found.  Inputs that do not exist
+   * are silently skipped.
+   *
+   * @param inputs the location strings to visit
+   * @param importConsumer called once per file location
+   * @param fs the HDFS filesystem handle used to resolve hdfs:// inputs
+   */
+  public static void fileVisitor(List<String> inputs
+                         , final Consumer<Location> importConsumer
+                         , final FileSystem fs
+                         ) throws IOException {
+    // ArrayDeque is the recommended replacement for the legacy, synchronized
+    // java.util.Stack; push/pop preserves the same LIFO visit order.
+    Deque<Location> stack = new ArrayDeque<>();
+    for(String input : inputs) {
+      Location loc = LocationStrategy.getLocation(input, fs);
+      if(loc.exists()) {
+        stack.push(loc);
+      }
+    }
+    while(!stack.isEmpty()) {
+      Location loc = stack.pop();
+      if(loc.isDirectory()) {
+        for(Location child : loc.getChildren().orElse(Collections.emptyList())) {
+          stack.push(child);
+        }
+      }
+      else {
+        importConsumer.accept(loc);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/LocationStrategy.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/LocationStrategy.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/LocationStrategy.java
new file mode 100644
index 0000000..338a1e2
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/LocationStrategy.java
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import org.apache.hadoop.fs.FileSystem;
+
+import java.util.Optional;
+import java.util.function.Function;
+
+/**
+ * Enumerates the supported location types and how to construct the matching
+ * {@link RawLocation} for each.  Strategies are tried in declaration order
+ * (HDFS, then FILE, then URL), so the first whose match() accepts the
+ * input string wins.
+ */
+public enum LocationStrategy {
+  HDFS(fs -> {
+    HDFSLocation location = new HDFSLocation();
+    location.init(fs);
+    return location;
+  })
+  ,FILE(fs -> {
+    FileLocation location = new FileLocation();
+    location.init(fs);
+    return location;
+  })
+  ,URL(fs -> {
+    URLLocation location = new URLLocation();
+    location.init(fs);
+    return location;
+  })
+  ;
+  // Factory producing an initialized RawLocation for this strategy; the
+  // FileSystem argument is only meaningful to HDFS (the others have a no-op init).
+  Function<FileSystem, RawLocation<?>> locationCreator;
+
+  LocationStrategy(Function<FileSystem, RawLocation<?>> locationCreator) {
+    this.locationCreator = locationCreator;
+  }
+
+  /**
+   * Finds the first strategy whose RawLocation matches {@code loc}.
+   *
+   * @param loc the location string (local path, hdfs:// URI, or URL)
+   * @param fs the HDFS filesystem handle, passed to each strategy's factory
+   * @return the matching RawLocation, or empty if no strategy matches
+   */
+  public static Optional<RawLocation<?>> getRawLocation(String loc, FileSystem fs) {
+    for(LocationStrategy strategy : values()) {
+      RawLocation<?> location = strategy.locationCreator.apply(fs);
+      if(location.match(loc)) {
+        return Optional.of(location);
+      }
+    }
+    return Optional.empty();
+  }
+
+  /**
+   * Resolves {@code loc} to a {@link Location}.
+   *
+   * @throws IllegalStateException if no strategy matches the location
+   */
+  public static Location getLocation(String loc, FileSystem fs) {
+    Optional<RawLocation<?>> rawLoc = getRawLocation(loc, fs);
+    if(rawLoc.isPresent()) {
+      return new Location(loc, rawLoc.get());
+    }
+    else {
+      throw new IllegalStateException("Unsupported type: " + loc);
+    }
+  }
+}



[12/17] incubator-metron git commit: METRON-682: Unify and Improve the Flat File Loader closes apache/incubator-metron#432

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/RawLocation.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/RawLocation.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/RawLocation.java
new file mode 100644
index 0000000..5f2db33
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/RawLocation.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import java.io.*;
+import java.util.List;
+import java.util.Optional;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ * Abstraction over a location type (local file, HDFS, URL).
+ *
+ * @param <T> the state type handed to {@link #init(Object)}; e.g. an HDFS
+ *            FileSystem.  Implementations with no state can ignore it.
+ */
+public interface RawLocation<T> {
+  Optional<List<String>> list(String loc) throws IOException;
+  boolean exists(String loc) throws IOException;
+  boolean isDirectory(String loc) throws IOException;
+
+  InputStream openInputStream(String loc) throws IOException;
+
+  /** @return true if this location type knows how to handle {@code loc} */
+  boolean match(String loc);
+
+  /** Optional initialization hook; the default is a no-op. */
+  default void init(T state) {
+
+  }
+
+  /**
+   * Opens a reader over the location, transparently decompressing .gz and
+   * .zip locations (for .zip, only the first entry is read).
+   *
+   * NOTE(review): the InputStreamReaders use the platform default charset;
+   * consider specifying UTF-8 explicitly — confirm against existing data
+   * before changing, since it would alter decoding behavior.
+   */
+  default BufferedReader openReader(String loc) throws IOException {
+    InputStream is = openInputStream(loc);
+    if(loc.endsWith(".gz")) {
+      return new BufferedReader(new InputStreamReader(new GZIPInputStream(is)));
+    }
+    else if(loc.endsWith(".zip")) {
+      ZipInputStream zis = new ZipInputStream(is);
+      ZipEntry entry = zis.getNextEntry();
+      if(entry != null) {
+        return new BufferedReader(new InputStreamReader(zis));
+      }
+      else {
+        // No entries in the archive: close the zip stream (and the
+        // underlying stream), which was previously leaked, and hand back
+        // an empty reader.
+        zis.close();
+        return new BufferedReader(new InputStreamReader(new ByteArrayInputStream(new byte[] {})));
+      }
+    }
+    else {
+      return new BufferedReader(new InputStreamReader(is));
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/URLLocation.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/URLLocation.java b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/URLLocation.java
new file mode 100644
index 0000000..cc8edbe
--- /dev/null
+++ b/metron-platform/metron-data-management/src/main/java/org/apache/metron/dataloads/nonbulk/flatfile/location/URLLocation.java
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile.location;
+
+import java.io.*;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipInputStream;
+
+/**
+ * A {@link RawLocation} for resources addressed by a URL (e.g. http/https).
+ */
+public class URLLocation implements RawLocation {
+
+  /**
+   * URLs are treated as leaves, so there are never any children.
+   */
+  @Override
+  public Optional<List<String>> list(String loc) throws IOException {
+    return Optional.of(Collections.emptyList());
+  }
+
+  /**
+   * Always reports the resource as existing; no remote check is performed
+   * here.  A missing resource surfaces later, when openInputStream fails.
+   */
+  @Override
+  public boolean exists(String loc) throws IOException {
+    return true;
+  }
+
+  /**
+   * URLs are never directories.
+   */
+  @Override
+  public boolean isDirectory(String loc) throws IOException {
+    return false;
+  }
+
+  /**
+   * Opens a connection to the URL and returns its content stream.
+   */
+  @Override
+  public InputStream openInputStream(String loc) throws IOException {
+    return new URL(loc).openConnection().getInputStream();
+  }
+
+  /**
+   * Matches any syntactically valid URL.  NOTE(review): this accepts any
+   * protocol the JDK recognizes (including file:), but the HDFS and FILE
+   * strategies are consulted first by LocationStrategy, so in practice
+   * this catches http/https-style locations.
+   */
+  @Override
+  public boolean match(String loc) {
+    try {
+      new URL(loc);
+      return true;
+    } catch (MalformedURLException e) {
+      return false;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/scripts/flatfile_loader.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/scripts/flatfile_loader.sh b/metron-platform/metron-data-management/src/main/scripts/flatfile_loader.sh
index bba7f8e..b9e2746 100755
--- a/metron-platform/metron-data-management/src/main/scripts/flatfile_loader.sh
+++ b/metron-platform/metron-data-management/src/main/scripts/flatfile_loader.sh
@@ -27,9 +27,25 @@ elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
   . /usr/lib/bigtop-utils/bigtop-detect-javahome
 fi
 
-export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
 export METRON_VERSION=${project.version}
 export METRON_HOME=/usr/metron/$METRON_VERSION
+export CLASSNAME="org.apache.metron.dataloads.nonbulk.flatfile.SimpleEnrichmentFlatFileLoader"
 export DM_JAR=${project.artifactId}-$METRON_VERSION.jar
-CP=$METRON_HOME/lib/$DM_JAR:/usr/metron/${METRON_VERSION}/lib/taxii-1.1.0.1.jar:`${HBASE_HOME}/bin/hbase classpath`
-java -cp $CP org.apache.metron.dataloads.nonbulk.flatfile.SimpleEnrichmentFlatFileLoader "$@"
+export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
+
+if [ $(which hadoop) ]
+then
+  HADOOP_CLASSPATH=${HBASE_HOME}/lib/hbase-server.jar:`${HBASE_HOME}/bin/hbase classpath`
+  for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
+    if [ -f $jar ];then
+      LIBJARS="$jar,$LIBJARS"
+    fi
+  done
+  export HADOOP_CLASSPATH
+  hadoop jar $METRON_HOME/lib/$DM_JAR $CLASSNAME -libjars ${LIBJARS} "$@"
+else
+  echo "Warning: Metron cannot find the hadoop client on this node.  This means that loading via Map Reduce will NOT function."
+  CP=$METRON_HOME/lib/$DM_JAR:/usr/metron/${METRON_VERSION}/lib/taxii-1.1.0.1.jar:`${HBASE_HOME}/bin/hbase classpath`
+  java -cp $CP $CLASSNAME "$@"
+fi
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/main/scripts/threatintel_bulk_load.sh
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/main/scripts/threatintel_bulk_load.sh b/metron-platform/metron-data-management/src/main/scripts/threatintel_bulk_load.sh
deleted file mode 100755
index 865d0ad..0000000
--- a/metron-platform/metron-data-management/src/main/scripts/threatintel_bulk_load.sh
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/bin/bash
-# 
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#     http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# 
-
-BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
-[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r ${BIGTOP_DEFAULTS_DIR}/hbase ] && . ${BIGTOP_DEFAULTS_DIR}/hbase
-
-# Autodetect JAVA_HOME if not defined
-if [ -e /usr/libexec/bigtop-detect-javahome ]; then
-  . /usr/libexec/bigtop-detect-javahome
-elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
-  . /usr/lib/bigtop-utils/bigtop-detect-javahome
-fi
-
-export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
-HADOOP_CLASSPATH=${HBASE_HOME}/lib/hbase-server.jar:`${HBASE_HOME}/bin/hbase classpath`
-for jar in $(echo $HADOOP_CLASSPATH | sed 's/:/ /g');do
-  if [ -f $jar ];then
-    LIBJARS="$jar,$LIBJARS"
-  fi
-done
-export HADOOP_CLASSPATH
-export METRON_VERSION=${project.version}
-export METRON_HOME=/usr/metron/$METRON_VERSION
-export DM_JAR=${project.artifactId}-$METRON_VERSION.jar
-hadoop jar $METRON_HOME/lib/$DM_JAR org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader -libjars ${LIBJARS} "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
deleted file mode 100644
index b7a753b..0000000
--- a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/BulkLoadMapperIntegrationTest.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.hbase.mr;
-
-import com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory;
-import org.adrianwalker.multilinestring.Multiline;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader;
-import org.apache.metron.enrichment.converter.EnrichmentConverter;
-import org.apache.metron.enrichment.converter.EnrichmentKey;
-import org.apache.metron.enrichment.converter.EnrichmentValue;
-import org.apache.metron.enrichment.lookup.LookupKV;
-import org.apache.metron.test.utils.UnitTestHelper;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.*;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-
-public class BulkLoadMapperIntegrationTest {
-  /** The test util. */
-  private HBaseTestingUtility testUtil;
-
-  /** The test table. */
-  private HTable testTable;
-  private String tableName = "malicious_domains";
-  private String cf = "cf";
-  private String csvFile="input.csv";
-  private String extractorJson = "extractor.json";
-  private String enrichmentJson = "enrichment_config.json";
-  private String asOf = "04/15/2016";
-  private String asOfFormat = "georgia";
-  private String convertClass = "threadIntel.class";
-  private Configuration config = null;
-
-
-  @Before
-  public void setup() throws Exception {
-    UnitTestHelper.setJavaLoggingLevel(Level.SEVERE);
-    Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
-    config = kv.getValue();
-    testUtil = kv.getKey();
-    testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
-  }
-
-  @After
-  public void teardown() throws Exception {
-    HBaseUtil.INSTANCE.teardown(testUtil);
-  }
- /**
-         {
-            "config" : {
-                        "columns" : {
-                                "host" : 0
-                                ,"meta" : 2
-                                    }
-                       ,"indicator_column" : "host"
-                       ,"separator" : ","
-                       ,"type" : "threat"
-                       }
-            ,"extractor" : "CSV"
-         }
-         */
-  @Multiline
-  private static String extractorConfig;
-
-  @Test
-  public void testCommandLine() throws Exception {
-    UnitTestHelper.setJavaLoggingLevel(GuiceComponentProviderFactory.class, Level.WARNING);
-    Configuration conf = HBaseConfiguration.create();
-
-    String[] argv = {"-f cf", "-t malicious_domains", "-e extractor.json", "-n enrichment_config.json", "-a 04/15/2016", "-i input.csv", "-z georgia", "-c threadIntel.class"};
-    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
-
-    CommandLine cli = ThreatIntelBulkLoader.BulkLoadOptions.parse(new PosixParser(), otherArgs);
-    Assert.assertEquals(extractorJson,ThreatIntelBulkLoader.BulkLoadOptions.EXTRACTOR_CONFIG.get(cli).trim());
-    Assert.assertEquals(cf, ThreatIntelBulkLoader.BulkLoadOptions.COLUMN_FAMILY.get(cli).trim());
-    Assert.assertEquals(tableName,ThreatIntelBulkLoader.BulkLoadOptions.TABLE.get(cli).trim());
-    Assert.assertEquals(enrichmentJson,ThreatIntelBulkLoader.BulkLoadOptions.ENRICHMENT_CONFIG.get(cli).trim());
-    Assert.assertEquals(csvFile,ThreatIntelBulkLoader.BulkLoadOptions.INPUT_DATA.get(cli).trim());
-    Assert.assertEquals(asOf, ThreatIntelBulkLoader.BulkLoadOptions.AS_OF_TIME.get(cli).trim());
-    Assert.assertEquals(asOfFormat, ThreatIntelBulkLoader.BulkLoadOptions.AS_OF_TIME_FORMAT.get(cli).trim());
-    Assert.assertEquals(convertClass, ThreatIntelBulkLoader.BulkLoadOptions.CONVERTER.get(cli).trim());
-  }
-
-  @Test
-  public void test() throws IOException, ClassNotFoundException, InterruptedException {
-
-    Assert.assertNotNull(testTable);
-    FileSystem fs = FileSystem.get(config);
-    String contents = "google.com,1,foo";
-    EnrichmentConverter converter = new EnrichmentConverter();
-    HBaseUtil.INSTANCE.writeFile(contents, new Path("input.csv"), fs);
-    Job job = ThreatIntelBulkLoader.createJob(config, "input.csv", tableName, cf, extractorConfig, 0L, new EnrichmentConverter());
-    Assert.assertTrue(job.waitForCompletion(true));
-    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
-    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
-    for(Result r : scanner) {
-      results.add(converter.fromResult(r, cf));
-    }
-    Assert.assertEquals(1, results.size());
-    Assert.assertEquals(results.get(0).getKey().indicator, "google.com");
-    Assert.assertEquals(results.get(0).getKey().type, "threat");
-    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
-    Assert.assertEquals(results.get(0).getValue().getMetadata().get("meta"), "foo");
-    Assert.assertEquals(results.get(0).getValue().getMetadata().get("host"), "google.com");
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
index 14a5143..d82be9d 100644
--- a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/hbase/mr/LeastRecentlyUsedPrunerIntegrationTest.java
@@ -36,10 +36,7 @@ import org.apache.metron.enrichment.lookup.LookupKey;
 import org.apache.metron.enrichment.lookup.accesstracker.BloomAccessTracker;
 import org.apache.metron.enrichment.lookup.accesstracker.PersistentAccessTracker;
 import org.apache.metron.test.utils.UnitTestHelper;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -49,21 +46,21 @@ import java.util.logging.Level;
 
 public class LeastRecentlyUsedPrunerIntegrationTest {
     /** The test util. */
-    private HBaseTestingUtility testUtil;
+    private static HBaseTestingUtility testUtil;
 
     /** The test table. */
-    private HTable testTable;
-    private HTable atTable;
-    private String tableName = "malicious_domains";
-    private String cf = "cf";
-    private String atTableName = "access_trackers";
-    private String atCF= "cf";
-    private String beginTime = "04/14/2016 12:00:00";
-    private String timeFormat = "georgia";
-    private Configuration config = null;
+    private static HTable testTable;
+    private static HTable atTable;
+    private static final String tableName = "malicious_domains";
+    private static final String cf = "cf";
+    private static final String atTableName = "access_trackers";
+    private static final String atCF= "cf";
+    private static final String beginTime = "04/14/2016 12:00:00";
+    private static final String timeFormat = "georgia";
+    private static Configuration config = null;
 
-    @Before
-    public void setup() throws Exception {
+    @BeforeClass
+    public static void setup() throws Exception {
         UnitTestHelper.setJavaLoggingLevel(Level.SEVERE);
         Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
         config = kv.getValue();
@@ -71,10 +68,12 @@ public class LeastRecentlyUsedPrunerIntegrationTest {
         testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
         atTable = testUtil.createTable(Bytes.toBytes(atTableName), Bytes.toBytes(atCF));
     }
-    @After
-    public void teardown() throws Exception {
+
+    @AfterClass
+    public static void teardown() throws Exception {
         HBaseUtil.INSTANCE.teardown(testUtil);
     }
+
     public List<LookupKey> getKeys(int start, int end) {
         List<LookupKey> keys = new ArrayList<>();
         for(int i = start;i < end;++i) {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderIntegrationTest.java
new file mode 100644
index 0000000..d0d637d
--- /dev/null
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderIntegrationTest.java
@@ -0,0 +1,349 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.dataloads.nonbulk.flatfile;
+
+import com.google.common.collect.ImmutableList;
+import org.adrianwalker.multilinestring.Multiline;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.PosixParser;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.metron.dataloads.extractor.Extractor;
+import org.apache.metron.dataloads.extractor.ExtractorHandler;
+import org.apache.metron.dataloads.hbase.mr.HBaseUtil;
+import org.apache.metron.enrichment.converter.EnrichmentConverter;
+import org.apache.metron.enrichment.converter.EnrichmentKey;
+import org.apache.metron.enrichment.converter.EnrichmentValue;
+import org.apache.metron.enrichment.lookup.LookupKV;
+import org.apache.metron.test.utils.UnitTestHelper;
+import org.junit.*;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.PrintWriter;
+import java.nio.file.Files;
+import java.nio.file.OpenOption;
+import java.nio.file.StandardOpenOption;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.stream.Stream;
+import java.util.zip.GZIPOutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipOutputStream;
+
+public class SimpleEnrichmentFlatFileLoaderIntegrationTest {
+
+  private static HBaseTestingUtility testUtil;
+
+  /** The test table. */
+  private static HTable testTable;
+  private static Configuration config = null;
+  private static final String tableName = "enrichment";
+  private static final String cf = "cf";
+  private static final String csvFile="input.csv";
+  private static final String extractorJson = "extractor.json";
+  private static final String enrichmentJson = "enrichment_config.json";
+  private static final String log4jProperty = "log4j";
+  private static final File file1 = new File("target/sefflt_data_1.csv");
+  private static final File file2 = new File("target/sefflt_data_2.csv");
+  private static final File multilineFile= new File("target/sefflt_data_2.csv");
+  private static final File multilineZipFile= new File("target/sefflt_data_2.csv.zip");
+  private static final File multilineGzFile= new File("target/sefflt_data_2.csv.gz");
+  private static final File lineByLineExtractorConfigFile = new File("target/sefflt_extractorConfig_lbl.json");
+  private static final File wholeFileExtractorConfigFile = new File("target/sefflt_extractorConfig_wf.json");
+  private static final int NUM_LINES = 1000;
+
+  /**
+   {
+      "config" : {
+        "columns" : {
+          "host" : 0,
+          "meta" : 2
+                    },
+        "indicator_column" : "host",
+        "separator" : ",",
+        "type" : "enrichment"
+                 },
+      "extractor" : "CSV"
+   }
+   */
+  @Multiline
+  private static String lineByLineExtractorConfig;
+
+  /**
+   {
+      "config" : {
+        "columns" : {
+          "host" : 0,
+          "meta" : 2
+                    },
+        "indicator_column" : "host",
+        "separator" : ",",
+        "type" : "enrichment"
+                 },
+      "extractor" : "CSV",
+      "inputFormat" : "WHOLE_FILE"
+   }
+   */
+  @Multiline
+  private static String wholeFileExtractorConfig;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    UnitTestHelper.setJavaLoggingLevel(Level.SEVERE);
+    Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
+    config = kv.getValue();
+    testUtil = kv.getKey();
+    testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
+
+    for(Result r : testTable.getScanner(Bytes.toBytes(cf))) {
+      Delete d = new Delete(r.getRow());
+      testTable.delete(d);
+    }
+
+    if(lineByLineExtractorConfigFile.exists()) {
+      lineByLineExtractorConfigFile.delete();
+    }
+    Files.write( lineByLineExtractorConfigFile.toPath()
+               , lineByLineExtractorConfig.getBytes()
+               , StandardOpenOption.CREATE_NEW , StandardOpenOption.TRUNCATE_EXISTING
+    );
+    if(wholeFileExtractorConfigFile.exists()) {
+      wholeFileExtractorConfigFile.delete();
+    }
+    Files.write( wholeFileExtractorConfigFile.toPath()
+               , wholeFileExtractorConfig.getBytes()
+               , StandardOpenOption.CREATE_NEW , StandardOpenOption.TRUNCATE_EXISTING
+    );
+    if(file1.exists()) {
+      file1.delete();
+    }
+    Files.write( file1.toPath()
+               , "google1.com,1,foo2\n".getBytes()
+               , StandardOpenOption.CREATE_NEW , StandardOpenOption.TRUNCATE_EXISTING
+    );
+    if(file2.exists()) {
+      file2.delete();
+    }
+    Files.write( file2.toPath()
+               , "google2.com,2,foo2\n".getBytes()
+               , StandardOpenOption.CREATE_NEW , StandardOpenOption.TRUNCATE_EXISTING
+    );
+
+    if(multilineFile.exists()) {
+      multilineFile.delete();
+    }
+    if(multilineGzFile.exists()) {
+      multilineGzFile.delete();
+    }
+    if(multilineGzFile.exists()) {
+      multilineZipFile.delete();
+    }
+    PrintWriter[] pws =new PrintWriter[] {};
+    try {
+      ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(multilineZipFile));
+      ZipEntry entry = new ZipEntry("file");
+      zos.putNextEntry(entry);
+       pws = new PrintWriter[]{
+         new PrintWriter(multilineFile),
+         new PrintWriter(zos),
+         new PrintWriter(new GZIPOutputStream(new FileOutputStream(multilineGzFile)))
+                              };
+      for(int i = 0;i < NUM_LINES;++i) {
+        for(PrintWriter pw : pws) {
+          pw.println("google" + i + ".com," + i + ",foo" + i);
+        }
+      }
+    }
+    finally {
+      for(PrintWriter pw : pws) {
+        pw.close();
+      }
+    }
+
+  }
+
+  @AfterClass
+  public static void teardown() throws Exception {
+    HBaseUtil.INSTANCE.teardown(testUtil);
+    file1.delete();
+    file2.delete();
+    multilineFile.delete();
+    multilineGzFile.delete();
+    multilineZipFile.delete();
+    lineByLineExtractorConfigFile.delete();
+    wholeFileExtractorConfigFile.delete();
+  }
+
+
+  @Test
+  public void testArgs() throws Exception {
+    String[] argv = {"-c cf", "-t enrichment"
+            , "-e extractor.json", "-n enrichment_config.json"
+            , "-l log4j", "-i input.csv"
+            , "-p 2", "-b 128", "-q"
+    };
+
+    String[] otherArgs = new GenericOptionsParser(config, argv).getRemainingArgs();
+
+    CommandLine cli = LoadOptions.parse(new PosixParser(), otherArgs);
+    Assert.assertEquals(extractorJson, LoadOptions.EXTRACTOR_CONFIG.get(cli).trim());
+    Assert.assertEquals(cf, LoadOptions.HBASE_CF.get(cli).trim());
+    Assert.assertEquals(tableName, LoadOptions.HBASE_TABLE.get(cli).trim());
+    Assert.assertEquals(enrichmentJson, LoadOptions.ENRICHMENT_CONFIG.get(cli).trim());
+    Assert.assertEquals(csvFile, LoadOptions.INPUT.get(cli).trim());
+    Assert.assertEquals(log4jProperty, LoadOptions.LOG4J_PROPERTIES.get(cli).trim());
+    Assert.assertEquals("2", LoadOptions.NUM_THREADS.get(cli).trim());
+    Assert.assertEquals("128", LoadOptions.BATCH_SIZE.get(cli).trim());
+  }
+
+  @Test
+  public void testLocalLineByLine() throws Exception {
+    String[] argv = {"-c cf", "-t enrichment"
+            , "-e " + lineByLineExtractorConfigFile.getPath()
+            , "-i " + multilineFile.getPath()
+            , "-p 2", "-b 128", "-q"
+    };
+    SimpleEnrichmentFlatFileLoader.main(config, argv);
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for (Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+      testTable.delete(new Delete(r.getRow()));
+    }
+    Assert.assertEquals(NUM_LINES, results.size());
+    Assert.assertTrue(results.get(0).getKey().indicator.startsWith("google"));
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("meta").toString().startsWith("foo"));
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("host").toString().startsWith("google"));
+
+  }
+
+  @Test
+  public void testLocalLineByLine_gz() throws Exception {
+    String[] argv = {"-c cf", "-t enrichment"
+            , "-e " + lineByLineExtractorConfigFile.getPath()
+            , "-i " + multilineGzFile.getPath()
+            , "-p 2", "-b 128", "-q"
+    };
+    SimpleEnrichmentFlatFileLoader.main(config, argv);
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for (Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+      testTable.delete(new Delete(r.getRow()));
+    }
+    Assert.assertEquals(NUM_LINES, results.size());
+    Assert.assertTrue(results.get(0).getKey().indicator.startsWith("google"));
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("meta").toString().startsWith("foo"));
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("host").toString().startsWith("google"));
+
+  }
+
+  @Test
+  public void testLocalLineByLine_zip() throws Exception {
+    String[] argv = {"-c cf", "-t enrichment"
+            , "-e " + lineByLineExtractorConfigFile.getPath()
+            , "-i " + multilineZipFile.getPath()
+            , "-p 2", "-b 128", "-q"
+    };
+    SimpleEnrichmentFlatFileLoader.main(config, argv);
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for (Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+      testTable.delete(new Delete(r.getRow()));
+    }
+    Assert.assertEquals(NUM_LINES, results.size());
+    Assert.assertTrue(results.get(0).getKey().indicator.startsWith("google"));
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("meta").toString().startsWith("foo"));
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("host").toString().startsWith("google"));
+
+  }
+
+  @Test
+  public void testLocalWholeFile() throws Exception {
+    String[] argv = { "-c cf", "-t enrichment"
+            , "-e " + wholeFileExtractorConfigFile.getPath()
+            , "-i " + file1.getPath() + "," + file2.getPath()
+            , "-p 2", "-b 128", "-q"
+    };
+    SimpleEnrichmentFlatFileLoader.main(config, argv);
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for(Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+      testTable.delete(new Delete(r.getRow()));
+    }
+    Assert.assertEquals(2, results.size());
+    Assert.assertTrue(results.get(0).getKey().indicator.startsWith("google"));
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("meta").toString().startsWith("foo"));
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("host").toString().startsWith( "google"));
+
+  }
+
+  @Test
+  public void testMRLineByLine() throws Exception {
+    String[] argv = {"-c cf", "-t enrichment"
+            , "-e " + lineByLineExtractorConfigFile.getPath()
+            , "-i " + multilineFile.getName()
+            , "-m MR"
+            , "-p 2", "-b 128", "-q"
+    };
+    FileSystem fs = FileSystem.get(config);
+    HBaseUtil.INSTANCE.writeFile(new String(Files.readAllBytes(multilineFile.toPath())), new Path(multilineFile.getName()), fs);
+    SimpleEnrichmentFlatFileLoader.main(config, argv);
+    EnrichmentConverter converter = new EnrichmentConverter();
+    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
+    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
+    for (Result r : scanner) {
+      results.add(converter.fromResult(r, cf));
+      testTable.delete(new Delete(r.getRow()));
+    }
+    Assert.assertEquals(NUM_LINES, results.size());
+    Assert.assertTrue(results.get(0).getKey().indicator.startsWith("google"));
+    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
+    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("meta").toString().startsWith("foo"));
+    Assert.assertTrue(results.get(0).getValue().getMetadata().get("host").toString().startsWith("google"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
deleted file mode 100644
index 4ffb91a..0000000
--- a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/flatfile/SimpleEnrichmentFlatFileLoaderTest.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.metron.dataloads.nonbulk.flatfile;
-
-import com.google.common.collect.ImmutableList;
-import org.adrianwalker.multilinestring.Multiline;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.PosixParser;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.HTableInterface;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.util.GenericOptionsParser;
-import org.apache.log4j.PropertyConfigurator;
-import org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader;
-import org.apache.metron.dataloads.extractor.Extractor;
-import org.apache.metron.dataloads.extractor.ExtractorHandler;
-import org.apache.metron.dataloads.extractor.inputformat.WholeFileFormat;
-import org.apache.metron.dataloads.nonbulk.flatfile.SimpleEnrichmentFlatFileLoader;
-import org.apache.metron.dataloads.hbase.mr.HBaseUtil;
-import org.apache.metron.enrichment.converter.HbaseConverter;
-import org.apache.metron.enrichment.converter.EnrichmentConverter;
-import org.apache.metron.enrichment.converter.EnrichmentKey;
-import org.apache.metron.enrichment.converter.EnrichmentValue;
-import org.apache.metron.enrichment.lookup.LookupKV;
-import org.apache.metron.common.utils.JSONUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Stream;
-
-public class SimpleEnrichmentFlatFileLoaderTest {
-
-  private HBaseTestingUtility testUtil;
-
-  /** The test table. */
-  private HTable testTable;
-  private String tableName = "enrichment";
-  private String cf = "cf";
-  private String csvFile="input.csv";
-  private String extractorJson = "extractor.json";
-  private String enrichmentJson = "enrichment_config.json";
-  private String log4jProperty = "log4j";
-
-  Configuration config = null;
-  /**
-   {
-      "config" : {
-        "columns" : {
-          "host" : 0,
-          "meta" : 2
-                    },
-        "indicator_column" : "host",
-        "separator" : ",",
-        "type" : "enrichment"
-                 },
-      "extractor" : "CSV"
-   }
-   */
-  @Multiline
-  private static String extractorConfig;
-
-  @Before
-  public void setup() throws Exception {
-    Map.Entry<HBaseTestingUtility, Configuration> kv = HBaseUtil.INSTANCE.create(true);
-    config = kv.getValue();
-    testUtil = kv.getKey();
-    testTable = testUtil.createTable(Bytes.toBytes(tableName), Bytes.toBytes(cf));
-  }
-
-  @After
-  public void teardown() throws Exception {
-    HBaseUtil.INSTANCE.teardown(testUtil);
-  }
-
-  @Test
-  public void testCommandLine() throws Exception {
-    Configuration conf = HBaseConfiguration.create();
-
-    String[] argv = { "-c cf", "-t enrichment"
-            , "-e extractor.json", "-n enrichment_config.json"
-            , "-l log4j", "-i input.csv"
-            , "-p 2", "-b 128"
-    };
-    String[] otherArgs = new GenericOptionsParser(conf, argv).getRemainingArgs();
-
-    CommandLine cli = SimpleEnrichmentFlatFileLoader.LoadOptions.parse(new PosixParser(), otherArgs);
-    Assert.assertEquals(extractorJson,SimpleEnrichmentFlatFileLoader.LoadOptions.EXTRACTOR_CONFIG.get(cli).trim());
-    Assert.assertEquals(cf, SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_CF.get(cli).trim());
-    Assert.assertEquals(tableName,SimpleEnrichmentFlatFileLoader.LoadOptions.HBASE_TABLE.get(cli).trim());
-    Assert.assertEquals(enrichmentJson,SimpleEnrichmentFlatFileLoader.LoadOptions.ENRICHMENT_CONFIG.get(cli).trim());
-    Assert.assertEquals(csvFile,SimpleEnrichmentFlatFileLoader.LoadOptions.INPUT.get(cli).trim());
-    Assert.assertEquals(log4jProperty, SimpleEnrichmentFlatFileLoader.LoadOptions.LOG4J_PROPERTIES.get(cli).trim());
-    Assert.assertEquals("2", SimpleEnrichmentFlatFileLoader.LoadOptions.NUM_THREADS.get(cli).trim());
-    Assert.assertEquals("128", SimpleEnrichmentFlatFileLoader.LoadOptions.BATCH_SIZE.get(cli).trim());
-  }
-
-  @Test
-  public void test() throws Exception {
-
-    Assert.assertNotNull(testTable);
-    String contents = "google.com,1,foo";
-
-    EnrichmentConverter converter = new EnrichmentConverter();
-    ExtractorHandler handler = ExtractorHandler.load(extractorConfig);
-    Extractor e = handler.getExtractor();
-    SimpleEnrichmentFlatFileLoader loader = new SimpleEnrichmentFlatFileLoader();
-    Stream<String> contentStreams = ImmutableList.of(contents).stream();
-    ThreadLocal<ExtractorState> state = new ThreadLocal<ExtractorState>() {
-      @Override
-      protected ExtractorState initialValue() {
-        return new ExtractorState(testTable, e, converter);
-      }
-    };
-    loader.load(ImmutableList.of(contentStreams)
-               , state
-               , cf
-               , 2
-               );
-
-    ResultScanner scanner = testTable.getScanner(Bytes.toBytes(cf));
-    List<LookupKV<EnrichmentKey, EnrichmentValue>> results = new ArrayList<>();
-    for(Result r : scanner) {
-      results.add(converter.fromResult(r, cf));
-    }
-    Assert.assertEquals(1, results.size());
-    Assert.assertEquals(results.get(0).getKey().indicator, "google.com");
-    Assert.assertEquals(results.get(0).getKey().type, "enrichment");
-    Assert.assertEquals(results.get(0).getValue().getMetadata().size(), 2);
-    Assert.assertEquals(results.get(0).getValue().getMetadata().get("meta"), "foo");
-    Assert.assertEquals(results.get(0).getValue().getMetadata().get("host"), "google.com");
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
index 1cb58d8..0223514 100644
--- a/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
+++ b/metron-platform/metron-data-management/src/test/java/org/apache/metron/dataloads/nonbulk/taxii/TaxiiIntegrationTest.java
@@ -33,10 +33,7 @@ import org.apache.metron.enrichment.converter.EnrichmentKey;
 import org.apache.metron.enrichment.converter.EnrichmentValue;
 import org.apache.metron.test.mock.MockHTable;
 import org.apache.metron.enrichment.lookup.LookupKV;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.*;
 
 import java.io.IOException;
 import java.util.HashSet;
@@ -44,13 +41,13 @@ import java.util.Set;
 
 public class TaxiiIntegrationTest {
 
-    @Before
-    public void setup() throws IOException {
+    @BeforeClass
+    public static void setup() throws IOException {
         MockTaxiiService.start(8282);
     }
 
-    @After
-    public void teardown() {
+    @AfterClass
+    public static void teardown() {
         MockTaxiiService.shutdown();
         MockHTable.Provider.clear();
     }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/1be4fcb0/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java b/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
index a93c442..ae04e43 100644
--- a/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
+++ b/metron-platform/metron-indexing/src/test/java/org/apache/metron/indexing/integration/IndexingIntegrationTest.java
@@ -175,8 +175,8 @@ public abstract class IndexingIntegrationTest extends BaseIntegrationTest {
             .withComponent("config", configUploadComponent)
             .withComponent("storm", fluxComponent)
             .withComponent("search", getSearchComponent(topologyProperties))
-            .withMillisecondsBetweenAttempts(15000)
-            .withNumRetries(10)
+            .withMillisecondsBetweenAttempts(1500)
+            .withNumRetries(100)
             .withMaxTimeMS(150000)
             .withCustomShutdownOrder(new String[] {"search","storm","config","kafka","zk"})
             .build();



[17/17] incubator-metron git commit: METRON-660 Working generator script and site specification, with re-write script to conform Github-MD source files to doxia-markdown. Also misc fixes to markdown files. This closes apache/incubator-metron#429

Posted by ce...@apache.org.
METRON-660 Working generator script and site specification, with re-write script to conform Github-MD source files to doxia-markdown. Also misc fixes to markdown files. This closes apache/incubator-metron#429


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/e4d54a27
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/e4d54a27
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/e4d54a27

Branch: refs/heads/Metron_0.3.1
Commit: e4d54a2784a571ec84d5eb31f801f788b96bec7e
Parents: 2531b72
Author: mattf-horton <mf...@hortonworks.com>
Authored: Thu Jan 19 00:19:00 2017 -0800
Committer: cstella <ce...@gmail.com>
Committed: Mon Feb 6 15:15:13 2017 -0500

----------------------------------------------------------------------
 README.md                                       |   1 -
 metron-analytics/README.md                      |   8 +
 .../metron-profiler-client/README.md            |   2 +-
 metron-deployment/README.md                     |   4 +-
 metron-deployment/packer-build/README.md        |   8 +-
 metron-deployment/roles/README.md               |   7 +
 metron-deployment/vagrant/README.md             |   6 +
 .../metron-data-management/README.md            |  20 +-
 metron-platform/metron-parsers/README.md        |   4 +-
 metron-platform/metron-pcap-backend/README.md   |   5 +-
 metron-sensors/README.md                        |   5 +
 site-book/.gitignore                            |   3 +-
 site-book/bin/fix-md-dialect.py                 | 422 +++++++++++++++++++
 site-book/bin/generate-md.sh                    | 277 ++++++++++++
 site-book/src/site/images/metron-logo.png       | Bin 21186 -> 0 bytes
 .../image-archive/ApacheIncubating_Logo.png     | Bin 0 -> 11294 bytes
 .../resources/image-archive/metron-logo.png     | Bin 0 -> 21186 bytes
 site-book/src/site/site.xml                     |  74 +++-
 site-book/src/test/resources/test-fix-md.txt    |  34 ++
 .../src/test/resources/test-fix-md.txt.result   |  34 ++
 .../src/test/resources/test-fix-md.txt.stat     |  34 ++
 21 files changed, 922 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 92e1060..419e9dd 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,6 @@ the context and situational awareness, as well as the who and where
 information critical for investigation
 
 3. **Efficient information storage** based on how the information will be used:
-
    - Logs and telemetry are stored such that they can be efficiently mined and
 analyzed for concise security visibility
    - The ability to extract and reconstruct full packets helps an analyst answer 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-analytics/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/README.md b/metron-analytics/README.md
new file mode 100644
index 0000000..d639ca3
--- /dev/null
+++ b/metron-analytics/README.md
@@ -0,0 +1,8 @@
+# Metron Analytics
+
+Metron analytics consists of:
+
+- Model-as-a-Service (MAAS) access to Machine Learning services
+- Profiler and Profiler Client
+- Statistics
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-analytics/metron-profiler-client/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/README.md b/metron-analytics/metron-profiler-client/README.md
index 60779c8..4609164 100644
--- a/metron-analytics/metron-profiler-client/README.md
+++ b/metron-analytics/metron-profiler-client/README.md
@@ -1,6 +1,6 @@
 # Metron Profiler Client
 
-This project provides a client API for accessing the profiles generated by the [Metron Profiler](..//metron-profiler).  This includes both a Java API and Stellar API for accessing the profile data.  The primary use case is to extract profile data for use during model scoring.
+This project provides a client API for accessing the profiles generated by the [Metron Profiler](../metron-profiler).  This includes both a Java API and Stellar API for accessing the profile data.  The primary use case is to extract profile data for use during model scoring.
 
 ## Stellar Client API
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-deployment/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/README.md b/metron-deployment/README.md
index eb93df4..46a5f8d 100644
--- a/metron-deployment/README.md
+++ b/metron-deployment/README.md
@@ -106,7 +106,7 @@ This will set up
 
 ### Prerequisites
 - A cluster managed by Ambari 2.4
-- Metron RPMs available on the cluster in the /localrepo directory.  See [RPM](#RPM) for further information.
+- Metron RPMs available on the cluster in the /localrepo directory.  See [RPM](#rpm) for further information.
 
 ### Building Management Pack
 From `metron-deployment` run
@@ -129,7 +129,7 @@ The Indexing / Parsers/ Enrichment masters should be colocated with a Kafka Brok
 This colocation is currently not enforced by Ambari, and should be managed by either a Service or Stack advisor as an enhancement.
 
 Several configuration parameters will need to be filled in, and should be pretty self explanatory (primarily a couple of Elasticsearch configs, and the Storm REST URL).  Examples are provided in the descriptions on Ambari.
-Notably, the URL for the GeoIP database that is preloaded (and is prefilled by default) can be set to use a `file://` location
+Notably, the URL for the GeoIP database that is preloaded (and is prefilled by default) can be set to use a `file:///` location
 
 After installation, a custom action is available in Ambari (where stop / start services are) to install Elasticsearch templates.  Similar to this, a custom Kibana action to Load Template is available.
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-deployment/packer-build/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/packer-build/README.md b/metron-deployment/packer-build/README.md
index ee49477..4c75287 100644
--- a/metron-deployment/packer-build/README.md
+++ b/metron-deployment/packer-build/README.md
@@ -25,9 +25,13 @@ Build Single Images
 ---------------------- 
  Navigate to *your-project-directory*/metron-deployment/packer-build
  * HDP Centos 
- ```bin/bento build hdp-centos-6.7.json```
+ ```
+bin/bento build hdp-centos-6.7.json
+```
  * Full Metron
- ```bin/bento build metron-centos-6.7.json```
+ ```
+bin/bento build metron-centos-6.7.json
+```
 
 Using Your New Box File
 ---------------------- 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-deployment/roles/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/README.md b/metron-deployment/roles/README.md
new file mode 100644
index 0000000..32c4fbc
--- /dev/null
+++ b/metron-deployment/roles/README.md
@@ -0,0 +1,7 @@
+# Ansible Roles
+
+- Kibana
+- Monit
+- OpenTaxii
+- Pcap Relay
+- Sensor Test Mode

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-deployment/vagrant/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/vagrant/README.md b/metron-deployment/vagrant/README.md
new file mode 100644
index 0000000..b629a1f
--- /dev/null
+++ b/metron-deployment/vagrant/README.md
@@ -0,0 +1,6 @@
+# Vagrant Deployment
+
+- Codelab Platform
+- Fast CAPA Test Platform
+- Full Dev Platform
+- Quick Dev Platform

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-platform/metron-data-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/README.md b/metron-platform/metron-data-management/README.md
index eaafda4..60c0283 100644
--- a/metron-platform/metron-data-management/README.md
+++ b/metron-platform/metron-data-management/README.md
@@ -1,4 +1,4 @@
-# metron-data-management
+# Resource Data Management
 
 This project is a collection of classes to assist with loading of
 various enrichment and threat intelligence sources into Metron.
@@ -64,7 +64,7 @@ schema if necessary for the data if it is not fixed (as in STIX, e.g.).
 Consider the following example configuration file which
 describes how to process a CSV file.
 
-````
+```
 {
   "config" : {
     "columns" : {
@@ -77,7 +77,7 @@ describes how to process a CSV file.
   }
   ,"extractor" : "CSV"
 }
-````
+```
 
 In this example, we have instructed the extractor of the schema (i.e. the columns field), 
 two columns at the first and third position.  We have indicated that the `ip` column is the indicator type
@@ -113,14 +113,14 @@ NOTE: The enrichment type will be used as the type above.
 
 Consider the following configuration for an Extractor
 
-````
+```
 {
   "config" : {
     "stix_address_categories" : "IPV_4_ADDR"
   }
   ,"extractor" : "STIX"
 }
-````
+```
 
 In here, we're configuring the STIX extractor to load from a series of STIX files, however we only want to bring in IPv4
 addresses from the set of all possible addresses.  Note that if no categories are specified for import, all are assumed.
@@ -136,7 +136,7 @@ documents flowing through the enrichment topology.
 
 Consider the following Enrichment Configuration JSON.  This one is for a threat intelligence type:
 
-````
+```
 {
   "zkQuorum" : "localhost:2181"
  ,"sensorToFieldList" : {
@@ -149,7 +149,7 @@ Consider the following Enrichment Configuration JSON.  This one is for a threat
            }
                         }
 }
-````
+```
 
 We have to specify the following:
 * The zookeeper quorum which holds the cluster configuration
@@ -174,7 +174,7 @@ It is quite common for this Taxii server to be an aggregation server such as Sol
 In addition to the Enrichment and Extractor configs described above, this loader requires a configuration file describing the connection information
 to the Taxii server.  An illustrative example of such a configuration file is:
 
-````
+```
 {
    "endpoint" : "http://localhost:8282/taxii-discovery-service"
   ,"type" : "DISCOVER"
@@ -183,7 +183,7 @@ to the Taxii server.  An illustrative example of such a configuration file is:
   ,"columnFamily" : "cf"
   ,"allowedIndicatorTypes" : [ "domainname:FQDN", "address:IPV_4_ADDR" ]
 }
-````
+```
 
 As you can see, we are specifying the following information:
 * endpoint : The URL of the endpoint
@@ -249,6 +249,6 @@ The parameters for the utility are as follows:
 |------------|---------------------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
 | -h         |                     | No           | Generate the help screen/set of options                                                                                                                                              |
 | -g         | --geo_url           | No           | GeoIP URL - defaults to http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz
-| -r         | --remote_dir        | No           | HDFS directory to land formatted GeoIP file - defaults to /apps/metron/geo/<epoch millis>/
+| -r         | --remote_dir        | No           | HDFS directory to land formatted GeoIP file - defaults to /apps/metron/geo/\<epoch millis\>/
 | -t         | --tmp_dir           | No           | Directory for landing the temporary GeoIP data - defaults to /tmp
 | -z         | --zk_quorum         | Yes          | Zookeeper Quorum URL (zk1:port,zk2:port,...)

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-platform/metron-parsers/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/README.md b/metron-platform/metron-parsers/README.md
index cb51274..3c4310d 100644
--- a/metron-platform/metron-parsers/README.md
+++ b/metron-platform/metron-parsers/README.md
@@ -295,7 +295,7 @@ usage: start_parser_topology.sh
                                                 (zk1:2181,zk2:2181,...
 ```
 
-# The `--extra_kafka_spout_config` Option
+## The `--extra_kafka_spout_config` Option
 These options are intended to configure the Storm Kafka Spout more completely.  These options can be
 specified in a JSON file containing a map associating the kafka spout configuration parameter to a value.
 The range of values possible to configure are:
@@ -322,7 +322,7 @@ For instance, creating a JSON file which will set the `bufferSizeBytes` to 2MB a
 
 This would be loaded by passing the file as argument to `--extra_kafka_spout_config`
 
-# The `--extra_topology_options` Option
+## The `--extra_topology_options` Option
 
 These options are intended to be Storm configuration options and will live in
 a JSON file which will be loaded into the Storm config.  For instance, if you wanted to set a storm property on

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-platform/metron-pcap-backend/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap-backend/README.md b/metron-platform/metron-pcap-backend/README.md
index 927ae40..e1a2683 100644
--- a/metron-platform/metron-pcap-backend/README.md
+++ b/metron-platform/metron-pcap-backend/README.md
@@ -80,7 +80,10 @@ This tool exposes the two methods for filtering PCAP data via a command line too
 - fixed
 - query (Metron Stellar)
 
-The tool is executed via ```${metron_home}/bin/pcap_query.sh [fixed|query]```
+The tool is executed via 
+```
+${metron_home}/bin/pcap_query.sh [fixed|query]
+```
 
 #### Usage
 ```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/metron-sensors/README.md
----------------------------------------------------------------------
diff --git a/metron-sensors/README.md b/metron-sensors/README.md
new file mode 100644
index 0000000..af932e5
--- /dev/null
+++ b/metron-sensors/README.md
@@ -0,0 +1,5 @@
+# Metron Sensors
+
+- Fast CAPA
+- Py CAPA
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/.gitignore
----------------------------------------------------------------------
diff --git a/site-book/.gitignore b/site-book/.gitignore
index de3a505..90e4c61 100644
--- a/site-book/.gitignore
+++ b/site-book/.gitignore
@@ -1,4 +1,5 @@
-site/markdown/
+src/site/markdown/
+src/site/resources/images/
 *~
 target/
 *dependency-reduced-pom.xml

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/bin/fix-md-dialect.py
----------------------------------------------------------------------
diff --git a/site-book/bin/fix-md-dialect.py b/site-book/bin/fix-md-dialect.py
new file mode 100755
index 0000000..23ce42a
--- /dev/null
+++ b/site-book/bin/fix-md-dialect.py
@@ -0,0 +1,422 @@
+#########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#########################################################################
+
+
+## Markdown has these types of paragraph: heading, text, list item (bullet or numbered),
+## codeblock, table, and block quote.
+##
+## This script fixes up differences in Markdown dialect, between Github-MD and doxia-markdown.
+## Specifically, it fixes these problems:
+##     1. In Github-MD, bullets and codeblock starts are self-delimiting.  In doxia-markdown, they
+## must be separated from preceding text or (in the case of codeblocks) bullets, by a blank line.
+## Failure to do so causes the bullet or codeblock delimiter to be interpreted as ordinary text,
+## and the content gets munched into the preceding paragraph.  The codeblock delimiter (```) as text
+## gets interpreted as a codephrase delimiter (`) plus a preceding or following empty codephrase (``).
+##     2. Github-MD is liberal in regard to what an 'indent' is, allowing 1, 2, 4, or 8 blanks, or
+## a tab.  We mostly use 2 blanks.  Doxia-markdown requires strictly 4 spaces or a tab.  Failure
+## to adhere to this requirement causes indents to be ignored or misinterpreted, leading again to
+## paragraph munching and delimiter ignoring.
+##     3. In Doxia-markdown, if you indent below a header or text paragraph, it is interpreted as
+## an implicit codeblock start.  In Github-MD, we only start codeblocks with the explicit
+## codeblock delimiter (```) and sometimes indent below text just for visual emphasis, so the
+## doxia-markdown interpretation is unwelcome.  Thus, in our rewrite, we disallow indenting below
+## text or headers.  This may make the text less pretty than the Github-MD presentation, but it
+## avoids the incorrect codeblocking.
+##     4. In Doxia-markdown, the indent of the end-codeblock delimiter must match that of the
+## begin-codeblock delimiter, or it won't be recognized and the codeblock will run on.
+##     5. Relative links need to be re-written.  '.md' files need to be changed to '.html', and
+## as best we can we will re-write named anchors referring to tags autogenerated from headers.
+## The problem with generated tags is that Github-MD forces header text to lower-case, and
+## replaces blank spaces with hyphens, while doxia-markdown leaves case unchanged, and replaces
+## blanks with underscores.  Fortunately we seem to have a culture of using link references that
+## are typographically the same as the header text, so we have some basis for fixing most links.
+##     6. H1 headers don't get named anchors generated, unlike H2 and lower headers. Don't know
+## why doxia-markdown has this deficiency, perhaps it assumes H1 will only be used once at the
+## beginning of the doc.  We will insert an explicit anchor just before the H1 headers, to fix.
+##
+## So far, we're ignoring tables and block quotes.
+##
+## This script also manages the re-writing of named files to *.tmp, then mv to replace the original file.
+
+
+import sys
+import os
+import inspect
+import re
+
+# These are the characters excluded by Markdown from use in auto-generated anchor text for Headings.
+EXCLUDED_CHARS_REGEX = r'[()[\]`"' + r"'" + r']'   # May add chars in future as needed
+
+def report_error(s) :
+    print >>sys.stderr, "ERROR: " + s 
+    print >>sys.stderr, "on line: " + str(FNR) + " in file: " + FILENAME  
+    print >>sys.stderr, inputline
+    exit -1
+
+
+def trace(msg) :
+    if TRACE :
+        print >>sys.stderr, "TRACE: " + inspect.currentframe().f_back.f_code.co_name + " : InputLine " + str(FNR) + " : " + msg
+
+class INDENT_STACK :
+    'This class maintains the indent stack during doc parsing.'
+
+    def __init__(self) :
+        self.my_stack = [ {'physical' : 0, 'logical' : 0, 'type' : 'none' } ]
+
+    def init_indent(self) :
+        del self.my_stack
+        self.my_stack = [ {'physical' : 0, 'logical' : 0, 'type' : 'none' } ]
+
+    def push_indent(self, n, new_type) :
+        #Increment the logical depth only if under a bullet type. This fixes problem #3.
+        level = self.logical_indent_level() + (self.current_type() == "bullet")  # plus 1 if true
+        self.my_stack.append( {'physical':n, 'logical':level, 'type':new_type} )
+
+    def set_current_type(self, new_type) :
+        # adjust topmost type
+        self.my_stack[-1]['type'] = new_type
+
+    def pop_indent(self) :
+        if len(self.my_stack) > 1 :
+            return self.my_stack.pop()['physical']
+        else :
+            return 0
+
+    def current_indent(self) :
+        # top of stack, physical
+        return self.my_stack[-1]['physical']
+
+    def logical_indent_level(self) :
+        # top of stack, logical
+        return self.my_stack[-1]['logical']
+
+    def current_type(self) :
+        # top of stack, type
+        return self.my_stack[-1]['type']
+
+    ## End class INDENT_STACK
+
+global indent_stack
+indent_stack = INDENT_STACK()  # single instance
+
+
+def convert_tabs(s) :
+    # Courtesy of Python, this does a real column-aware tab expansion.
+    # If this doesn't work, we'll need to go back to erroring on " \t", that is, spaces followed by tabs.
+    trace("orig length {0}".format(len(s)) )
+    ct = s.count("\t")
+    s = s.expandtabs(4)
+    trace("after {0} tab substitutions, end length is {1}".format(ct, len(s)) )
+    return s
+
+
+def fix_prefix_blanks(new_type) :
+    global inputline
+    # Fix up the indenting (prefix blanks) in inputline.  This fixes problem #2.
+    # Don't worry about blank lines here, they are filtered out before calling this method.
+    # Both uses and maintains the indent stack, which is why we need the new_type passed in.
+    prefix_blanks = re.search(r'^[\s]*', inputline)
+    if prefix_blanks :
+        prefix_blanks = prefix_blanks.group()
+        trace("After prefix-blanks match, prefix_blanks is |" + prefix_blanks + "| length is " + str(len(prefix_blanks)) )
+        prefix_blanks = convert_tabs(prefix_blanks)
+    else :
+        prefix_blanks = ""
+
+    trace("After convert_tabs, prefix_blanks is |" + prefix_blanks + "| length is " + str(len(prefix_blanks)) )
+
+    # prefix_blanks now contains the 'physical' indent of the current paragraph, after tab substitution.
+    # The indent of this paragraph may be > or == to the previous paragraph.  Those are the easy cases.
+    # If the indent is less than previous, is it equal to the indent of the next lower indented object?
+    # Or of a lower yet object?  Or is it intermediate between two lower objects currently in the stack?
+    # The latter case is an anomaly, but there's no enforcement in Github-MD.
+    # The following logic is an empirical reverse engineering, that seems adequate so far.
+    # It basically says, find a prior level of indent that this is not less than, and then pretend that
+    # the objects between it and this object weren't there.
+
+    trace("current logical_indent_level is {0} and current_indent is {1}".format(
+            indent_stack.logical_indent_level(), indent_stack.current_indent() ))
+    while len(prefix_blanks) < indent_stack.current_indent() :
+        indent_stack.pop_indent()
+    if len(prefix_blanks) > indent_stack.current_indent() :
+        indent_stack.push_indent(len(prefix_blanks), new_type)
+    else :  # len(prefix_blanks) == indent_stack.current_indent()
+        indent_stack.set_current_type(new_type)
+
+    trace(("After evaluating this line's prefix-blanks and prev_type, new logical_indent_level() is {0} " +
+           "and current_indent is {1}").format(indent_stack.logical_indent_level(), indent_stack.current_indent() ))
+
+    # Now whack off the prefix blanks, and replace with a standardized string of blanks appropriate to
+    # the logical indent level.
+    trace("Orig line is " + inputline)
+    inputline = re.sub(r'^[\s]*', BLANKS[0 : 4*indent_stack.logical_indent_level()], inputline, 1)
+    trace("New line is  " + inputline)
+
+
+def rewrite_relative_links() :
+    global inputline
+    trace("entering with line: " + inputline)
+    # Fix up the relative links in inputline.  This fixes problem #5.
+    num_links = inputline.count("](")
+    links = re.findall(r'\[[^\]]+\]\([^)]+\)', inputline)
+    num_whole_links = len(links)
+    trace("num_links = {0}, num_whole_links = {1}".format(num_links, num_whole_links))
+    if (num_links != num_whole_links) :
+        if re.search(r'\[[^\][!]*\![\s]*\[', inputline) :
+            # Nested link label expressions, with '!'.
+            # Special case where a link value is inlined into the link label,
+            # as in the first line of the base README.md file.  Bail on such lines.
+            trace("WARNING: Found nested link label expressions.")
+            return
+        else :
+            report_error("Found link split across multiple lines.  We can't process this.")
+
+    for linkitem in links :
+        pieces = re.search(r'(\[[\s`]*)([^\]]*[^\s`\]])([\s`]*\]\([\s]*)([^\s]+)([\s]*\))', linkitem).groups()
+        trace("Link: " + linkitem)
+        trace("Pieces: " + " ".join( (pieces[0],pieces[1],pieces[2],pieces[3],pieces[4]) ))
+        labeltext = pieces[1]
+        href = pieces[3]
+        trace("Extracted labeltext is: " + labeltext)
+        trace("Extracted href is: " + href)
+        if re.search(r'^http|\?', href) :
+            # Don't rewrite absolute or parameterized URLs; neither is native to this markdown book.
+            trace("skipping absolute or parameterized URL")
+            continue
+
+        # Rewrite implicit index references to explicit, so the book will work as well
+        # with 'file:///' preview as with a real web server.
+        # We are only concerned with file path names here, so split at '#' if present.
+        num_sharps = href.count("#")
+        if (num_sharps >= 2) :
+            report_error("Multiple #'s in a single link href.")
+        elif (num_sharps == 1) :
+            # Implicit index references are directory names, which seldom have a filetype suffix.
+            # On the other hand, explicit file references must have filetype, else the browser
+            # won't know what to do with it.  So if no filetype extension, assume is a directory
+            # and add 'index.html'.  Skip if this is an intra-document link.
+            if not re.search(r'^#|\.[^/#]+#', href) :
+                if not href.count("/#") : 
+                    href = re.sub(r'#', "/#", href, 1)
+                href = re.sub(r'/#', "/index.html#", href, 1)
+
+            # Fix up '.md' references.
+            href = re.sub(r'^README\.md#', "index.html#", href)
+            href = re.sub(r'/README\.md#', "/index.html#", href)
+            href = re.sub(r'\.md#', ".html#", href)
+
+        else :  # num_sharps == 0
+            # Same logic as above, just at $ instead of #.
+            if not re.search(r'\.[^/]+$', href) :
+                if not href.endswith("/") :
+                    href = href + "/"
+                href = re.sub(r'/$', "/index.html", href)
+
+            # Fix up '.md' references.
+            href = re.sub(r'^README\.md$', "index.html", href)
+            href = re.sub(r'/README\.md$', "/index.html", href)
+            href = re.sub(r'\.md$', ".html", href)
+
+        trace("After .md fixup, href is: " + href)
+
+        # Re-write named anchors referring to generated tags.
+        sharp = href.find("#")
+        if (sharp >= 0) :
+            named_anchor = href[sharp+1 : ]
+            scratch = labeltext.lower()
+            scratch = re.sub(r' ', "-", scratch)
+            scratch = re.sub(EXCLUDED_CHARS_REGEX, "", scratch)
+            if (scratch == named_anchor) :
+                trace("Found a rewritable case")
+                scratch = labeltext
+                scratch = re.sub(r' ', "_", scratch)
+                scratch = re.sub(EXCLUDED_CHARS_REGEX, "", scratch)
+                href = re.sub("#" + named_anchor, "#" + scratch, href)
+
+        trace("After anchor rewrite, href is: " + href)
+        
+        # Now swap out the bad href for the fixed one in inputline.
+        if (href != pieces[3]) :
+            # Assemble the full link string to prevent similar substrings (to href) in different contexts being substituted.
+            scratch = pieces[0] + pieces[1] + pieces[2] + href + pieces[4]
+            trace("Fixed link text is: " + scratch)
+            trace("linkitem is still:  " + linkitem)
+            k = inputline.find(linkitem)
+            inputline = inputline[ : k] + scratch + inputline[ k + len(linkitem) : ]
+            trace("Fixed inputline is: " + inputline)
+
+
+
+################################################
+# begin state machine
+
+global inputline, active_type
+BLANKS = "                                                                                    "
+TRACE = 0
+FNR = -1
+trace("Starting trace")
+
+# Github uses relative indents, but doxia wants only and exactly multiples of 4.
+# To turn the more forgiving into more regular, we must track both logical and actual indents.
+indent_stack.init_indent()
+
+# Paragraph type can be none, text, bullet, code, or heading.
+# Note 'current_type()' used in managing the logical indent level on the indent stack,
+# and 'active_type' used in the pattern recognition state machine, are deliberately different.
+active_type = "none"
+
+# Note: order of the below 'if' clauses is critically important for the state machine.
+# Don't change the order.
+
+if len(sys.argv) <= 1 :
+    report_error("Please provide names of files to be processed, as command line arguments.")
+
+for FILENAME in sys.argv[1:] :
+    infile = open(FILENAME, 'r')
+    outfile = open(FILENAME + ".tmp", 'w')
+    FNR = 0
+    H1_COUNT = 0
+    for inputline in infile :
+        FNR += 1
+        inputline = inputline.rstrip("\n")
+
+        if '](' in inputline :
+            # Detect lines with hyperlinks in them, and re-write them if necessary and possible.
+            # This is the only fall-through block, and we put it at the very beginning.
+            rewrite_relative_links();  # in inputline
+            # Fall through for further processing.
+
+        if (active_type == "code") and ("```" not in inputline) :
+            trace("in codeblock, regular line")
+            # what happens in the codeblock, stays in the codeblock
+            # Put this case first (after link detection), so we don't have to test it in all the other cases.
+            print >>outfile, inputline
+            continue
+
+        if (active_type == "code") and ("```" in inputline) :
+            trace("in codeblock, end delimiter line")
+            # detect end of codeblock
+            # This must be the second case.
+            if re.search(r'```[\s]*[^\s]', inputline) :
+                # If there's text following the end-``` on the same line, error out and fix it in the source file.
+                report_error("Text following codeblock end delimiter (```) on same line.")
+
+            if re.search(r'```.*```', inputline) :
+                # If there are two sets of triple-ticks on the same line, that's a problem too.
+                report_error("Two sets of codeblock delimiters (```) on same line.")
+
+            active_type = "none"
+            # Force the indenting of the end-``` to match the beginning. This fixes problem #4.
+            inputline = re.sub(r'^[\s]*', BLANKS[0 : 4*indent_stack.logical_indent_level()], inputline)
+            print >>outfile, inputline
+            continue
+
+        if (active_type != "code") and ("```" in inputline) :
+            trace("start codeblock, delimiter line")
+            # detect start of codeblock
+            if re.search(r'[^\s][\s]*```', inputline) :
+                # If there's text preceding the begin-``` on the same line, error out and fix it in the source file.
+                report_error("Text preceding codeblock start delimiter (```) on same line.")
+
+            if re.search(r'```.*```', inputline) :
+                # If there are two sets of triple-ticks on the same line, that's a problem too.
+                report_error("Two sets of codeblock delimiters (```) on same line.")
+
+            if active_type == "text" or active_type == "bullet" :
+                print >>outfile, ""   # Need preceding blank line before codeblock, in doxia.
+
+            active_type = "code"
+            fix_prefix_blanks(active_type)  # in inputline
+            print >>outfile, inputline
+            continue
+
+        if re.search(r'^[\s]*$', inputline) :
+            trace("blank line")
+            # detect blank lines
+            active_type = "none"
+            print >>outfile, inputline  # Perhaps this should be print "" instead?
+            continue
+
+        if re.search(r'^[\s]*([*+-]|[\d]+\.)[\s]', inputline) :
+            trace("bullet line")
+            # detect bullet line (numbered or not)
+            if (active_type == "text") :
+                print >>outfile, ""  # Need preceding blank line between text and bullet, in doxia. This fixes problem #1.
+
+            active_type = "bullet"
+            fix_prefix_blanks(active_type);  # in inputline
+            print >>outfile, inputline
+            continue
+
+        if inputline.startswith("#") :
+            trace("header line")
+            # detects header lines, which are self-delimiting, and cannot have indenting
+            # Header line resets the indenting as well as current type
+            active_type = "none"
+            indent_stack.init_indent()
+            if re.search(r'^#[^#]', inputline) :
+                # First-level headers ("H1") need explicit anchor inserted.  This fixes problem #6.
+                anchor_name = re.sub(r' ', "_", inputline[1:].strip())
+                anchor_name = re.sub(EXCLUDED_CHARS_REGEX, "", anchor_name)
+                anchor_text = '<a name="' + anchor_name + '"></a>'
+                if H1_COUNT == 0 :
+                    # Treat the first header differently - put the header after instead of before
+                    # This is necessary to preserve document metadata titling in generated html.
+                    # However, it means the title itself gets hidden above the top of window, when the link is used.
+                    H1_COUNT = 1
+                    print >>outfile, inputline
+                    print >>outfile, anchor_text
+                    print >>outfile, ""  # Anchors aren't self-delimiting, so insert a blank line after.
+                else :
+                    print >>outfile, ""  # Anchors aren't self-delimiting, so insert a blank line first.
+                    print >>outfile, anchor_text
+                    print >>outfile, inputline
+            else :
+                # H2 or deeper level of header, doxia auto-generates anchor.
+                print >>outfile, inputline
+            continue
+
+        if re.search(r'^[\s]*#', inputline) :
+            trace("header line, bad")
+            report_error("Header specification character (#) detected with indenting.  This is presumed to be an error, since it will render as text. If intentional, put a period or other printable character before it.")
+
+        ## default action -- last case in state machine switch
+        trace("text line")
+        # Everything else is text-like, and therefore continues active_type, unless none.
+        if (active_type == "none") :
+            # Start new text paragraph.
+            active_type = "text"
+            fix_prefix_blanks(active_type);  # in inputline
+            print >>outfile, inputline
+            continue
+        else :
+            # This is just a continuation of current text or bullet.
+            # Indenting is irrelevant.
+            print >>outfile, inputline
+            continue
+
+    ## end loop on inputlines
+    if (active_type == "code") :
+        report_error("Unmatched codeblock delimiter (```) detected.")
+
+    infile.close()
+    outfile.close()
+    os.rename(FILENAME + ".tmp", FILENAME)
+
+## end loop on FILENAMEs
+trace("ending trace")

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/bin/generate-md.sh
----------------------------------------------------------------------
diff --git a/site-book/bin/generate-md.sh b/site-book/bin/generate-md.sh
new file mode 100755
index 0000000..df97c76
--- /dev/null
+++ b/site-book/bin/generate-md.sh
@@ -0,0 +1,277 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# ------------------------------------------------------------------
+#
+# This script collects the *.md files and other resources needed to generate
+# a book-like collection of end-user documentation.  
+#
+# The Metron development community has chosen to do most documentation in README.md
+# files, because they are easy to write and maintain, and located near the code they
+# document. Also they are versioned along with that code, so they are always in sync
+# with the particular version being considered.
+#
+# However, the locations of the various README.md files in github are not necessarily
+# obvious to non-developers, and can therefore be difficult to find and use.
+# In order to make the files easier to use as end-user documentation, we collect them
+# into a book-like collection.  It should perhaps be viewed as a collection of essays,
+# since each README.md file is written independently.
+
+## This script assumes it is running at $METRON_SOURCE/site-book/bin/
+METRON_SOURCE=`cd $(dirname $0); cd ../..; pwd`
+
+## Maintainers set EXCLUSION_LIST to a list of egrep-style regular expressions.
+## MD files whose file path matches any of these patterns will be excluded.
+## Please note that the file paths being matched are output of 'find', rooted at
+## $METRON_SOURCE.  'Find' will start each path with './', which is matched by '^\./'.
+## Please place each regex in single quotes, and don't forget to backslash-escape
+## literal periods and other special characters if needed.
+EXCLUSION_LIST=(
+    '/site/'
+    '/site-book/'
+    '/build_utils/'
+)
+
+## This is a list of resources (eg .png files) needed to render the markdown files.
+## Each entry is a file path, relative to $METRON_SOURCE.
+## Note: any images in site-book/src/site/resources/image-archive/ will also be included.
+RESOURCE_LIST=(
+    metron-platform/metron-parsers/parser_arch.png
+    metron-platform/metron-indexing/indexing_arch.png
+    metron-platform/metron-enrichment/enrichment_arch.png
+    metron-analytics/metron-maas-service/maas_arch.png
+)
+
+## This is a list of duples, flattened into a bash array.  Even fields are relative paths to a .md file
+## that needs an href re-written to match a resource in the images/ directory.  Odd fields are the corresponding
+## one-line sed script, in single quotes, that does the rewrite.  See below for examples.
+HREF_REWRITE_LIST=(
+    metron-platform/metron-enrichment/README.md 's#(enrichment_arch.png)#(../../images/enrichment_arch.png)#g'
+    metron-platform/metron-indexing/README.md 's#(indexing_arch.png)#(../../images/indexing_arch.png)#g'
+    metron-platform/metron-parsers/README.md 's#(parser_arch.png)#(../../images/parser_arch.png)#g'
+    metron-analytics/metron-maas-service/README.md 's#(maas_arch.png)#(../../images/maas_arch.png)#g'
+)
+
+
+######################
+######################
+# utility functions
+
+# input: none
+# output: traces, if enabled
+TRACE_ENABLE=0
+function trace () {
+    if (( $TRACE_ENABLE == 1 )) ; then
+	echo "$*"
+    fi  # else do nothing
+}
+TREE_TRACE_ENABLE=0
+function tree_trace () {
+    if (( $TREE_TRACE_ENABLE == 1 )) ; then
+	echo "$*"
+    fi  # else do nothing
+}
+
+# input: cumulative directory_path, indent_level
+# output: items to site.tmp, as lines of text
+# This function is called recursively as we descend the directory tree
+# The cum_dir_path must not have a terminal "/".
+function descend () {
+    tree_trace "enter decend( $@ )"
+    local cum_dir_path
+    local -i indent
+    local open_item_exists
+    cum_dir_path="$1"
+    indent=$2
+
+    if [ -e "${cum_dir_path}"/index.md ] ; then
+	dir_name=`basename "$cum_dir_path"`
+	dir_name="${dir_name#metron-}"  #remove the "metron-" prefix if present
+	dir_name=`get_prettyname "$dir_name"`  #capitalize the remainder
+	# Is it a leaf node?
+	num_peers=`ls -d "${cum_dir_path}"/* |wc -l`
+	if (( $num_peers == 1 )) ; then #yes, it's a leaf node, do a closed item
+	    echo "${INDENTS[$indent]}<item name='${dir_name}' href='${cum_dir_path}/index.html'/>" >> ../site.tmp
+	    tree_trace "exit descend due to leaf node"
+	    return  #nothing else to process in this directory path
+	fi  #otherwise carry on with open item and child items at deeper indent
+	echo "${INDENTS[$indent]}<item name='${dir_name}' href='${cum_dir_path}/index.html' collapse='true'>" >> ../site.tmp
+	open_item_exists=1
+	indent=$(( indent + 1 ))
+    else
+	open_item_exists=0
+    fi
+    for md in "${cum_dir_path}"/*.md ; do
+	if [ ! -e "$md" ] ; then continue ; fi  #globbing sometimes gives spurious results
+	item_name=`basename -s ".md" "$md"`  #strip the suffix
+	if [ "$item_name" != "index" ] ; then
+	    echo "${INDENTS[$indent]}<item name='${item_name}' href='${cum_dir_path}/${item_name}.html'/>" >> ../site.tmp
+	fi
+    done
+    for dir in "${cum_dir_path}"/* ; do
+	if [ ! -e "$dir" ] ; then continue ; fi  #globbing sometimes gives spurious results
+	if [ -d "$dir" ] ; then
+	    descend "$dir" $indent
+	fi
+    done
+    if (( open_item_exists == 1 )) ; then
+	indent=$(( indent - 1 ))  #close the item
+	echo "${INDENTS[$indent]}</item>" >> ../site.tmp
+    fi
+    tree_trace "exit descend with indent = $indent"
+}
+
+# input: a file basename
+# output: a "pretty" human label, on stdout for Command Substitution
+# Currently just capitalize the first letter
+# In future, might do CamelCase or subst hyphens to underscores
+function get_prettyname () {
+    echo "$(tr '[:lower:]' '[:upper:]' <<< ${1:0:1})${1:1}"
+}
+
+
+######################
+## Proceed
+
+cd "$METRON_SOURCE"
+
+# Clean up generated directories
+if [ -d "$METRON_SOURCE"/site-book/src/site/markdown ] ; then
+    rm -rf "$METRON_SOURCE"/site-book/src/site/markdown ; fi
+if [ -d "$METRON_SOURCE"/site-book/src/site/resources/images ] ; then
+    rm -rf "$METRON_SOURCE"/site-book/src/site/resources/images ; fi
+mkdir -p "$METRON_SOURCE"/site-book/src/site/markdown \
+    "$METRON_SOURCE"/site-book/src/site/resources/images
+
+# cons up the exclude exec string
+cmd=""
+for exclusion in "${EXCLUSION_LIST[@]}" ; do
+    cmd="${cmd} | egrep -v '${exclusion}'"
+done
+
+# Capture the hierarchical list of .md files.
+# Take them all, not just README.md files.
+cmd="find . -name '*.md' -print ${cmd}"
+echo " "
+echo Collecting markdown files with exclusions: $cmd
+echo " "
+MD_FILE_LIST=( `eval $cmd` )
+
+# Pipe the files into the src/site/markdown directory tree
+tar cvf - "${MD_FILE_LIST[@]}" | ( cd "$METRON_SOURCE"/site-book/src/site/markdown; tar xf -  )
+
+# Grab the other resources needed
+echo " "
+echo Collecting additional resource files:
+for r in "${RESOURCE_LIST[@]}" site-book/src/site/resources/image-archive/* ; do
+    if [ ! -e "$r" ] ; then continue ; fi  #globbing sometimes gives spurious results
+    echo ./"$r"
+    cp "$r" "$METRON_SOURCE"/site-book/src/site/resources/images/
+done
+echo " "
+
+cd site-book/src/site/markdown
+
+# Rewrite hrefs for resource references, using table provided by Maintainers
+for (( i=0; i<${#HREF_REWRITE_LIST[@]} ; i+=2 )) ; do
+    echo rewriting href in "${HREF_REWRITE_LIST[$i]}" : "${HREF_REWRITE_LIST[ $(( i + 1 )) ]}"
+    case "${OSTYPE}" in
+	linux*)
+	    # Linux sed correctly parses lack of argument after -i option
+            sed -i -e "${HREF_REWRITE_LIST[ $(( i + 1 )) ]}" "${HREF_REWRITE_LIST[$i]}"
+	    ;;
+	darwin*)
+            # Mac OS X sed needs an empty-string argument after -i option to get the same result
+            sed -i '' -e "${HREF_REWRITE_LIST[ $(( i + 1 )) ]}" "${HREF_REWRITE_LIST[$i]}"
+	    ;;
+	*)
+	    echo "ERROR: Unable to determine 'sed' argument list for OS ${OSTYPE}" > /dev/stderr
+	    exit -1
+	    ;;
+    esac
+done
+echo " "
+
+# Rename "README" files to "index" files, so they will be the default doc for a site sub-directory, just
+# like README is the default doc for a github sub-directory.  This makes some internal links (to directories)
+# work instead of being broken.
+echo Renaming \"README\" files to \"index\" files.
+if (( `ls -R |grep -c 'index.md'` > 0 )) ; then
+    echo "ERROR: index.md file exists in tree already, we currently don't handle that"
+    exit -1
+fi
+find . -name README.md -execdir mv README.md index.md \;
+echo " "
+
+# Insert the tree of generated html files in the LHS menu of the site.xml
+# The problem is that we want a depth-first listing, with files before subdirectories, and "index" always first.
+# So the following logic is a little complex, but we avoid having to hardwire the tree structure -- which we
+# may go back to in the long run.
+
+BEGIN_TAG="BEGIN_MENU_TREE"
+END_TAG="END_MENU_TREE"
+INDENTS=( "" "  " "    " "      " "        " "          " "            " )
+
+echo "Generating menu tree from directory tree structure"
+echo " "
+
+# Copy the first part of the file, up to where the menu tree goes.
+sed -n -e "1,/${BEGIN_TAG}/ p" ../site.xml > ../site.tmp
+
+# Now start inserting menu tree items
+# top level of markdown tree is special
+if [ -e index.md ] ; then
+    echo "<item name='Metron' href='index.html' title='Apache Metron - Incubating' collapse='false'>" >> ../site.tmp
+    item0_exists=1
+else
+    item0_exists=0
+fi
+indent_level=1
+for md in *.md ; do
+    if [ ! -e "$md" ] ; then continue ; fi  #globbing sometimes gives spurious results
+    if [ "$md" != "index.md" ] ; then
+	item_name="${md%.*}"  #strip the suffix
+	echo "${INDENTS[$indent_level]}<item name='${item_name}' href='${item_name}.html' />" >> ../site.tmp
+    fi
+done
+for dir in * ; do
+    if [ ! -e "$dir" ] ; then continue ; fi  #globbing sometimes gives spurious results
+    if [ -d "$dir" ] ; then
+	descend "$dir" $indent_level
+    fi
+done
+if (( item0_exists == 1 )) ; then
+    echo "</item>" >> ../site.tmp
+fi
+
+# Copy the last part of the file, from the end of the menu tree.
+sed -n -e "/${END_TAG}/,"'$ p' ../site.xml >> ../site.tmp
+
+mv ../site.xml ../site.xml.bak
+mv ../site.tmp ../site.xml
+
+echo "Done."
+echo " "
+
+echo "Fixing up markdown dialect problems between Github-MD and doxia-markdown:"
+find . -name '*.md' -print -exec python "$METRON_SOURCE"/site-book/bin/fix-md-dialect.py '{}' \;
+echo "Done."
+echo " "
+
+
+exit 0

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/site/images/metron-logo.png
----------------------------------------------------------------------
diff --git a/site-book/src/site/images/metron-logo.png b/site-book/src/site/images/metron-logo.png
deleted file mode 100644
index a0bc8cb..0000000
Binary files a/site-book/src/site/images/metron-logo.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/site/resources/image-archive/ApacheIncubating_Logo.png
----------------------------------------------------------------------
diff --git a/site-book/src/site/resources/image-archive/ApacheIncubating_Logo.png b/site-book/src/site/resources/image-archive/ApacheIncubating_Logo.png
new file mode 100644
index 0000000..83f096c
Binary files /dev/null and b/site-book/src/site/resources/image-archive/ApacheIncubating_Logo.png differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/site/resources/image-archive/metron-logo.png
----------------------------------------------------------------------
diff --git a/site-book/src/site/resources/image-archive/metron-logo.png b/site-book/src/site/resources/image-archive/metron-logo.png
new file mode 100644
index 0000000..a0bc8cb
Binary files /dev/null and b/site-book/src/site/resources/image-archive/metron-logo.png differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/site/site.xml
----------------------------------------------------------------------
diff --git a/site-book/src/site/site.xml b/site-book/src/site/site.xml
index 90f774a..ba96f27 100644
--- a/site-book/src/site/site.xml
+++ b/site-book/src/site/site.xml
@@ -16,7 +16,7 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<project name="Falcon" xmlns="http://maven.apache.org/DECORATION/1.3.0"
+<project name="Metron" xmlns="http://maven.apache.org/DECORATION/1.3.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/DECORATION/1.3.0 http://maven.apache.org/xsd/decoration-1.3.0.xsd">
 
@@ -29,17 +29,27 @@
     <custom>
         <fluidoSkin>
             <project>Apache Metron - Incubating</project>
-            <sideBarEnabled>false</sideBarEnabled>
+	    <topBarEnabled>false</topBarEnabled>
+            <sideBarEnabled>true</sideBarEnabled>
         </fluidoSkin>
     </custom>
 
     <bannerLeft>
         <name>Apache Metron - Incubating</name>
-        <src>./images/metron-logo.png</src>
-        <width>740px</width>
-        <height>242px</height>
+        <src>images/metron-logo.png</src>
+	<href>http://metron.incubator.apache.org/</href>
+        <width>148px</width>
+        <height>48px</height>
     </bannerLeft>
 
+    <bannerRight>
+        <name>Apache Incubating</name>
+	<src>images/ApacheIncubating_Logo.png</src>
+	<href>http://incubator.apache.org/</href>
+        <width>192px</width>
+        <height>48px</height>
+    </bannerRight>
+
     <publishDate position="right"/>
     <version position="right"/>
 
@@ -51,9 +61,61 @@
         </head>
 
         <breadcrumbs position="left">
-            <item name="Metron" title="Apache Metron - Incubating" href="index.html"/>
+            <item name="Apache" href="http://www.apache.org"/>
+            <item name="Metron-Incubating" title="Apache Metron - Incubating" href="http://metron.incubator.apache.org/"/>
+	    <item name="Documentation" title="Metron Docs" href="index.html"/>
         </breadcrumbs>
 
+	<menu name="User Documentation">
+	<!-- BEGIN_MENU_TREE - Do not remove this line, it is used for auto-insert -->
+<item name='Metron' href='index.html' title='Apache Metron - Incubating' collapse='false'>
+  <item name='Analytics' href='metron-analytics/index.html' collapse='true'>
+    <item name='Maas-service' href='metron-analytics/metron-maas-service/index.html'/>
+    <item name='Profiler' href='metron-analytics/metron-profiler/index.html'/>
+    <item name='Profiler-client' href='metron-analytics/metron-profiler-client/index.html'/>
+    <item name='Statistics' href='metron-analytics/metron-statistics/index.html' collapse='true'>
+      <item name='HLLP' href='metron-analytics/metron-statistics/HLLP.html'/>
+    </item>
+  </item>
+  <item name='Deployment' href='metron-deployment/index.html' collapse='true'>
+    <item name='Amazon-ec2' href='metron-deployment/amazon-ec2/index.html'/>
+    <item name='Ansible-docker' href='metron-deployment/packaging/docker/ansible-docker/index.html'/>
+    <item name='Rpm-docker' href='metron-deployment/packaging/docker/rpm-docker/index.html'/>
+    <item name='Packer-build' href='metron-deployment/packer-build/index.html'/>
+    <item name='Roles' href='metron-deployment/roles/index.html' collapse='true'>
+      <item name='Kibana' href='metron-deployment/roles/kibana/index.html'/>
+      <item name='Monit' href='metron-deployment/roles/monit/index.html'/>
+      <item name='Opentaxii' href='metron-deployment/roles/opentaxii/index.html'/>
+      <item name='Pcap_replay' href='metron-deployment/roles/pcap_replay/index.html'/>
+      <item name='Sensor-stubs' href='metron-deployment/roles/sensor-stubs/index.html'/>
+      <item name='Sensor-test-mode' href='metron-deployment/roles/sensor-test-mode/index.html'/>
+    </item>
+    <item name='Vagrant' href='metron-deployment/vagrant/index.html' collapse='true'>
+      <item name='Codelab-platform' href='metron-deployment/vagrant/codelab-platform/index.html'/>
+      <item name='Fastcapa-test-platform' href='metron-deployment/vagrant/fastcapa-test-platform/index.html'/>
+      <item name='Full-dev-platform' href='metron-deployment/vagrant/full-dev-platform/index.html'/>
+      <item name='Quick-dev-platform' href='metron-deployment/vagrant/quick-dev-platform/index.html'/>
+    </item>
+  </item>
+  <item name='Docker' href='metron-docker/index.html'/>
+  <item name='Platform' href='metron-platform/index.html' collapse='true'>
+    <item name='Api' href='metron-platform/metron-api/index.html'/>
+    <item name='Common' href='metron-platform/metron-common/index.html'/>
+    <item name='Data-management' href='metron-platform/metron-data-management/index.html'/>
+    <item name='Enrichment' href='metron-platform/metron-enrichment/index.html'/>
+    <item name='Indexing' href='metron-platform/metron-indexing/index.html'/>
+    <item name='Management' href='metron-platform/metron-management/index.html'/>
+    <item name='Parsers' href='metron-platform/metron-parsers/index.html'/>
+    <item name='Pcap-backend' href='metron-platform/metron-pcap-backend/index.html'/>
+  </item>
+  <item name='Sensors' href='metron-sensors/index.html' collapse='true'>
+    <item name='Fastcapa' href='metron-sensors/fastcapa/index.html'/>
+    <item name='Pycapa' href='metron-sensors/pycapa/index.html'/>
+  </item>
+</item>
+	<!-- END_MENU_TREE - Do not remove this line, it is used for auto-insert -->
+	</menu>
+
         <footer>
             © 2015-2016 The Apache Software Foundation. Apache Metron, Metron, Apache, the Apache feather logo,
             and the Apache Metron project logo are trademarks of The Apache Software Foundation.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/test/resources/test-fix-md.txt
----------------------------------------------------------------------
diff --git a/site-book/src/test/resources/test-fix-md.txt b/site-book/src/test/resources/test-fix-md.txt
new file mode 100644
index 0000000..3a84a06
--- /dev/null
+++ b/site-book/src/test/resources/test-fix-md.txt
@@ -0,0 +1,34 @@
+
+The problem #5 (rewrite_relative_links) is particularly tricky, so this test file
+allows you to test it.  Run 
+`cp src/test/resources/test-fix-md.txt.stat src/test/resources/test-fix-md.txt`
+then
+`python bin/fix-md-dialect.py src/test/resources/test-fix-md.txt`
+The .txt file will be re-written in place.  You can check the results by comparing
+the "Should be..." lines with the lines above them.  Contrast with the original
+.stat file to see before re-write.
+
+Here [Don't rewrite abs urls](http://nowhere.foo:80/README.md) is case 1.
+Should still be               http://nowhere.foo:80/README.md
+
+Here [Fleeber](README.md) is case 2a.
+Should be      index.html
+
+Here [Fleeber](NO_README.md) is case 2b.
+Should be      NO_README.html
+
+Here [Fleeber](README.md#fleeber) is case 2c.
+Should be      index.html#Fleeber
+
+Here [Fleeber](NO_README.md#fleeber) is case 2d.
+Should be      NO_README.html#Fleeber
+
+Here [`Fleeber`](multi.md#fleeber) is case 3.
+Should be        multi.html#Fleeber
+
+Here [When All Is Lost](#when-all-is-lost) intra-document link is case 4.
+Should be               #When_All_Is_Lost
+
+Here is a combo: [Creative Nonsense](../metron-params/#creative-nonsense) and [`RPM`](../metron-params/README.md#rpm)
+Should be:                           ../metron-params/index.html#Creative_Nonsense) and [`RPM`](../metron-params/index.html#RPM)
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/test/resources/test-fix-md.txt.result
----------------------------------------------------------------------
diff --git a/site-book/src/test/resources/test-fix-md.txt.result b/site-book/src/test/resources/test-fix-md.txt.result
new file mode 100644
index 0000000..dc404bf
--- /dev/null
+++ b/site-book/src/test/resources/test-fix-md.txt.result
@@ -0,0 +1,34 @@
+
+The problem #5 (rewrite_relative_links) is particularly tricky, so this test file
+allows you to test it.  Run 
+`cp src/test/resources/test-fix-md.txt.stat src/test/resources/test-fix-md.txt`
+then
+`python bin/fix-md-dialect.py src/test/resources/test-fix-md.txt`
+The .txt file will be re-written in place.  You can check the results by comparing
+the "Should be..." lines with the lines above them.  Contrast with the original
+.stat file to see before re-write.
+
+Here [Don't rewrite abs urls](http://nowhere.foo:80/README.md) is case 1.
+Should still be               http://nowhere.foo:80/README.md
+
+Here [Fleeber](index.html) is case 2a.
+Should be      index.html
+
+Here [Fleeber](NO_README.html) is case 2b.
+Should be      NO_README.html
+
+Here [Fleeber](index.html#Fleeber) is case 2c.
+Should be      index.html#Fleeber
+
+Here [Fleeber](NO_README.html#Fleeber) is case 2d.
+Should be      NO_README.html#Fleeber
+
+Here [`Fleeber`](multi.html#Fleeber) is case 3.
+Should be        multi.html#Fleeber
+
+Here [When All Is Lost](#When_All_Is_Lost) intra-document link is case 4.
+Should be               #When_All_Is_Lost
+
+Here is a combo: [Creative Nonsense](../metron-params/index.html#Creative_Nonsense) and [`RPM`](../metron-params/index.html#RPM)
+Should be:                           ../metron-params/index.html#Creative_Nonsense) and [`RPM`](../metron-params/index.html#RPM)
+

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e4d54a27/site-book/src/test/resources/test-fix-md.txt.stat
----------------------------------------------------------------------
diff --git a/site-book/src/test/resources/test-fix-md.txt.stat b/site-book/src/test/resources/test-fix-md.txt.stat
new file mode 100644
index 0000000..3a84a06
--- /dev/null
+++ b/site-book/src/test/resources/test-fix-md.txt.stat
@@ -0,0 +1,34 @@
+
+The problem #5 (rewrite_relative_links) is particularly tricky, so this test file
+allows you to test it.  Run 
+`cp src/test/resources/test-fix-md.txt.stat src/test/resources/test-fix-md.txt`
+then
+`python bin/fix-md-dialect.py src/test/resources/test-fix-md.txt`
+The .txt file will be re-written in place.  You can check the results by comparing
+the "Should be..." lines with the lines above them.  Contrast with the original
+.stat file to see before re-write.
+
+Here [Don't rewrite abs urls](http://nowhere.foo:80/README.md) is case 1.
+Should still be               http://nowhere.foo:80/README.md
+
+Here [Fleeber](README.md) is case 2a.
+Should be      index.html
+
+Here [Fleeber](NO_README.md) is case 2b.
+Should be      NO_README.html
+
+Here [Fleeber](README.md#fleeber) is case 2c.
+Should be      index.html#Fleeber
+
+Here [Fleeber](NO_README.md#fleeber) is case 2d.
+Should be      NO_README.html#Fleeber
+
+Here [`Fleeber`](multi.md#fleeber) is case 3.
+Should be        multi.html#Fleeber
+
+Here [When All Is Lost](#when-all-is-lost) intra-document link is case 4.
+Should be               #When_All_Is_Lost
+
+Here is a combo: [Creative Nonsense](../metron-params/#creative-nonsense) and [`RPM`](../metron-params/README.md#rpm)
+Should be:                           ../metron-params/index.html#Creative_Nonsense) and [`RPM`](../metron-params/index.html#RPM)
+


[11/17] incubator-metron git commit: METRON-699: Update metron-statistics documentation closes apache/incubator-metron#440

Posted by ce...@apache.org.
METRON-699: Update metron-statistics documentation closes apache/incubator-metron#440


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/a11e85c5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/a11e85c5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/a11e85c5

Branch: refs/heads/Metron_0.3.1
Commit: a11e85c5582ebe4077ffb90378c8f81bb5fd6637
Parents: 57c38af
Author: JonZeolla <ze...@gmail.com>
Authored: Mon Feb 6 09:13:57 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Mon Feb 6 09:13:57 2017 -0500

----------------------------------------------------------------------
 metron-analytics/metron-statistics/README.md | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/a11e85c5/metron-analytics/metron-statistics/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-statistics/README.md b/metron-analytics/metron-statistics/README.md
index f6ab15f..4d78839 100644
--- a/metron-analytics/metron-statistics/README.md
+++ b/metron-analytics/metron-statistics/README.md
@@ -12,27 +12,27 @@ functions can be used from everywhere where Stellar is used.
 
 ### Approximation Statistics
 
-### `HLLP_ADD`
+#### `HLLP_ADD`
   * Description: Add value to the HyperLogLogPlus estimator set. See [HLLP README](HLLP.md)
   * Input:
     * hyperLogLogPlus - the hllp estimator to add a value to
     * value+ - value to add to the set. Takes a single item or a list.
   * Returns: The HyperLogLogPlus set with a new value added
 
-### `HLLP_CARDINALITY`
+#### `HLLP_CARDINALITY`
   * Description: Returns HyperLogLogPlus-estimated cardinality for this set. See [HLLP README](HLLP.md)
   * Input:
     * hyperLogLogPlus - the hllp set
   * Returns: Long value representing the cardinality for this set
 
-### `HLLP_INIT`
+#### `HLLP_INIT`
   * Description: Initializes the HyperLogLogPlus estimator set. p must be a value between 4 and sp and sp must be less than 32 and greater than 4. See [HLLP README](HLLP.md)
   * Input:
     * p - the precision value for the normal set
     * sp - the precision value for the sparse set. If p is set, but sp is 0 or not specified, the sparse set will be disabled.
   * Returns: A new HyperLogLogPlus set
 
-### `HLLP_MERGE`
+#### `HLLP_MERGE`
   * Description: Merge hllp sets together. The resulting estimator is initialized with p and sp precision values from the first provided hllp estimator set. See [HLLP README](HLLP.md)
   * Input:
     * hllp - List of hllp estimators to merge. Takes a single hllp set or a list.


[08/17] incubator-metron git commit: METRON-680 GeoLiteDatabase incorrectly using country geoname_id instead of city (justinleet) closes apache/incubator-metron#433

Posted by ce...@apache.org.
METRON-680 GeoLiteDatabase incorrectly using country geoname_id instead of city (justinleet) closes apache/incubator-metron#433


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/ddca4d82
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/ddca4d82
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/ddca4d82

Branch: refs/heads/Metron_0.3.1
Commit: ddca4d82947aceef19ed7c2de7cd0e49475774c9
Parents: 8340c0e
Author: justinleet <ju...@gmail.com>
Authored: Fri Feb 3 11:07:34 2017 -0500
Committer: leet <le...@apache.org>
Committed: Fri Feb 3 11:07:34 2017 -0500

----------------------------------------------------------------------
 .../metron/enrichment/adapters/geo/GeoLiteDatabase.java      | 2 +-
 .../metron/enrichment/adapters/geo/GeoAdapterTest.java       | 2 +-
 .../metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java  | 4 ++--
 .../enrichment/stellar/GeoEnrichmentFunctionsTest.java       | 8 ++++----
 4 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ddca4d82/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabase.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabase.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabase.java
index aa4731b..d40d980 100644
--- a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabase.java
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabase.java
@@ -141,7 +141,7 @@ public enum GeoLiteDatabase {
       Postal postal = cityResponse.getPostal();
       Location location = cityResponse.getLocation();
 
-      geoInfo.put("locID", convertNullToEmptyString(country.getGeoNameId()));
+      geoInfo.put("locID", convertNullToEmptyString(city.getGeoNameId()));
       geoInfo.put("country", convertNullToEmptyString(country.getIsoCode()));
       geoInfo.put("city", convertNullToEmptyString(city.getName()));
       geoInfo.put("postalCode", convertNullToEmptyString(postal.getCode()));

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ddca4d82/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
index f91939d..8d2a7ec 100644
--- a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoAdapterTest.java
@@ -36,7 +36,7 @@ public class GeoAdapterTest {
 
   /**
    * {
-   * "locID":"6252001",
+   * "locID":"5803556",
    * "country":"US",
    * "city":"Milton",
    * "postalCode":"98354",

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ddca4d82/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java
index cc891c6..238f8e0 100644
--- a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/adapters/geo/GeoLiteDatabaseTest.java
@@ -43,7 +43,7 @@ public class GeoLiteDatabaseTest {
 
   /**
    * {
-   * "locID":"6252001",
+   * "locID":"5803556",
    * "country":"US",
    * "city":"Milton",
    * "postalCode":"98354",
@@ -60,7 +60,7 @@ public class GeoLiteDatabaseTest {
 
   /**
    * {
-   * "locID":"2635167",
+   * "locID":"2643743",
    * "country":"GB",
    * "city":"London",
    * "postalCode":"",

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/ddca4d82/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/stellar/GeoEnrichmentFunctionsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/stellar/GeoEnrichmentFunctionsTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/stellar/GeoEnrichmentFunctionsTest.java
index c87449d..e8f8f71 100644
--- a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/stellar/GeoEnrichmentFunctionsTest.java
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/stellar/GeoEnrichmentFunctionsTest.java
@@ -43,7 +43,7 @@ public class GeoEnrichmentFunctionsTest {
 
   /**
    * {
-   * "locID":"6252001",
+   * "locID":"5803556",
    * "country":"US",
    * "city":"Milton",
    * "postalCode":"98354",
@@ -122,7 +122,7 @@ public class GeoEnrichmentFunctionsTest {
   public void testGetRemote() throws Exception {
     String stellar = "GEO_GET('216.160.83.56')";
     Object result = run(stellar, ImmutableMap.of());
-    Assert.assertEquals("Remote Local IP should return result based on DB", expectedMessage, result);
+    Assert.assertEquals("Remote IP should return result based on DB", expectedMessage, result);
   }
 
   @Test
@@ -130,7 +130,7 @@ public class GeoEnrichmentFunctionsTest {
   public void testGetRemoteSingleField() throws Exception {
     String stellar = "GEO_GET('216.160.83.56', ['country'])";
     Object result = run(stellar, ImmutableMap.of());
-    Assert.assertEquals("Remote Local IP should return country result based on DB", "US", result);
+    Assert.assertEquals("Remote IP should return country result based on DB", "US", result);
   }
 
   @Test
@@ -138,7 +138,7 @@ public class GeoEnrichmentFunctionsTest {
   public void testGetRemoteMultipleFields() throws Exception {
     String stellar = "GEO_GET('216.160.83.56', ['country', 'city', 'dmaCode', 'location_point'])";
     Object result = run(stellar, ImmutableMap.of());
-    Assert.assertEquals("Remote Local IP should return country result based on DB", expectedSubsetMessage, result);
+    Assert.assertEquals("Remote IP should return country result based on DB", expectedSubsetMessage, result);
   }
 
   @Test(expected=org.apache.metron.common.dsl.ParseException.class)


[09/17] incubator-metron git commit: METRON-630 More work is needed tarLongFileMode posix (atopian via ottobackwards) closes apache/incubator-metron#361

Posted by ce...@apache.org.
METRON-630 More work is needed tarLongFileMode posix (atopian via ottobackwards) closes apache/incubator-metron#361


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/84a36a65
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/84a36a65
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/84a36a65

Branch: refs/heads/Metron_0.3.1
Commit: 84a36a650571ffefa61380d928285cdd758d0af0
Parents: ddca4d8
Author: atopian <jt...@covermymeds.com>
Authored: Sun Feb 5 08:33:05 2017 -0500
Committer: Otto Fowler <ot...@apache.org>
Committed: Sun Feb 5 08:33:05 2017 -0500

----------------------------------------------------------------------
 metron-analytics/metron-maas-service/pom.xml    | 1 +
 metron-analytics/metron-profiler-client/pom.xml | 1 +
 metron-analytics/metron-profiler/pom.xml        | 1 +
 metron-platform/metron-common/pom.xml           | 1 +
 metron-platform/metron-data-management/pom.xml  | 1 +
 metron-platform/metron-elasticsearch/pom.xml    | 1 +
 metron-platform/metron-enrichment/pom.xml       | 1 +
 metron-platform/metron-indexing/pom.xml         | 1 +
 metron-platform/metron-management/pom.xml       | 1 +
 metron-platform/metron-parsers/pom.xml          | 1 +
 metron-platform/metron-pcap-backend/pom.xml     | 1 +
 metron-platform/metron-solr/pom.xml             | 1 +
 12 files changed, 12 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-analytics/metron-maas-service/pom.xml
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/pom.xml b/metron-analytics/metron-maas-service/pom.xml
index 7f48923..ffb1009 100644
--- a/metron-analytics/metron-maas-service/pom.xml
+++ b/metron-analytics/metron-maas-service/pom.xml
@@ -309,6 +309,7 @@
         <artifactId>maven-assembly-plugin</artifactId>
         <configuration>
           <descriptor>src/main/assembly/assembly.xml</descriptor>
+          <tarLongFileMode>posix</tarLongFileMode>
         </configuration>
         <executions>
           <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-analytics/metron-profiler-client/pom.xml
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler-client/pom.xml b/metron-analytics/metron-profiler-client/pom.xml
index 0d5c7a0..e43d116 100644
--- a/metron-analytics/metron-profiler-client/pom.xml
+++ b/metron-analytics/metron-profiler-client/pom.xml
@@ -319,6 +319,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-analytics/metron-profiler/pom.xml
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-profiler/pom.xml b/metron-analytics/metron-profiler/pom.xml
index 75c2589..3295d59 100644
--- a/metron-analytics/metron-profiler/pom.xml
+++ b/metron-analytics/metron-profiler/pom.xml
@@ -368,6 +368,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-common/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/pom.xml b/metron-platform/metron-common/pom.xml
index 9b833c2..e122dbd 100644
--- a/metron-platform/metron-common/pom.xml
+++ b/metron-platform/metron-common/pom.xml
@@ -449,6 +449,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-data-management/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-data-management/pom.xml b/metron-platform/metron-data-management/pom.xml
index be7fe33..5408d7e 100644
--- a/metron-platform/metron-data-management/pom.xml
+++ b/metron-platform/metron-data-management/pom.xml
@@ -432,6 +432,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-elasticsearch/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-elasticsearch/pom.xml b/metron-platform/metron-elasticsearch/pom.xml
index 1786725..ca23073 100644
--- a/metron-platform/metron-elasticsearch/pom.xml
+++ b/metron-platform/metron-elasticsearch/pom.xml
@@ -334,6 +334,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-enrichment/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/pom.xml b/metron-platform/metron-enrichment/pom.xml
index e46ee95..c0c7129 100644
--- a/metron-platform/metron-enrichment/pom.xml
+++ b/metron-platform/metron-enrichment/pom.xml
@@ -371,6 +371,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-indexing/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-indexing/pom.xml b/metron-platform/metron-indexing/pom.xml
index b7d4cde..ffd1162 100644
--- a/metron-platform/metron-indexing/pom.xml
+++ b/metron-platform/metron-indexing/pom.xml
@@ -249,6 +249,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-management/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-management/pom.xml b/metron-platform/metron-management/pom.xml
index 0da1d8f..4d359d2 100644
--- a/metron-platform/metron-management/pom.xml
+++ b/metron-platform/metron-management/pom.xml
@@ -254,6 +254,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-parsers/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-parsers/pom.xml b/metron-platform/metron-parsers/pom.xml
index f30c88f..f599719 100644
--- a/metron-platform/metron-parsers/pom.xml
+++ b/metron-platform/metron-parsers/pom.xml
@@ -324,6 +324,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-pcap-backend/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-pcap-backend/pom.xml b/metron-platform/metron-pcap-backend/pom.xml
index 60699e2..093cd5d 100644
--- a/metron-platform/metron-pcap-backend/pom.xml
+++ b/metron-platform/metron-pcap-backend/pom.xml
@@ -286,6 +286,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/84a36a65/metron-platform/metron-solr/pom.xml
----------------------------------------------------------------------
diff --git a/metron-platform/metron-solr/pom.xml b/metron-platform/metron-solr/pom.xml
index 86a23c4..6ba0640 100644
--- a/metron-platform/metron-solr/pom.xml
+++ b/metron-platform/metron-solr/pom.xml
@@ -307,6 +307,7 @@
                 <artifactId>maven-assembly-plugin</artifactId>
                 <configuration>
                     <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    <tarLongFileMode>posix</tarLongFileMode>
                 </configuration>
                 <executions>
                     <execution>


[14/17] incubator-metron git commit: METRON-692: Update Upgrading.md for 0.3.0 -> 0.3.1 closes apache/incubator-metron#437

Posted by ce...@apache.org.
METRON-692: Update Upgrading.md for 0.3.0 -> 0.3.1 closes apache/incubator-metron#437


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/73cb6575
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/73cb6575
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/73cb6575

Branch: refs/heads/Metron_0.3.1
Commit: 73cb6575cbfd44a40289583b891da138f7f78ce8
Parents: 1be4fcb
Author: cstella <ce...@gmail.com>
Authored: Mon Feb 6 11:09:53 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Mon Feb 6 11:09:53 2017 -0500

----------------------------------------------------------------------
 Upgrading.md | 154 ++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 154 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/73cb6575/Upgrading.md
----------------------------------------------------------------------
diff --git a/Upgrading.md b/Upgrading.md
new file mode 100644
index 0000000..2a2ea50
--- /dev/null
+++ b/Upgrading.md
@@ -0,0 +1,154 @@
+# Upgrading
+This document constitutes a per-version listing of changes of
+configuration which are non-backwards compatible.
+
+## 0.2.0BETA to 0.3.0
+### [METRON-447: Monit fails to reload when upgrading from 0.2.0BETA to master](https://issues.apache.org/jira/browse/METRON-447)
+
+#### Description
+
+`/etc/monit.d/enrichment-elasticsearch.monit` was renamed to
+`/etc/monit.d/indexing-elasticsearch.monit`, however the old file isn't
+removed via ansible, which causes the below error during an upgrade:
+`Starting monit: /etc/monit.d/enrichment-elasticsearch.monit:18: Service
+name conflict, enrichment already defined
+'/usr/local/monit/status_enrichment_topology.sh'`
+
+### [METRON-448:Upgrading via Ansible deployment does not add topology.classpath ](https://issues.apache.org/jira/browse/METRON-448)
+
+#### Description
+When using Ansible to deploy the latest Metron bits to an existing installation, storm-site is not being updated with the new 0.2.1BETA parameter `topology.classpath`. Topologies are unable to find the client configs as a result.
+
+#### Workaround
+Set the `topology.classpath` property for storm in Ambari to `/etc/hbase/conf:/etc/hadoop/conf`
+
+## 0.3.0 to 0.3.1
+
+### [METRON-664: Make the index configuration per-writer with enabled/disabled](https://issues.apache.org/jira/browse/METRON-664)
+
+#### Description
+
+As of 0.3.0 the indexing configuration
+* Is held in the enrichment configuration for a sensor
+* Has properties which control every writer (i.e. HDFS, Solr, or Elasticsearch).
+
+In the 0.3.1 release, this configuration has been broken out
+and controls for individual writers are separated.
+
+Please see the description of the configurations in the indexing [README](https://github.com/apache/incubator-metron/tree/Metron_0.3.1/metron-platform/metron-indexing#sensor-indexing-configuration)
+
+#### Migration
+
+Migrate the configurations from each sensor enrichment configuration and create appropriate configurations for indexing.
+
+For instance, if a sensor enrichment config for sensor `foo`
+is in `$METRON_HOME/config/zookeeper/enrichments/foo.json` and looks like
+```
+{
+  "index" : "foo",
+  "batchSize" : 100
+}
+```
+
+You would create a file to configure each writer for sensor `foo` called `$METRON_HOME/config/zookeeper/indexing/foo.json` with the contents
+```
+{
+  "elasticsearch" : {
+    "index" : "foo",
+    "batchSize" : 100,
+    "enabled" : true
+  },
+  "hdfs" : { 
+    "index" : "foo",
+    "batchSize" : 100,
+    "enabled" : true
+  }
+}
+```
+
+### [METRON-675: Make Threat Triage rules able to be assigned names and comments](https://issues.apache.org/jira/browse/METRON-675)
+
+#### Description
+
+As of 0.3.0, threat triage rules were defined as a simple Map associating a Stellar expression with a score.
+As of 0.3.1, due to the fact that there may be many threat triage rules, we have made the rules more complex.
+To help organize these, we have made the threat triage rules objects in their own right, each containing optional name and comment fields.
+   
+This essentially makes the risk level rules slightly more complex.  The format goes from:
+```
+"riskLevelRules" : {
+    "stellar expression" : numeric score
+}
+```
+to:
+```
+"riskLevelRules" : [
+     {
+        "name" : "optional name",
+        "comment" : "optional comment",
+        "rule" : "stellar expression",
+        "score" : numeric score
+     }
+]
+```
+   
+#### Migration
+
+For every sensor enrichment configuration, you will need to migrate the `riskLevelRules` section
+to move from a map to a list of risk level rule objects.
+
+### [METRON-283: Migrate Geo Enrichment outside of MySQL](https://issues.apache.org/jira/browse/METRON-283)
+
+#### Description
+
+As of 0.3.0, a MySQL database was used for storage and retrieval of
+GeoIP information during enrichment.
+As of 0.3.1, the MySQL database is removed in favor of using MaxMind's
+binary GeoIP files stored on HDFS.
+
+After initial setup, this change is transparent and existing enrichment
+definitions will run as-is.
+
+#### Migration
+
+While new installs will not require any additional steps, in an existing
+install a script must be run to retrieve and load the initial data.
+
+The shell script `geo_enrichment_load.sh` will retrieve MaxMind GeoLite2
+data and load data into HDFS, and update the configuration to point to
+this data.
+In most cases the following usage will grab the data appropriately:
+
+```
+$METRON_HOME/bin/geo_enrichment_load.sh -z <zk_server>:<zk_port>
+```
+
+Additional options, including changing the source file location (which
+can be a file:// location if the GeoIP data is already downloaded), are
+available with the
+-h flag and are also detailed in the metron-data-management README.md
+file.
+
+One caveat is that this script will NOT update on disk config files. It
+is recommended to retrieve the configuration using
+
+```
+$METRON_HOME/bin/zk_load_configs.sh -z <zk_server>:<zk_port> -m DUMP
+```
+
+The new config will be `geo.hdfs.file` in the global section of the
+configuration. Append this key-value pair to the global.json in the config
+directory. A PUSH is unnecessary.
+
+### [METRON-684: Decouple Timestamp calculation from PROFILE_GET](https://issues.apache.org/jira/browse/METRON-684)
+
+#### Description
+
+During 0.3.1 we decoupled specifying durations for calls to the profiler
+into a separate function.  The consequence is that existing calls to
+`PROFILE_GET` will need to migrate.
+
+#### Migration
+
+Existing calls to `PROFILE_GET` will need to change from `PROFILE_GET('profile', 'entity', duration, 'durationUnits')` to `PROFILE_GET('profile', 'entity', PROFILE_FIXED(duration, 'durationUnits'))`
+


[16/17] incubator-metron git commit: METRON-660 Fixes to get doxia markdown gen working

Posted by ce...@apache.org.
METRON-660 Fixes to get doxia markdown gen working


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/2531b728
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/2531b728
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/2531b728

Branch: refs/heads/Metron_0.3.1
Commit: 2531b728b66aea1688fc3a34847c8878f80d717d
Parents: dd99533
Author: Michael Miklavcic <mi...@gmail.com>
Authored: Mon Jan 16 22:32:54 2017 -0700
Committer: cstella <ce...@gmail.com>
Committed: Mon Feb 6 15:14:51 2017 -0500

----------------------------------------------------------------------
 README.md                                       |   4 +-
 metron-deployment/amazon-ec2/README.md          |   2 +-
 .../packaging/docker/ansible-docker/README.md   |   4 +-
 .../packaging/docker/rpm-docker/README.md       |   2 +-
 metron-deployment/packer-build/README.md        |  10 +--
 site-book/pom.xml                               |  16 +++--
 site-book/site/images/metron-logo.png           | Bin 21186 -> 0 bytes
 site-book/site/site.xml                         |  62 -------------------
 site-book/src/site/images/metron-logo.png       | Bin 0 -> 21186 bytes
 site-book/src/site/site.xml                     |  62 +++++++++++++++++++
 10 files changed, 83 insertions(+), 79 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index c72c1d3..92e1060 100644
--- a/README.md
+++ b/README.md
@@ -68,12 +68,12 @@ Build the full project and run tests:
 $ mvn clean install
 ```
 
-Build without tests:<br>
+Build without tests:
 ```
 $ mvn clean install -DskipTests
 ```
 
-Build with the HDP profile:<br>
+Build with the HDP profile:
 ```
 $ mvn clean install -PHDP-2.5.0.0
 ```

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/metron-deployment/amazon-ec2/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/amazon-ec2/README.md b/metron-deployment/amazon-ec2/README.md
index dd30f6e..18c1e91 100644
--- a/metron-deployment/amazon-ec2/README.md
+++ b/metron-deployment/amazon-ec2/README.md
@@ -263,7 +263,7 @@ to retry, use: --limit @playbook.retry
 
 This will occur if Apache Metron attempts to deploy more host instances than allowed by your account.  The total number of instances required for Apache Metron can be reduced by editing `deployment/amazon-ec/playbook.yml`.  Perhaps a better alternative is to request of Amazon that this limit be increased.  Amazon has some additional [advice for dealing with this error and more](http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html).
 
-> You've reached the limit on the number of instances you can run concurrently. The limit depends on the instance type. For more information, see [How many instances can I run in Amazon EC2](http://aws.amazon.com/ec2/faqs/#How_many_instances_can_I_run_in_Amazon_EC2). If you need additional instances, complete the [Amazon EC2 Instance Request Form](https://console.aws.amazon.com/support/home#/case/create?issueType=service-limit-increase&limitType=service-code-ec2-instances).
+> You've reached the limit on the number of instances you can run concurrently. The limit depends on the instance type. For more information, see [How many instances can I run in Amazon EC2](http://aws.amazon.com/ec2/faqs/#How_many_instances_can_I_run_in_Amazon_EC2). If you need additional instances, complete the [Amazon EC2 Instance Request Form](https://console.aws.amazon.com/support/home#/case/create?issueType=service-limit-increase&amp;limitType=service-code-ec2-instances).
 
 ### Error: 'SSH encountered an unknown error during the connection'
 

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/metron-deployment/packaging/docker/ansible-docker/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/packaging/docker/ansible-docker/README.md b/metron-deployment/packaging/docker/ansible-docker/README.md
index e00445e..6ee7618 100644
--- a/metron-deployment/packaging/docker/ansible-docker/README.md
+++ b/metron-deployment/packaging/docker/ansible-docker/README.md
@@ -4,7 +4,7 @@ It is provisioned with software required to sucessfully run the deployment scrip
 
 ## Building the Container
 1. Install Docker [https://www.docker.com/products/overview]
-2. Navigate to <project-directory>/metron-deployment/packaging/docker/ansible-docker
+2. Navigate to \<project-directory\>/metron-deployment/packaging/docker/ansible-docker
 3. Build the container `docker build -t ansible-docker:2.0.0.2 .`
 
 ## Using the Container
@@ -12,6 +12,6 @@ Full instructions are found on the wiki [https://cwiki.apache.org/confluence/pag
 
 tl;dr
 
-1. docker run -it -v <project-directory>:/root/incubator-metron ansible-docker:2.0.0.2 bash
+1. docker run -it -v \<project-directory\>:/root/incubator-metron ansible-docker:2.0.0.2 bash
 2. cd /root/incubator-metron
 3. mvn clean package -DskipTests

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/metron-deployment/packaging/docker/rpm-docker/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/packaging/docker/rpm-docker/README.md b/metron-deployment/packaging/docker/rpm-docker/README.md
index 5ab7a82..584e05f 100644
--- a/metron-deployment/packaging/docker/rpm-docker/README.md
+++ b/metron-deployment/packaging/docker/rpm-docker/README.md
@@ -4,5 +4,5 @@ It is provisioned with software required to sucessfully run the deployment scrip
 
 ## Building the Container
 1. Install Docker [https://www.docker.com/products/overview]
-2. Navigate to <project-directory>/metron-deployment/packaging/rpm-docker
+2. Navigate to \<project-directory\>/metron-deployment/packaging/rpm-docker
 3. Build the container `docker build -t rpm-docker .`

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/metron-deployment/packer-build/README.md
----------------------------------------------------------------------
diff --git a/metron-deployment/packer-build/README.md b/metron-deployment/packer-build/README.md
index eebfdb0..ee49477 100644
--- a/metron-deployment/packer-build/README.md
+++ b/metron-deployment/packer-build/README.md
@@ -16,7 +16,7 @@ Prerequisites
 
 Build Both Images
 ---------------------- 
-  Navigate to <your-project-directory>/metron-deployment/packer-build
+  Navigate to \<your-project-directory\>/metron-deployment/packer-build
   Execute bin/bento build
   
   Packer will build both images and export .box files to the ./builds directory.
@@ -32,16 +32,16 @@ Build Single Images
 Using Your New Box File
 ---------------------- 
 Modify the relevant Vagrantfile (codelab-platform or quick-dev-platform) replacing the lines:
-
+```
 <pre><code>config.vm.box = "<i>box_name</i>"
 config.ssh.insert_key = true</code></pre>
-
+```
 with
-
+```
 <pre></code>config.vm.box = "<i>test_box_name</i>"
 config.vm.box = "<i>PathToBoxfile/Boxfilename</i>"
 config.ssh.insert_key = true</code></pre>
-
+```
 Launch the image as usual.
 
 Node: Vagrant will cache boxes, you can force Vagrant to reload your box by running <code>vagrant box remove <i>test_box_name</i></code> before launching your new image.

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/site-book/pom.xml
----------------------------------------------------------------------
diff --git a/site-book/pom.xml b/site-book/pom.xml
index 74ce248..d94c46e 100644
--- a/site-book/pom.xml
+++ b/site-book/pom.xml
@@ -17,7 +17,7 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>org.apache.metron</groupId>
 	<artifactId>site-book</artifactId>
-	<packaging>pom</packaging>
+        <!-- <packaging>pom</packaging> -->
 	<name>site-book</name>
 	<parent>
 		<groupId>org.apache.metron</groupId>
@@ -40,9 +40,10 @@
 			<distribution>repo</distribution>
 		</license>
 	</licenses>
-	<modules>
+        <!--	<modules>
 		<module>site</module>
 	</modules>
+        -->
 
 	<properties>
           <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
@@ -58,7 +59,7 @@
 		<dependency>
                   <groupId>org.apache.maven.doxia</groupId>
                   <artifactId>doxia-module-markdown</artifactId>
-                  <version>1.7</version>
+                  <version>1.6</version>
 		</dependency>
               </dependencies>
               <executions>
@@ -66,11 +67,14 @@
                   <goals>
                     <goal>site</goal>
                   </goals>
+                  <phase>prepare-package</phase>
 		</execution>
               </executions>
-              <configuration>
-		<skip>false</skip>
-              </configuration>
+	      <configuration>
+                    <generateProjectInfo>false</generateProjectInfo>
+                    <generateReports>false</generateReports>
+                    <skip>false</skip>
+             </configuration>
             </plugin>
 	  </plugins>
 	</build>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/site-book/site/images/metron-logo.png
----------------------------------------------------------------------
diff --git a/site-book/site/images/metron-logo.png b/site-book/site/images/metron-logo.png
deleted file mode 100644
index a0bc8cb..0000000
Binary files a/site-book/site/images/metron-logo.png and /dev/null differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/site-book/site/site.xml
----------------------------------------------------------------------
diff --git a/site-book/site/site.xml b/site-book/site/site.xml
deleted file mode 100644
index 90f774a..0000000
--- a/site-book/site/site.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-  
-       http://www.apache.org/licenses/LICENSE-2.0
-  
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project name="Falcon" xmlns="http://maven.apache.org/DECORATION/1.3.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/DECORATION/1.3.0 http://maven.apache.org/xsd/decoration-1.3.0.xsd">
-
-    <skin>
-        <groupId>org.apache.maven.skins</groupId>
-        <artifactId>maven-fluido-skin</artifactId>
-        <version>1.3.0</version>
-    </skin>
-
-    <custom>
-        <fluidoSkin>
-            <project>Apache Metron - Incubating</project>
-            <sideBarEnabled>false</sideBarEnabled>
-        </fluidoSkin>
-    </custom>
-
-    <bannerLeft>
-        <name>Apache Metron - Incubating</name>
-        <src>./images/metron-logo.png</src>
-        <width>740px</width>
-        <height>242px</height>
-    </bannerLeft>
-
-    <publishDate position="right"/>
-    <version position="right"/>
-
-    <body>
-        <head>
-            <script type="text/javascript">
-                $( document ).ready( function() { $( '.carousel' ).carousel( { interval: 3500 } ) } );
-            </script>
-        </head>
-
-        <breadcrumbs position="left">
-            <item name="Metron" title="Apache Metron - Incubating" href="index.html"/>
-        </breadcrumbs>
-
-        <footer>
-            © 2015-2016 The Apache Software Foundation. Apache Metron, Metron, Apache, the Apache feather logo,
-            and the Apache Metron project logo are trademarks of The Apache Software Foundation.
-        </footer>
-    </body>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/site-book/src/site/images/metron-logo.png
----------------------------------------------------------------------
diff --git a/site-book/src/site/images/metron-logo.png b/site-book/src/site/images/metron-logo.png
new file mode 100644
index 0000000..a0bc8cb
Binary files /dev/null and b/site-book/src/site/images/metron-logo.png differ

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/2531b728/site-book/src/site/site.xml
----------------------------------------------------------------------
diff --git a/site-book/src/site/site.xml b/site-book/src/site/site.xml
new file mode 100644
index 0000000..90f774a
--- /dev/null
+++ b/site-book/src/site/site.xml
@@ -0,0 +1,62 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+  
+       http://www.apache.org/licenses/LICENSE-2.0
+  
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project name="Falcon" xmlns="http://maven.apache.org/DECORATION/1.3.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/DECORATION/1.3.0 http://maven.apache.org/xsd/decoration-1.3.0.xsd">
+
+    <skin>
+        <groupId>org.apache.maven.skins</groupId>
+        <artifactId>maven-fluido-skin</artifactId>
+        <version>1.3.0</version>
+    </skin>
+
+    <custom>
+        <fluidoSkin>
+            <project>Apache Metron - Incubating</project>
+            <sideBarEnabled>false</sideBarEnabled>
+        </fluidoSkin>
+    </custom>
+
+    <bannerLeft>
+        <name>Apache Metron - Incubating</name>
+        <src>./images/metron-logo.png</src>
+        <width>740px</width>
+        <height>242px</height>
+    </bannerLeft>
+
+    <publishDate position="right"/>
+    <version position="right"/>
+
+    <body>
+        <head>
+            <script type="text/javascript">
+                $( document ).ready( function() { $( '.carousel' ).carousel( { interval: 3500 } ) } );
+            </script>
+        </head>
+
+        <breadcrumbs position="left">
+            <item name="Metron" title="Apache Metron - Incubating" href="index.html"/>
+        </breadcrumbs>
+
+        <footer>
+            © 2015-2016 The Apache Software Foundation. Apache Metron, Metron, Apache, the Apache feather logo,
+            and the Apache Metron project logo are trademarks of The Apache Software Foundation.
+        </footer>
+    </body>
+</project>


[06/17] incubator-metron git commit: METRON-658: Updated Grammar to Handle More Uses of in/not in Expressions closes apache/incubator-metron#430

Posted by ce...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarParser.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarParser.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarParser.java
index 4043601..b8f5173 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarParser.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/stellar/generated/StellarParser.java
@@ -2,7 +2,7 @@
 package org.apache.metron.common.stellar.generated;
 
 //CHECKSTYLE:OFF
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -46,14 +46,14 @@ public class StellarParser extends Parser {
 		WS=43;
 	public static final int
 		RULE_transformation = 0, RULE_transformation_expr = 1, RULE_conditional_expr = 2, 
-		RULE_comparison_expr = 3, RULE_transformation_entity = 4, RULE_comp_operator = 5, 
-		RULE_arith_operator_addition = 6, RULE_arith_operator_mul = 7, RULE_func_args = 8, 
-		RULE_op_list = 9, RULE_list_entity = 10, RULE_kv_list = 11, RULE_map_entity = 12, 
-		RULE_arithmetic_expr = 13, RULE_arithmetic_expr_mul = 14, RULE_functions = 15, 
-		RULE_arithmetic_operands = 16, RULE_identifier_operand = 17;
+		RULE_logical_expr = 3, RULE_b_expr = 4, RULE_in_expr = 5, RULE_comparison_expr = 6, 
+		RULE_transformation_entity = 7, RULE_comp_operator = 8, RULE_func_args = 9, 
+		RULE_op_list = 10, RULE_list_entity = 11, RULE_kv_list = 12, RULE_map_entity = 13, 
+		RULE_arithmetic_expr = 14, RULE_arithmetic_expr_mul = 15, RULE_functions = 16, 
+		RULE_arithmetic_operands = 17, RULE_identifier_operand = 18;
 	public static final String[] ruleNames = {
-		"transformation", "transformation_expr", "conditional_expr", "comparison_expr", 
-		"transformation_entity", "comp_operator", "arith_operator_addition", "arith_operator_mul", 
+		"transformation", "transformation_expr", "conditional_expr", "logical_expr", 
+		"b_expr", "in_expr", "comparison_expr", "transformation_entity", "comp_operator", 
 		"func_args", "op_list", "list_entity", "kv_list", "map_entity", "arithmetic_expr", 
 		"arithmetic_expr_mul", "functions", "arithmetic_operands", "identifier_operand"
 	};
@@ -145,9 +145,9 @@ public class StellarParser extends Parser {
 		try {
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(36);
+			setState(38);
 			transformation_expr();
-			setState(37);
+			setState(39);
 			match(EOF);
 			}
 		}
@@ -187,6 +187,20 @@ public class StellarParser extends Parser {
 			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitComparisonExpression(this);
 		}
 	}
+	public static class LogicalExpressionContext extends Transformation_exprContext {
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
+		}
+		public LogicalExpressionContext(Transformation_exprContext ctx) { copyFrom(ctx); }
+		@Override
+		public void enterRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterLogicalExpression(this);
+		}
+		@Override
+		public void exitRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitLogicalExpression(this);
+		}
+	}
 	public static class TransformationEntityContext extends Transformation_exprContext {
 		public Transformation_entityContext transformation_entity() {
 			return getRuleContext(Transformation_entityContext.class,0);
@@ -201,6 +215,20 @@ public class StellarParser extends Parser {
 			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitTransformationEntity(this);
 		}
 	}
+	public static class InExpressionContext extends Transformation_exprContext {
+		public In_exprContext in_expr() {
+			return getRuleContext(In_exprContext.class,0);
+		}
+		public InExpressionContext(Transformation_exprContext ctx) { copyFrom(ctx); }
+		@Override
+		public void enterRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterInExpression(this);
+		}
+		@Override
+		public void exitRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitInExpression(this);
+		}
+	}
 	public static class ArithExpressionContext extends Transformation_exprContext {
 		public Arithmetic_exprContext arithmetic_expr() {
 			return getRuleContext(Arithmetic_exprContext.class,0);
@@ -250,13 +278,13 @@ public class StellarParser extends Parser {
 		Transformation_exprContext _localctx = new Transformation_exprContext(_ctx, getState());
 		enterRule(_localctx, 2, RULE_transformation_expr);
 		try {
-			setState(47);
+			setState(51);
 			switch ( getInterpreter().adaptivePredict(_input,0,_ctx) ) {
 			case 1:
 				_localctx = new ConditionalExprContext(_localctx);
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(39);
+				setState(41);
 				conditional_expr();
 				}
 				break;
@@ -264,11 +292,11 @@ public class StellarParser extends Parser {
 				_localctx = new TransformationExprContext(_localctx);
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(40);
+				setState(42);
 				match(LPAREN);
-				setState(41);
+				setState(43);
 				transformation_expr();
-				setState(42);
+				setState(44);
 				match(RPAREN);
 				}
 				break;
@@ -276,7 +304,7 @@ public class StellarParser extends Parser {
 				_localctx = new ArithExpressionContext(_localctx);
 				enterOuterAlt(_localctx, 3);
 				{
-				setState(44);
+				setState(46);
 				arithmetic_expr(0);
 				}
 				break;
@@ -284,7 +312,7 @@ public class StellarParser extends Parser {
 				_localctx = new TransformationEntityContext(_localctx);
 				enterOuterAlt(_localctx, 4);
 				{
-				setState(45);
+				setState(47);
 				transformation_entity();
 				}
 				break;
@@ -292,10 +320,26 @@ public class StellarParser extends Parser {
 				_localctx = new ComparisonExpressionContext(_localctx);
 				enterOuterAlt(_localctx, 5);
 				{
-				setState(46);
+				setState(48);
 				comparison_expr(0);
 				}
 				break;
+			case 6:
+				_localctx = new LogicalExpressionContext(_localctx);
+				enterOuterAlt(_localctx, 6);
+				{
+				setState(49);
+				logical_expr();
+				}
+				break;
+			case 7:
+				_localctx = new InExpressionContext(_localctx);
+				enterOuterAlt(_localctx, 7);
+				{
+				setState(50);
+				in_expr();
+				}
+				break;
 			}
 		}
 		catch (RecognitionException re) {
@@ -321,8 +365,8 @@ public class StellarParser extends Parser {
 		}
 	}
 	public static class TernaryFuncWithoutIfContext extends Conditional_exprContext {
-		public Comparison_exprContext comparison_expr() {
-			return getRuleContext(Comparison_exprContext.class,0);
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
 		}
 		public TerminalNode QUESTION() { return getToken(StellarParser.QUESTION, 0); }
 		public List<Transformation_exprContext> transformation_expr() {
@@ -344,8 +388,8 @@ public class StellarParser extends Parser {
 	}
 	public static class TernaryFuncWithIfContext extends Conditional_exprContext {
 		public TerminalNode IF() { return getToken(StellarParser.IF, 0); }
-		public Comparison_exprContext comparison_expr() {
-			return getRuleContext(Comparison_exprContext.class,0);
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
 		}
 		public TerminalNode THEN() { return getToken(StellarParser.THEN, 0); }
 		public List<Transformation_exprContext> transformation_expr() {
@@ -370,7 +414,7 @@ public class StellarParser extends Parser {
 		Conditional_exprContext _localctx = new Conditional_exprContext(_ctx, getState());
 		enterRule(_localctx, 4, RULE_conditional_expr);
 		try {
-			setState(62);
+			setState(66);
 			switch (_input.LA(1)) {
 			case NOT:
 			case TRUE:
@@ -389,15 +433,15 @@ public class StellarParser extends Parser {
 				_localctx = new TernaryFuncWithoutIfContext(_localctx);
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(49);
-				comparison_expr(0);
-				setState(50);
+				setState(53);
+				logical_expr();
+				setState(54);
 				match(QUESTION);
-				setState(51);
+				setState(55);
 				transformation_expr();
-				setState(52);
+				setState(56);
 				match(COLON);
-				setState(53);
+				setState(57);
 				transformation_expr();
 				}
 				break;
@@ -405,17 +449,17 @@ public class StellarParser extends Parser {
 				_localctx = new TernaryFuncWithIfContext(_localctx);
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(55);
+				setState(59);
 				match(IF);
-				setState(56);
-				comparison_expr(0);
-				setState(57);
+				setState(60);
+				logical_expr();
+				setState(61);
 				match(THEN);
-				setState(58);
+				setState(62);
 				transformation_expr();
-				setState(59);
+				setState(63);
 				match(ELSE);
-				setState(60);
+				setState(64);
 				transformation_expr();
 				}
 				break;
@@ -434,140 +478,326 @@ public class StellarParser extends Parser {
 		return _localctx;
 	}
 
-	public static class Comparison_exprContext extends ParserRuleContext {
-		public Comparison_exprContext(ParserRuleContext parent, int invokingState) {
+	public static class Logical_exprContext extends ParserRuleContext {
+		public Logical_exprContext(ParserRuleContext parent, int invokingState) {
 			super(parent, invokingState);
 		}
-		@Override public int getRuleIndex() { return RULE_comparison_expr; }
+		@Override public int getRuleIndex() { return RULE_logical_expr; }
 	 
-		public Comparison_exprContext() { }
-		public void copyFrom(Comparison_exprContext ctx) {
+		public Logical_exprContext() { }
+		public void copyFrom(Logical_exprContext ctx) {
 			super.copyFrom(ctx);
 		}
 	}
-	public static class NotFuncContext extends Comparison_exprContext {
-		public TerminalNode NOT() { return getToken(StellarParser.NOT, 0); }
-		public TerminalNode LPAREN() { return getToken(StellarParser.LPAREN, 0); }
-		public Comparison_exprContext comparison_expr() {
-			return getRuleContext(Comparison_exprContext.class,0);
+	public static class LogicalExpressionAndContext extends Logical_exprContext {
+		public B_exprContext b_expr() {
+			return getRuleContext(B_exprContext.class,0);
 		}
-		public TerminalNode RPAREN() { return getToken(StellarParser.RPAREN, 0); }
-		public NotFuncContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public TerminalNode AND() { return getToken(StellarParser.AND, 0); }
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
+		}
+		public LogicalExpressionAndContext(Logical_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterNotFunc(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterLogicalExpressionAnd(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitNotFunc(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitLogicalExpressionAnd(this);
 		}
 	}
-	public static class ComparisonExpressionParensContext extends Comparison_exprContext {
-		public TerminalNode LPAREN() { return getToken(StellarParser.LPAREN, 0); }
-		public Comparison_exprContext comparison_expr() {
-			return getRuleContext(Comparison_exprContext.class,0);
+	public static class BoleanExpressionContext extends Logical_exprContext {
+		public B_exprContext b_expr() {
+			return getRuleContext(B_exprContext.class,0);
 		}
-		public TerminalNode RPAREN() { return getToken(StellarParser.RPAREN, 0); }
-		public ComparisonExpressionParensContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public BoleanExpressionContext(Logical_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterComparisonExpressionParens(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterBoleanExpression(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitComparisonExpressionParens(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitBoleanExpression(this);
 		}
 	}
-	public static class InExpressionContext extends Comparison_exprContext {
-		public List<Identifier_operandContext> identifier_operand() {
-			return getRuleContexts(Identifier_operandContext.class);
+	public static class LogicalExpressionOrContext extends Logical_exprContext {
+		public B_exprContext b_expr() {
+			return getRuleContext(B_exprContext.class,0);
 		}
-		public Identifier_operandContext identifier_operand(int i) {
-			return getRuleContext(Identifier_operandContext.class,i);
+		public TerminalNode OR() { return getToken(StellarParser.OR, 0); }
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
 		}
-		public TerminalNode IN() { return getToken(StellarParser.IN, 0); }
-		public InExpressionContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public LogicalExpressionOrContext(Logical_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterInExpression(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterLogicalExpressionOr(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitInExpression(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitLogicalExpressionOr(this);
 		}
 	}
-	public static class ComparisonExpressionWithOperatorContext extends Comparison_exprContext {
-		public List<Identifier_operandContext> identifier_operand() {
-			return getRuleContexts(Identifier_operandContext.class);
+
+	public final Logical_exprContext logical_expr() throws RecognitionException {
+		Logical_exprContext _localctx = new Logical_exprContext(_ctx, getState());
+		enterRule(_localctx, 6, RULE_logical_expr);
+		try {
+			setState(77);
+			switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
+			case 1:
+				_localctx = new LogicalExpressionAndContext(_localctx);
+				enterOuterAlt(_localctx, 1);
+				{
+				setState(68);
+				b_expr();
+				setState(69);
+				match(AND);
+				setState(70);
+				logical_expr();
+				}
+				break;
+			case 2:
+				_localctx = new LogicalExpressionOrContext(_localctx);
+				enterOuterAlt(_localctx, 2);
+				{
+				setState(72);
+				b_expr();
+				setState(73);
+				match(OR);
+				setState(74);
+				logical_expr();
+				}
+				break;
+			case 3:
+				_localctx = new BoleanExpressionContext(_localctx);
+				enterOuterAlt(_localctx, 3);
+				{
+				setState(76);
+				b_expr();
+				}
+				break;
+			}
+		}
+		catch (RecognitionException re) {
+			_localctx.exception = re;
+			_errHandler.reportError(this, re);
+			_errHandler.recover(this, re);
 		}
-		public Identifier_operandContext identifier_operand(int i) {
-			return getRuleContext(Identifier_operandContext.class,i);
+		finally {
+			exitRule();
 		}
-		public Comp_operatorContext comp_operator() {
-			return getRuleContext(Comp_operatorContext.class,0);
+		return _localctx;
+	}
+
+	public static class B_exprContext extends ParserRuleContext {
+		public Comparison_exprContext comparison_expr() {
+			return getRuleContext(Comparison_exprContext.class,0);
 		}
-		public ComparisonExpressionWithOperatorContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public In_exprContext in_expr() {
+			return getRuleContext(In_exprContext.class,0);
+		}
+		public B_exprContext(ParserRuleContext parent, int invokingState) {
+			super(parent, invokingState);
+		}
+		@Override public int getRuleIndex() { return RULE_b_expr; }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterComparisonExpressionWithOperator(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterB_expr(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitComparisonExpressionWithOperator(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitB_expr(this);
 		}
 	}
-	public static class LogicalExpressionAndContext extends Comparison_exprContext {
-		public List<Comparison_exprContext> comparison_expr() {
-			return getRuleContexts(Comparison_exprContext.class);
+
+	public final B_exprContext b_expr() throws RecognitionException {
+		B_exprContext _localctx = new B_exprContext(_ctx, getState());
+		enterRule(_localctx, 8, RULE_b_expr);
+		try {
+			setState(81);
+			switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
+			case 1:
+				enterOuterAlt(_localctx, 1);
+				{
+				setState(79);
+				comparison_expr(0);
+				}
+				break;
+			case 2:
+				enterOuterAlt(_localctx, 2);
+				{
+				setState(80);
+				in_expr();
+				}
+				break;
+			}
 		}
-		public Comparison_exprContext comparison_expr(int i) {
-			return getRuleContext(Comparison_exprContext.class,i);
+		catch (RecognitionException re) {
+			_localctx.exception = re;
+			_errHandler.reportError(this, re);
+			_errHandler.recover(this, re);
 		}
-		public TerminalNode AND() { return getToken(StellarParser.AND, 0); }
-		public LogicalExpressionAndContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		finally {
+			exitRule();
+		}
+		return _localctx;
+	}
+
+	public static class In_exprContext extends ParserRuleContext {
+		public In_exprContext(ParserRuleContext parent, int invokingState) {
+			super(parent, invokingState);
+		}
+		@Override public int getRuleIndex() { return RULE_in_expr; }
+	 
+		public In_exprContext() { }
+		public void copyFrom(In_exprContext ctx) {
+			super.copyFrom(ctx);
+		}
+	}
+	public static class NInExpressionStatementContext extends In_exprContext {
+		public Identifier_operandContext identifier_operand() {
+			return getRuleContext(Identifier_operandContext.class,0);
+		}
+		public TerminalNode NIN() { return getToken(StellarParser.NIN, 0); }
+		public B_exprContext b_expr() {
+			return getRuleContext(B_exprContext.class,0);
+		}
+		public NInExpressionStatementContext(In_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterLogicalExpressionAnd(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterNInExpressionStatement(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitLogicalExpressionAnd(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitNInExpressionStatement(this);
 		}
 	}
-	public static class NInExpressionContext extends Comparison_exprContext {
-		public List<Identifier_operandContext> identifier_operand() {
-			return getRuleContexts(Identifier_operandContext.class);
+	public static class InExpressionStatementContext extends In_exprContext {
+		public Identifier_operandContext identifier_operand() {
+			return getRuleContext(Identifier_operandContext.class,0);
 		}
-		public Identifier_operandContext identifier_operand(int i) {
-			return getRuleContext(Identifier_operandContext.class,i);
+		public TerminalNode IN() { return getToken(StellarParser.IN, 0); }
+		public B_exprContext b_expr() {
+			return getRuleContext(B_exprContext.class,0);
 		}
-		public TerminalNode NIN() { return getToken(StellarParser.NIN, 0); }
-		public NInExpressionContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public InExpressionStatementContext(In_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterNInExpression(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterInExpressionStatement(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitNInExpression(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitInExpressionStatement(this);
+		}
+	}
+
+	public final In_exprContext in_expr() throws RecognitionException {
+		In_exprContext _localctx = new In_exprContext(_ctx, getState());
+		enterRule(_localctx, 10, RULE_in_expr);
+		try {
+			setState(91);
+			switch ( getInterpreter().adaptivePredict(_input,4,_ctx) ) {
+			case 1:
+				_localctx = new InExpressionStatementContext(_localctx);
+				enterOuterAlt(_localctx, 1);
+				{
+				setState(83);
+				identifier_operand();
+				setState(84);
+				match(IN);
+				setState(85);
+				b_expr();
+				}
+				break;
+			case 2:
+				_localctx = new NInExpressionStatementContext(_localctx);
+				enterOuterAlt(_localctx, 2);
+				{
+				setState(87);
+				identifier_operand();
+				setState(88);
+				match(NIN);
+				setState(89);
+				b_expr();
+				}
+				break;
+			}
+		}
+		catch (RecognitionException re) {
+			_localctx.exception = re;
+			_errHandler.reportError(this, re);
+			_errHandler.recover(this, re);
 		}
+		finally {
+			exitRule();
+		}
+		return _localctx;
 	}
-	public static class LogicalExpressionOrContext extends Comparison_exprContext {
+
+	public static class Comparison_exprContext extends ParserRuleContext {
+		public Comparison_exprContext(ParserRuleContext parent, int invokingState) {
+			super(parent, invokingState);
+		}
+		@Override public int getRuleIndex() { return RULE_comparison_expr; }
+	 
+		public Comparison_exprContext() { }
+		public void copyFrom(Comparison_exprContext ctx) {
+			super.copyFrom(ctx);
+		}
+	}
+	public static class NotFuncContext extends Comparison_exprContext {
+		public TerminalNode NOT() { return getToken(StellarParser.NOT, 0); }
+		public TerminalNode LPAREN() { return getToken(StellarParser.LPAREN, 0); }
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
+		}
+		public TerminalNode RPAREN() { return getToken(StellarParser.RPAREN, 0); }
+		public NotFuncContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		@Override
+		public void enterRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterNotFunc(this);
+		}
+		@Override
+		public void exitRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitNotFunc(this);
+		}
+	}
+	public static class ComparisonExpressionParensContext extends Comparison_exprContext {
+		public TerminalNode LPAREN() { return getToken(StellarParser.LPAREN, 0); }
+		public Logical_exprContext logical_expr() {
+			return getRuleContext(Logical_exprContext.class,0);
+		}
+		public TerminalNode RPAREN() { return getToken(StellarParser.RPAREN, 0); }
+		public ComparisonExpressionParensContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		@Override
+		public void enterRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterComparisonExpressionParens(this);
+		}
+		@Override
+		public void exitRule(ParseTreeListener listener) {
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitComparisonExpressionParens(this);
+		}
+	}
+	public static class ComparisonExpressionWithOperatorContext extends Comparison_exprContext {
 		public List<Comparison_exprContext> comparison_expr() {
 			return getRuleContexts(Comparison_exprContext.class);
 		}
 		public Comparison_exprContext comparison_expr(int i) {
 			return getRuleContext(Comparison_exprContext.class,i);
 		}
-		public TerminalNode OR() { return getToken(StellarParser.OR, 0); }
-		public LogicalExpressionOrContext(Comparison_exprContext ctx) { copyFrom(ctx); }
+		public Comp_operatorContext comp_operator() {
+			return getRuleContext(Comp_operatorContext.class,0);
+		}
+		public ComparisonExpressionWithOperatorContext(Comparison_exprContext ctx) { copyFrom(ctx); }
 		@Override
 		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterLogicalExpressionOr(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterComparisonExpressionWithOperator(this);
 		}
 		@Override
 		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitLogicalExpressionOr(this);
+			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitComparisonExpressionWithOperator(this);
 		}
 	}
 	public static class OperandContext extends Comparison_exprContext {
@@ -594,133 +824,77 @@ public class StellarParser extends Parser {
 		int _parentState = getState();
 		Comparison_exprContext _localctx = new Comparison_exprContext(_ctx, _parentState);
 		Comparison_exprContext _prevctx = _localctx;
-		int _startState = 6;
-		enterRecursionRule(_localctx, 6, RULE_comparison_expr, _p);
+		int _startState = 12;
+		enterRecursionRule(_localctx, 12, RULE_comparison_expr, _p);
 		try {
 			int _alt;
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(87);
-			switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
+			setState(104);
+			switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) {
 			case 1:
 				{
-				_localctx = new ComparisonExpressionWithOperatorContext(_localctx);
-				_ctx = _localctx;
-				_prevctx = _localctx;
-
-				setState(65);
-				identifier_operand();
-				setState(66);
-				comp_operator();
-				setState(67);
-				identifier_operand();
-				}
-				break;
-			case 2:
-				{
-				_localctx = new InExpressionContext(_localctx);
-				_ctx = _localctx;
-				_prevctx = _localctx;
-				setState(69);
-				identifier_operand();
-				setState(70);
-				match(IN);
-				setState(71);
-				identifier_operand();
-				}
-				break;
-			case 3:
-				{
-				_localctx = new NInExpressionContext(_localctx);
-				_ctx = _localctx;
-				_prevctx = _localctx;
-				setState(73);
-				identifier_operand();
-				setState(74);
-				match(NIN);
-				setState(75);
-				identifier_operand();
-				}
-				break;
-			case 4:
-				{
 				_localctx = new NotFuncContext(_localctx);
 				_ctx = _localctx;
 				_prevctx = _localctx;
-				setState(77);
+
+				setState(94);
 				match(NOT);
-				setState(78);
+				setState(95);
 				match(LPAREN);
-				setState(79);
-				comparison_expr(0);
-				setState(80);
+				setState(96);
+				logical_expr();
+				setState(97);
 				match(RPAREN);
 				}
 				break;
-			case 5:
+			case 2:
 				{
 				_localctx = new ComparisonExpressionParensContext(_localctx);
 				_ctx = _localctx;
 				_prevctx = _localctx;
-				setState(82);
+				setState(99);
 				match(LPAREN);
-				setState(83);
-				comparison_expr(0);
-				setState(84);
+				setState(100);
+				logical_expr();
+				setState(101);
 				match(RPAREN);
 				}
 				break;
-			case 6:
+			case 3:
 				{
 				_localctx = new OperandContext(_localctx);
 				_ctx = _localctx;
 				_prevctx = _localctx;
-				setState(86);
+				setState(103);
 				identifier_operand();
 				}
 				break;
 			}
 			_ctx.stop = _input.LT(-1);
-			setState(97);
+			setState(112);
 			_errHandler.sync(this);
-			_alt = getInterpreter().adaptivePredict(_input,4,_ctx);
+			_alt = getInterpreter().adaptivePredict(_input,6,_ctx);
 			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
 				if ( _alt==1 ) {
 					if ( _parseListeners!=null ) triggerExitRuleEvent();
 					_prevctx = _localctx;
 					{
-					setState(95);
-					switch ( getInterpreter().adaptivePredict(_input,3,_ctx) ) {
-					case 1:
-						{
-						_localctx = new LogicalExpressionAndContext(new Comparison_exprContext(_parentctx, _parentState));
-						pushNewRecursionContext(_localctx, _startState, RULE_comparison_expr);
-						setState(89);
-						if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)");
-						setState(90);
-						match(AND);
-						setState(91);
-						comparison_expr(6);
-						}
-						break;
-					case 2:
-						{
-						_localctx = new LogicalExpressionOrContext(new Comparison_exprContext(_parentctx, _parentState));
-						pushNewRecursionContext(_localctx, _startState, RULE_comparison_expr);
-						setState(92);
-						if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
-						setState(93);
-						match(OR);
-						setState(94);
-						comparison_expr(5);
-						}
-						break;
+					{
+					_localctx = new ComparisonExpressionWithOperatorContext(new Comparison_exprContext(_parentctx, _parentState));
+					pushNewRecursionContext(_localctx, _startState, RULE_comparison_expr);
+					setState(106);
+					if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)");
+					setState(107);
+					comp_operator();
+					setState(108);
+					comparison_expr(5);
 					}
 					} 
 				}
-				setState(99);
+				setState(114);
 				_errHandler.sync(this);
-				_alt = getInterpreter().adaptivePredict(_input,4,_ctx);
+				_alt = getInterpreter().adaptivePredict(_input,6,_ctx);
 			}
 			}
 		}
@@ -755,11 +929,11 @@ public class StellarParser extends Parser {
 
 	public final Transformation_entityContext transformation_entity() throws RecognitionException {
 		Transformation_entityContext _localctx = new Transformation_entityContext(_ctx, getState());
-		enterRule(_localctx, 8, RULE_transformation_entity);
+		enterRule(_localctx, 14, RULE_transformation_entity);
 		try {
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(100);
+			setState(115);
 			identifier_operand();
 			}
 		}
@@ -805,13 +979,13 @@ public class StellarParser extends Parser {
 
 	public final Comp_operatorContext comp_operator() throws RecognitionException {
 		Comp_operatorContext _localctx = new Comp_operatorContext(_ctx, getState());
-		enterRule(_localctx, 10, RULE_comp_operator);
+		enterRule(_localctx, 16, RULE_comp_operator);
 		int _la;
 		try {
 			_localctx = new ComparisonOpContext(_localctx);
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(102);
+			setState(117);
 			_la = _input.LA(1);
 			if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << EQ) | (1L << NEQ) | (1L << LT) | (1L << LTE) | (1L << GT) | (1L << GTE))) != 0)) ) {
 			_errHandler.recoverInline(this);
@@ -831,112 +1005,6 @@ public class StellarParser extends Parser {
 		return _localctx;
 	}
 
-	public static class Arith_operator_additionContext extends ParserRuleContext {
-		public Arith_operator_additionContext(ParserRuleContext parent, int invokingState) {
-			super(parent, invokingState);
-		}
-		@Override public int getRuleIndex() { return RULE_arith_operator_addition; }
-	 
-		public Arith_operator_additionContext() { }
-		public void copyFrom(Arith_operator_additionContext ctx) {
-			super.copyFrom(ctx);
-		}
-	}
-	public static class ArithOp_plusContext extends Arith_operator_additionContext {
-		public TerminalNode PLUS() { return getToken(StellarParser.PLUS, 0); }
-		public TerminalNode MINUS() { return getToken(StellarParser.MINUS, 0); }
-		public ArithOp_plusContext(Arith_operator_additionContext ctx) { copyFrom(ctx); }
-		@Override
-		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterArithOp_plus(this);
-		}
-		@Override
-		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitArithOp_plus(this);
-		}
-	}
-
-	public final Arith_operator_additionContext arith_operator_addition() throws RecognitionException {
-		Arith_operator_additionContext _localctx = new Arith_operator_additionContext(_ctx, getState());
-		enterRule(_localctx, 12, RULE_arith_operator_addition);
-		int _la;
-		try {
-			_localctx = new ArithOp_plusContext(_localctx);
-			enterOuterAlt(_localctx, 1);
-			{
-			setState(104);
-			_la = _input.LA(1);
-			if ( !(_la==MINUS || _la==PLUS) ) {
-			_errHandler.recoverInline(this);
-			} else {
-				consume();
-			}
-			}
-		}
-		catch (RecognitionException re) {
-			_localctx.exception = re;
-			_errHandler.reportError(this, re);
-			_errHandler.recover(this, re);
-		}
-		finally {
-			exitRule();
-		}
-		return _localctx;
-	}
-
-	public static class Arith_operator_mulContext extends ParserRuleContext {
-		public Arith_operator_mulContext(ParserRuleContext parent, int invokingState) {
-			super(parent, invokingState);
-		}
-		@Override public int getRuleIndex() { return RULE_arith_operator_mul; }
-	 
-		public Arith_operator_mulContext() { }
-		public void copyFrom(Arith_operator_mulContext ctx) {
-			super.copyFrom(ctx);
-		}
-	}
-	public static class ArithOp_mulContext extends Arith_operator_mulContext {
-		public TerminalNode MUL() { return getToken(StellarParser.MUL, 0); }
-		public TerminalNode DIV() { return getToken(StellarParser.DIV, 0); }
-		public ArithOp_mulContext(Arith_operator_mulContext ctx) { copyFrom(ctx); }
-		@Override
-		public void enterRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).enterArithOp_mul(this);
-		}
-		@Override
-		public void exitRule(ParseTreeListener listener) {
-			if ( listener instanceof StellarListener ) ((StellarListener)listener).exitArithOp_mul(this);
-		}
-	}
-
-	public final Arith_operator_mulContext arith_operator_mul() throws RecognitionException {
-		Arith_operator_mulContext _localctx = new Arith_operator_mulContext(_ctx, getState());
-		enterRule(_localctx, 14, RULE_arith_operator_mul);
-		int _la;
-		try {
-			_localctx = new ArithOp_mulContext(_localctx);
-			enterOuterAlt(_localctx, 1);
-			{
-			setState(106);
-			_la = _input.LA(1);
-			if ( !(_la==DIV || _la==MUL) ) {
-			_errHandler.recoverInline(this);
-			} else {
-				consume();
-			}
-			}
-		}
-		catch (RecognitionException re) {
-			_localctx.exception = re;
-			_errHandler.reportError(this, re);
-			_errHandler.recover(this, re);
-		}
-		finally {
-			exitRule();
-		}
-		return _localctx;
-	}
-
 	public static class Func_argsContext extends ParserRuleContext {
 		public TerminalNode LPAREN() { return getToken(StellarParser.LPAREN, 0); }
 		public Op_listContext op_list() {
@@ -959,27 +1027,27 @@ public class StellarParser extends Parser {
 
 	public final Func_argsContext func_args() throws RecognitionException {
 		Func_argsContext _localctx = new Func_argsContext(_ctx, getState());
-		enterRule(_localctx, 16, RULE_func_args);
+		enterRule(_localctx, 18, RULE_func_args);
 		try {
-			setState(114);
-			switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) {
+			setState(125);
+			switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) {
 			case 1:
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(108);
+				setState(119);
 				match(LPAREN);
-				setState(109);
+				setState(120);
 				op_list(0);
-				setState(110);
+				setState(121);
 				match(RPAREN);
 				}
 				break;
 			case 2:
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(112);
+				setState(123);
 				match(LPAREN);
-				setState(113);
+				setState(124);
 				match(RPAREN);
 				}
 				break;
@@ -1030,47 +1098,47 @@ public class StellarParser extends Parser {
 		int _parentState = getState();
 		Op_listContext _localctx = new Op_listContext(_ctx, _parentState);
 		Op_listContext _prevctx = _localctx;
-		int _startState = 18;
-		enterRecursionRule(_localctx, 18, RULE_op_list, _p);
+		int _startState = 20;
+		enterRecursionRule(_localctx, 20, RULE_op_list, _p);
 		try {
 			int _alt;
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(119);
-			switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) {
+			setState(130);
+			switch ( getInterpreter().adaptivePredict(_input,8,_ctx) ) {
 			case 1:
 				{
-				setState(117);
+				setState(128);
 				identifier_operand();
 				}
 				break;
 			case 2:
 				{
-				setState(118);
+				setState(129);
 				conditional_expr();
 				}
 				break;
 			}
 			_ctx.stop = _input.LT(-1);
-			setState(129);
+			setState(140);
 			_errHandler.sync(this);
-			_alt = getInterpreter().adaptivePredict(_input,8,_ctx);
+			_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
 			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
 				if ( _alt==1 ) {
 					if ( _parseListeners!=null ) triggerExitRuleEvent();
 					_prevctx = _localctx;
 					{
-					setState(127);
-					switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) {
+					setState(138);
+					switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) {
 					case 1:
 						{
 						_localctx = new Op_listContext(_parentctx, _parentState);
 						pushNewRecursionContext(_localctx, _startState, RULE_op_list);
-						setState(121);
+						setState(132);
 						if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
-						setState(122);
+						setState(133);
 						match(COMMA);
-						setState(123);
+						setState(134);
 						identifier_operand();
 						}
 						break;
@@ -1078,20 +1146,20 @@ public class StellarParser extends Parser {
 						{
 						_localctx = new Op_listContext(_parentctx, _parentState);
 						pushNewRecursionContext(_localctx, _startState, RULE_op_list);
-						setState(124);
+						setState(135);
 						if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
-						setState(125);
+						setState(136);
 						match(COMMA);
-						setState(126);
+						setState(137);
 						conditional_expr();
 						}
 						break;
 					}
 					} 
 				}
-				setState(131);
+				setState(142);
 				_errHandler.sync(this);
-				_alt = getInterpreter().adaptivePredict(_input,8,_ctx);
+				_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
 			}
 			}
 		}
@@ -1128,27 +1196,27 @@ public class StellarParser extends Parser {
 
 	public final List_entityContext list_entity() throws RecognitionException {
 		List_entityContext _localctx = new List_entityContext(_ctx, getState());
-		enterRule(_localctx, 20, RULE_list_entity);
+		enterRule(_localctx, 22, RULE_list_entity);
 		try {
-			setState(138);
-			switch ( getInterpreter().adaptivePredict(_input,9,_ctx) ) {
+			setState(149);
+			switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
 			case 1:
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(132);
+				setState(143);
 				match(LBRACKET);
-				setState(133);
+				setState(144);
 				op_list(0);
-				setState(134);
+				setState(145);
 				match(RBRACKET);
 				}
 				break;
 			case 2:
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(136);
+				setState(147);
 				match(LBRACKET);
-				setState(137);
+				setState(148);
 				match(RBRACKET);
 				}
 				break;
@@ -1200,24 +1268,24 @@ public class StellarParser extends Parser {
 		int _parentState = getState();
 		Kv_listContext _localctx = new Kv_listContext(_ctx, _parentState);
 		Kv_listContext _prevctx = _localctx;
-		int _startState = 22;
-		enterRecursionRule(_localctx, 22, RULE_kv_list, _p);
+		int _startState = 24;
+		enterRecursionRule(_localctx, 24, RULE_kv_list, _p);
 		try {
 			int _alt;
 			enterOuterAlt(_localctx, 1);
 			{
 			{
-			setState(141);
+			setState(152);
 			identifier_operand();
-			setState(142);
+			setState(153);
 			match(COLON);
-			setState(143);
+			setState(154);
 			transformation_expr();
 			}
 			_ctx.stop = _input.LT(-1);
-			setState(153);
+			setState(164);
 			_errHandler.sync(this);
-			_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
+			_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
 			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
 				if ( _alt==1 ) {
 					if ( _parseListeners!=null ) triggerExitRuleEvent();
@@ -1226,22 +1294,22 @@ public class StellarParser extends Parser {
 					{
 					_localctx = new Kv_listContext(_parentctx, _parentState);
 					pushNewRecursionContext(_localctx, _startState, RULE_kv_list);
-					setState(145);
+					setState(156);
 					if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
-					setState(146);
+					setState(157);
 					match(COMMA);
-					setState(147);
+					setState(158);
 					identifier_operand();
-					setState(148);
+					setState(159);
 					match(COLON);
-					setState(149);
+					setState(160);
 					transformation_expr();
 					}
 					} 
 				}
-				setState(155);
+				setState(166);
 				_errHandler.sync(this);
-				_alt = getInterpreter().adaptivePredict(_input,10,_ctx);
+				_alt = getInterpreter().adaptivePredict(_input,12,_ctx);
 			}
 			}
 		}
@@ -1278,27 +1346,27 @@ public class StellarParser extends Parser {
 
 	public final Map_entityContext map_entity() throws RecognitionException {
 		Map_entityContext _localctx = new Map_entityContext(_ctx, getState());
-		enterRule(_localctx, 24, RULE_map_entity);
+		enterRule(_localctx, 26, RULE_map_entity);
 		try {
-			setState(162);
-			switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) {
+			setState(173);
+			switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) {
 			case 1:
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(156);
+				setState(167);
 				match(LBRACE);
-				setState(157);
+				setState(168);
 				kv_list(0);
-				setState(158);
+				setState(169);
 				match(RBRACE);
 				}
 				break;
 			case 2:
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(160);
+				setState(171);
 				match(LBRACE);
-				setState(161);
+				setState(172);
 				match(RBRACE);
 				}
 				break;
@@ -1386,8 +1454,8 @@ public class StellarParser extends Parser {
 		int _parentState = getState();
 		Arithmetic_exprContext _localctx = new Arithmetic_exprContext(_ctx, _parentState);
 		Arithmetic_exprContext _prevctx = _localctx;
-		int _startState = 26;
-		enterRecursionRule(_localctx, 26, RULE_arithmetic_expr, _p);
+		int _startState = 28;
+		enterRecursionRule(_localctx, 28, RULE_arithmetic_expr, _p);
 		try {
 			int _alt;
 			enterOuterAlt(_localctx, 1);
@@ -1397,29 +1465,29 @@ public class StellarParser extends Parser {
 			_ctx = _localctx;
 			_prevctx = _localctx;
 
-			setState(165);
+			setState(176);
 			arithmetic_expr_mul(0);
 			}
 			_ctx.stop = _input.LT(-1);
-			setState(175);
+			setState(186);
 			_errHandler.sync(this);
-			_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
+			_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
 			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
 				if ( _alt==1 ) {
 					if ( _parseListeners!=null ) triggerExitRuleEvent();
 					_prevctx = _localctx;
 					{
-					setState(173);
-					switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) {
+					setState(184);
+					switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
 					case 1:
 						{
 						_localctx = new ArithExpr_plusContext(new Arithmetic_exprContext(_parentctx, _parentState));
 						pushNewRecursionContext(_localctx, _startState, RULE_arithmetic_expr);
-						setState(167);
+						setState(178);
 						if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
-						setState(168);
+						setState(179);
 						match(PLUS);
-						setState(169);
+						setState(180);
 						arithmetic_expr_mul(0);
 						}
 						break;
@@ -1427,20 +1495,20 @@ public class StellarParser extends Parser {
 						{
 						_localctx = new ArithExpr_minusContext(new Arithmetic_exprContext(_parentctx, _parentState));
 						pushNewRecursionContext(_localctx, _startState, RULE_arithmetic_expr);
-						setState(170);
+						setState(181);
 						if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
-						setState(171);
+						setState(182);
 						match(MINUS);
-						setState(172);
+						setState(183);
 						arithmetic_expr_mul(0);
 						}
 						break;
 					}
 					} 
 				}
-				setState(177);
+				setState(188);
 				_errHandler.sync(this);
-				_alt = getInterpreter().adaptivePredict(_input,13,_ctx);
+				_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
 			}
 			}
 		}
@@ -1526,8 +1594,8 @@ public class StellarParser extends Parser {
 		int _parentState = getState();
 		Arithmetic_expr_mulContext _localctx = new Arithmetic_expr_mulContext(_ctx, _parentState);
 		Arithmetic_expr_mulContext _prevctx = _localctx;
-		int _startState = 28;
-		enterRecursionRule(_localctx, 28, RULE_arithmetic_expr_mul, _p);
+		int _startState = 30;
+		enterRecursionRule(_localctx, 30, RULE_arithmetic_expr_mul, _p);
 		try {
 			int _alt;
 			enterOuterAlt(_localctx, 1);
@@ -1537,29 +1605,29 @@ public class StellarParser extends Parser {
 			_ctx = _localctx;
 			_prevctx = _localctx;
 
-			setState(179);
+			setState(190);
 			arithmetic_operands();
 			}
 			_ctx.stop = _input.LT(-1);
-			setState(189);
+			setState(200);
 			_errHandler.sync(this);
-			_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
+			_alt = getInterpreter().adaptivePredict(_input,17,_ctx);
 			while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
 				if ( _alt==1 ) {
 					if ( _parseListeners!=null ) triggerExitRuleEvent();
 					_prevctx = _localctx;
 					{
-					setState(187);
-					switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) {
+					setState(198);
+					switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) {
 					case 1:
 						{
 						_localctx = new ArithExpr_mulContext(new Arithmetic_expr_mulContext(_parentctx, _parentState));
 						pushNewRecursionContext(_localctx, _startState, RULE_arithmetic_expr_mul);
-						setState(181);
+						setState(192);
 						if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
-						setState(182);
+						setState(193);
 						match(MUL);
-						setState(183);
+						setState(194);
 						arithmetic_expr_mul(3);
 						}
 						break;
@@ -1567,20 +1635,20 @@ public class StellarParser extends Parser {
 						{
 						_localctx = new ArithExpr_divContext(new Arithmetic_expr_mulContext(_parentctx, _parentState));
 						pushNewRecursionContext(_localctx, _startState, RULE_arithmetic_expr_mul);
-						setState(184);
+						setState(195);
 						if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
-						setState(185);
+						setState(196);
 						match(DIV);
-						setState(186);
+						setState(197);
 						arithmetic_expr_mul(2);
 						}
 						break;
 					}
 					} 
 				}
-				setState(191);
+				setState(202);
 				_errHandler.sync(this);
-				_alt = getInterpreter().adaptivePredict(_input,15,_ctx);
+				_alt = getInterpreter().adaptivePredict(_input,17,_ctx);
 			}
 			}
 		}
@@ -1624,14 +1692,14 @@ public class StellarParser extends Parser {
 
 	public final FunctionsContext functions() throws RecognitionException {
 		FunctionsContext _localctx = new FunctionsContext(_ctx, getState());
-		enterRule(_localctx, 30, RULE_functions);
+		enterRule(_localctx, 32, RULE_functions);
 		try {
 			_localctx = new TransformationFuncContext(_localctx);
 			enterOuterAlt(_localctx, 1);
 			{
-			setState(192);
+			setState(203);
 			match(IDENTIFIER);
-			setState(193);
+			setState(204);
 			func_args();
 			}
 		}
@@ -1766,15 +1834,15 @@ public class StellarParser extends Parser {
 
 	public final Arithmetic_operandsContext arithmetic_operands() throws RecognitionException {
 		Arithmetic_operandsContext _localctx = new Arithmetic_operandsContext(_ctx, getState());
-		enterRule(_localctx, 32, RULE_arithmetic_operands);
+		enterRule(_localctx, 34, RULE_arithmetic_operands);
 		try {
-			setState(209);
-			switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) {
+			setState(220);
+			switch ( getInterpreter().adaptivePredict(_input,18,_ctx) ) {
 			case 1:
 				_localctx = new NumericFunctionsContext(_localctx);
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(195);
+				setState(206);
 				functions();
 				}
 				break;
@@ -1782,7 +1850,7 @@ public class StellarParser extends Parser {
 				_localctx = new DoubleLiteralContext(_localctx);
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(196);
+				setState(207);
 				match(DOUBLE_LITERAL);
 				}
 				break;
@@ -1790,7 +1858,7 @@ public class StellarParser extends Parser {
 				_localctx = new IntLiteralContext(_localctx);
 				enterOuterAlt(_localctx, 3);
 				{
-				setState(197);
+				setState(208);
 				match(INT_LITERAL);
 				}
 				break;
@@ -1798,7 +1866,7 @@ public class StellarParser extends Parser {
 				_localctx = new LongLiteralContext(_localctx);
 				enterOuterAlt(_localctx, 4);
 				{
-				setState(198);
+				setState(209);
 				match(LONG_LITERAL);
 				}
 				break;
@@ -1806,7 +1874,7 @@ public class StellarParser extends Parser {
 				_localctx = new FloatLiteralContext(_localctx);
 				enterOuterAlt(_localctx, 5);
 				{
-				setState(199);
+				setState(210);
 				match(FLOAT_LITERAL);
 				}
 				break;
@@ -1814,7 +1882,7 @@ public class StellarParser extends Parser {
 				_localctx = new VariableContext(_localctx);
 				enterOuterAlt(_localctx, 6);
 				{
-				setState(200);
+				setState(211);
 				match(IDENTIFIER);
 				}
 				break;
@@ -1822,11 +1890,11 @@ public class StellarParser extends Parser {
 				_localctx = new ParenArithContext(_localctx);
 				enterOuterAlt(_localctx, 7);
 				{
-				setState(201);
+				setState(212);
 				match(LPAREN);
-				setState(202);
+				setState(213);
 				arithmetic_expr(0);
-				setState(203);
+				setState(214);
 				match(RPAREN);
 				}
 				break;
@@ -1834,11 +1902,11 @@ public class StellarParser extends Parser {
 				_localctx = new CondExprContext(_localctx);
 				enterOuterAlt(_localctx, 8);
 				{
-				setState(205);
+				setState(216);
 				match(LPAREN);
-				setState(206);
+				setState(217);
 				conditional_expr();
-				setState(207);
+				setState(218);
 				match(RPAREN);
 				}
 				break;
@@ -1979,16 +2047,16 @@ public class StellarParser extends Parser {
 
 	public final Identifier_operandContext identifier_operand() throws RecognitionException {
 		Identifier_operandContext _localctx = new Identifier_operandContext(_ctx, getState());
-		enterRule(_localctx, 34, RULE_identifier_operand);
+		enterRule(_localctx, 36, RULE_identifier_operand);
 		int _la;
 		try {
-			setState(225);
-			switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) {
+			setState(236);
+			switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) {
 			case 1:
 				_localctx = new LogicalConstContext(_localctx);
 				enterOuterAlt(_localctx, 1);
 				{
-				setState(211);
+				setState(222);
 				_la = _input.LA(1);
 				if ( !(_la==TRUE || _la==FALSE) ) {
 				_errHandler.recoverInline(this);
@@ -2001,7 +2069,7 @@ public class StellarParser extends Parser {
 				_localctx = new ArithmeticOperandsContext(_localctx);
 				enterOuterAlt(_localctx, 2);
 				{
-				setState(212);
+				setState(223);
 				arithmetic_expr(0);
 				}
 				break;
@@ -2009,7 +2077,7 @@ public class StellarParser extends Parser {
 				_localctx = new StringLiteralContext(_localctx);
 				enterOuterAlt(_localctx, 3);
 				{
-				setState(213);
+				setState(224);
 				match(STRING_LITERAL);
 				}
 				break;
@@ -2017,7 +2085,7 @@ public class StellarParser extends Parser {
 				_localctx = new ListContext(_localctx);
 				enterOuterAlt(_localctx, 4);
 				{
-				setState(214);
+				setState(225);
 				list_entity();
 				}
 				break;
@@ -2025,7 +2093,7 @@ public class StellarParser extends Parser {
 				_localctx = new MapConstContext(_localctx);
 				enterOuterAlt(_localctx, 5);
 				{
-				setState(215);
+				setState(226);
 				map_entity();
 				}
 				break;
@@ -2033,7 +2101,7 @@ public class StellarParser extends Parser {
 				_localctx = new NullConstContext(_localctx);
 				enterOuterAlt(_localctx, 6);
 				{
-				setState(216);
+				setState(227);
 				match(NULL);
 				}
 				break;
@@ -2041,13 +2109,13 @@ public class StellarParser extends Parser {
 				_localctx = new ExistsFuncContext(_localctx);
 				enterOuterAlt(_localctx, 7);
 				{
-				setState(217);
+				setState(228);
 				match(EXISTS);
-				setState(218);
+				setState(229);
 				match(LPAREN);
-				setState(219);
+				setState(230);
 				match(IDENTIFIER);
-				setState(220);
+				setState(231);
 				match(RPAREN);
 				}
 				break;
@@ -2055,11 +2123,11 @@ public class StellarParser extends Parser {
 				_localctx = new CondExpr_parenContext(_localctx);
 				enterOuterAlt(_localctx, 8);
 				{
-				setState(221);
+				setState(232);
 				match(LPAREN);
-				setState(222);
+				setState(233);
 				conditional_expr();
-				setState(223);
+				setState(234);
 				match(RPAREN);
 				}
 				break;
@@ -2078,15 +2146,15 @@ public class StellarParser extends Parser {
 
 	public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
 		switch (ruleIndex) {
-		case 3:
+		case 6:
 			return comparison_expr_sempred((Comparison_exprContext)_localctx, predIndex);
-		case 9:
+		case 10:
 			return op_list_sempred((Op_listContext)_localctx, predIndex);
-		case 11:
+		case 12:
 			return kv_list_sempred((Kv_listContext)_localctx, predIndex);
-		case 13:
-			return arithmetic_expr_sempred((Arithmetic_exprContext)_localctx, predIndex);
 		case 14:
+			return arithmetic_expr_sempred((Arithmetic_exprContext)_localctx, predIndex);
+		case 15:
 			return arithmetic_expr_mul_sempred((Arithmetic_expr_mulContext)_localctx, predIndex);
 		}
 		return true;
@@ -2094,122 +2162,125 @@ public class StellarParser extends Parser {
 	private boolean comparison_expr_sempred(Comparison_exprContext _localctx, int predIndex) {
 		switch (predIndex) {
 		case 0:
-			return precpred(_ctx, 5);
-		case 1:
 			return precpred(_ctx, 4);
 		}
 		return true;
 	}
 	private boolean op_list_sempred(Op_listContext _localctx, int predIndex) {
 		switch (predIndex) {
-		case 2:
+		case 1:
 			return precpred(_ctx, 3);
-		case 3:
+		case 2:
 			return precpred(_ctx, 1);
 		}
 		return true;
 	}
 	private boolean kv_list_sempred(Kv_listContext _localctx, int predIndex) {
 		switch (predIndex) {
-		case 4:
+		case 3:
 			return precpred(_ctx, 1);
 		}
 		return true;
 	}
 	private boolean arithmetic_expr_sempred(Arithmetic_exprContext _localctx, int predIndex) {
 		switch (predIndex) {
-		case 5:
+		case 4:
 			return precpred(_ctx, 2);
-		case 6:
+		case 5:
 			return precpred(_ctx, 1);
 		}
 		return true;
 	}
 	private boolean arithmetic_expr_mul_sempred(Arithmetic_expr_mulContext _localctx, int predIndex) {
 		switch (predIndex) {
-		case 7:
+		case 6:
 			return precpred(_ctx, 2);
-		case 8:
+		case 7:
 			return precpred(_ctx, 1);
 		}
 		return true;
 	}
 
 	public static final String _serializedATN =
-		"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3-\u00e6\4\2\t\2\4"+
+		"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3-\u00f1\4\2\t\2\4"+
 		"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
 		"\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
-		"\4\23\t\23\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\62\n\3\3\4"+
-		"\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4A\n\4\3\5\3\5\3\5"+
-		"\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3"+
-		"\5\3\5\3\5\5\5Z\n\5\3\5\3\5\3\5\3\5\3\5\3\5\7\5b\n\5\f\5\16\5e\13\5\3"+
-		"\6\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3\n\5\nu\n\n\3\13\3"+
-		"\13\3\13\5\13z\n\13\3\13\3\13\3\13\3\13\3\13\3\13\7\13\u0082\n\13\f\13"+
-		"\16\13\u0085\13\13\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u008d\n\f\3\r\3\r\3\r\3"+
-		"\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\7\r\u009a\n\r\f\r\16\r\u009d\13\r\3\16"+
-		"\3\16\3\16\3\16\3\16\3\16\5\16\u00a5\n\16\3\17\3\17\3\17\3\17\3\17\3\17"+
-		"\3\17\3\17\3\17\7\17\u00b0\n\17\f\17\16\17\u00b3\13\17\3\20\3\20\3\20"+
-		"\3\20\3\20\3\20\3\20\3\20\3\20\7\20\u00be\n\20\f\20\16\20\u00c1\13\20"+
-		"\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22"+
-		"\3\22\3\22\3\22\5\22\u00d4\n\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+
-		"\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u00e4\n\23\3\23\2\7\b\24\30\34\36"+
-		"\24\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$\2\6\3\2\f\21\3\2\30\31"+
-		"\3\2\32\33\3\2\n\13\u00f8\2&\3\2\2\2\4\61\3\2\2\2\6@\3\2\2\2\bY\3\2\2"+
-		"\2\nf\3\2\2\2\fh\3\2\2\2\16j\3\2\2\2\20l\3\2\2\2\22t\3\2\2\2\24y\3\2\2"+
-		"\2\26\u008c\3\2\2\2\30\u008e\3\2\2\2\32\u00a4\3\2\2\2\34\u00a6\3\2\2\2"+
-		"\36\u00b4\3\2\2\2 \u00c2\3\2\2\2\"\u00d3\3\2\2\2$\u00e3\3\2\2\2&\'\5\4"+
-		"\3\2\'(\7\2\2\3(\3\3\2\2\2)\62\5\6\4\2*+\7 \2\2+,\5\4\3\2,-\7!\2\2-\62"+
-		"\3\2\2\2.\62\5\34\17\2/\62\5\n\6\2\60\62\5\b\5\2\61)\3\2\2\2\61*\3\2\2"+
-		"\2\61.\3\2\2\2\61/\3\2\2\2\61\60\3\2\2\2\62\5\3\2\2\2\63\64\5\b\5\2\64"+
-		"\65\7\22\2\2\65\66\5\4\3\2\66\67\7\23\2\2\678\5\4\3\28A\3\2\2\29:\7\24"+
-		"\2\2:;\5\b\5\2;<\7\25\2\2<=\5\4\3\2=>\7\26\2\2>?\5\4\3\2?A\3\2\2\2@\63"+
-		"\3\2\2\2@9\3\2\2\2A\7\3\2\2\2BC\b\5\1\2CD\5$\23\2DE\5\f\7\2EF\5$\23\2"+
-		"FZ\3\2\2\2GH\5$\23\2HI\7\"\2\2IJ\5$\23\2JZ\3\2\2\2KL\5$\23\2LM\7#\2\2"+
-		"MN\5$\23\2NZ\3\2\2\2OP\7\t\2\2PQ\7 \2\2QR\5\b\5\2RS\7!\2\2SZ\3\2\2\2T"+
-		"U\7 \2\2UV\5\b\5\2VW\7!\2\2WZ\3\2\2\2XZ\5$\23\2YB\3\2\2\2YG\3\2\2\2YK"+
-		"\3\2\2\2YO\3\2\2\2YT\3\2\2\2YX\3\2\2\2Zc\3\2\2\2[\\\f\7\2\2\\]\7\7\2\2"+
-		"]b\5\b\5\b^_\f\6\2\2_`\7\b\2\2`b\5\b\5\7a[\3\2\2\2a^\3\2\2\2be\3\2\2\2"+
-		"ca\3\2\2\2cd\3\2\2\2d\t\3\2\2\2ec\3\2\2\2fg\5$\23\2g\13\3\2\2\2hi\t\2"+
-		"\2\2i\r\3\2\2\2jk\t\3\2\2k\17\3\2\2\2lm\t\4\2\2m\21\3\2\2\2no\7 \2\2o"+
-		"p\5\24\13\2pq\7!\2\2qu\3\2\2\2rs\7 \2\2su\7!\2\2tn\3\2\2\2tr\3\2\2\2u"+
-		"\23\3\2\2\2vw\b\13\1\2wz\5$\23\2xz\5\6\4\2yv\3\2\2\2yx\3\2\2\2z\u0083"+
-		"\3\2\2\2{|\f\5\2\2|}\7\5\2\2}\u0082\5$\23\2~\177\f\3\2\2\177\u0080\7\5"+
-		"\2\2\u0080\u0082\5\6\4\2\u0081{\3\2\2\2\u0081~\3\2\2\2\u0082\u0085\3\2"+
-		"\2\2\u0083\u0081\3\2\2\2\u0083\u0084\3\2\2\2\u0084\25\3\2\2\2\u0085\u0083"+
-		"\3\2\2\2\u0086\u0087\7\36\2\2\u0087\u0088\5\24\13\2\u0088\u0089\7\37\2"+
-		"\2\u0089\u008d\3\2\2\2\u008a\u008b\7\36\2\2\u008b\u008d\7\37\2\2\u008c"+
-		"\u0086\3\2\2\2\u008c\u008a\3\2\2\2\u008d\27\3\2\2\2\u008e\u008f\b\r\1"+
-		"\2\u008f\u0090\5$\23\2\u0090\u0091\7\23\2\2\u0091\u0092\5\4\3\2\u0092"+
-		"\u009b\3\2\2\2\u0093\u0094\f\3\2\2\u0094\u0095\7\5\2\2\u0095\u0096\5$"+
-		"\23\2\u0096\u0097\7\23\2\2\u0097\u0098\5\4\3\2\u0098\u009a\3\2\2\2\u0099"+
-		"\u0093\3\2\2\2\u009a\u009d\3\2\2\2\u009b\u0099\3\2\2\2\u009b\u009c\3\2"+
-		"\2\2\u009c\31\3\2\2\2\u009d\u009b\3\2\2\2\u009e\u009f\7\34\2\2\u009f\u00a0"+
-		"\5\30\r\2\u00a0\u00a1\7\35\2\2\u00a1\u00a5\3\2\2\2\u00a2\u00a3\7\34\2"+
-		"\2\u00a3\u00a5\7\35\2\2\u00a4\u009e\3\2\2\2\u00a4\u00a2\3\2\2\2\u00a5"+
-		"\33\3\2\2\2\u00a6\u00a7\b\17\1\2\u00a7\u00a8\5\36\20\2\u00a8\u00b1\3\2"+
-		"\2\2\u00a9\u00aa\f\4\2\2\u00aa\u00ab\7\31\2\2\u00ab\u00b0\5\36\20\2\u00ac"+
-		"\u00ad\f\3\2\2\u00ad\u00ae\7\30\2\2\u00ae\u00b0\5\36\20\2\u00af\u00a9"+
-		"\3\2\2\2\u00af\u00ac\3\2\2\2\u00b0\u00b3\3\2\2\2\u00b1\u00af\3\2\2\2\u00b1"+
-		"\u00b2\3\2\2\2\u00b2\35\3\2\2\2\u00b3\u00b1\3\2\2\2\u00b4\u00b5\b\20\1"+
-		"\2\u00b5\u00b6\5\"\22\2\u00b6\u00bf\3\2\2\2\u00b7\u00b8\f\4\2\2\u00b8"+
-		"\u00b9\7\33\2\2\u00b9\u00be\5\36\20\5\u00ba\u00bb\f\3\2\2\u00bb\u00bc"+
-		"\7\32\2\2\u00bc\u00be\5\36\20\4\u00bd\u00b7\3\2\2\2\u00bd\u00ba\3\2\2"+
-		"\2\u00be\u00c1\3\2\2\2\u00bf\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\37"+
-		"\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c2\u00c3\7*\2\2\u00c3\u00c4\5\22\n\2\u00c4"+
-		"!\3\2\2\2\u00c5\u00d4\5 \21\2\u00c6\u00d4\7\'\2\2\u00c7\u00d4\7&\2\2\u00c8"+
-		"\u00d4\7)\2\2\u00c9\u00d4\7(\2\2\u00ca\u00d4\7*\2\2\u00cb\u00cc\7 \2\2"+
-		"\u00cc\u00cd\5\34\17\2\u00cd\u00ce\7!\2\2\u00ce\u00d4\3\2\2\2\u00cf\u00d0"+
-		"\7 \2\2\u00d0\u00d1\5\6\4\2\u00d1\u00d2\7!\2\2\u00d2\u00d4\3\2\2\2\u00d3"+
-		"\u00c5\3\2\2\2\u00d3\u00c6\3\2\2\2\u00d3\u00c7\3\2\2\2\u00d3\u00c8\3\2"+
-		"\2\2\u00d3\u00c9\3\2\2\2\u00d3\u00ca\3\2\2\2\u00d3\u00cb\3\2\2\2\u00d3"+
-		"\u00cf\3\2\2\2\u00d4#\3\2\2\2\u00d5\u00e4\t\5\2\2\u00d6\u00e4\5\34\17"+
-		"\2\u00d7\u00e4\7+\2\2\u00d8\u00e4\5\26\f\2\u00d9\u00e4\5\32\16\2\u00da"+
-		"\u00e4\7\27\2\2\u00db\u00dc\7$\2\2\u00dc\u00dd\7 \2\2\u00dd\u00de\7*\2"+
-		"\2\u00de\u00e4\7!\2\2\u00df\u00e0\7 \2\2\u00e0\u00e1\5\6\4\2\u00e1\u00e2"+
-		"\7!\2\2\u00e2\u00e4\3\2\2\2\u00e3\u00d5\3\2\2\2\u00e3\u00d6\3\2\2\2\u00e3"+
-		"\u00d7\3\2\2\2\u00e3\u00d8\3\2\2\2\u00e3\u00d9\3\2\2\2\u00e3\u00da\3\2"+
-		"\2\2\u00e3\u00db\3\2\2\2\u00e3\u00df\3\2\2\2\u00e4%\3\2\2\2\24\61@Yac"+
-		"ty\u0081\u0083\u008c\u009b\u00a4\u00af\u00b1\u00bd\u00bf\u00d3\u00e3";
+		"\4\23\t\23\4\24\t\24\3\2\3\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3"+
+		"\3\5\3\66\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4"+
+		"E\n\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5P\n\5\3\6\3\6\5\6T\n\6\3"+
+		"\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5\7^\n\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3"+
+		"\b\3\b\3\b\3\b\5\bk\n\b\3\b\3\b\3\b\3\b\7\bq\n\b\f\b\16\bt\13\b\3\t\3"+
+		"\t\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13\5\13\u0080\n\13\3\f\3\f\3\f\5"+
+		"\f\u0085\n\f\3\f\3\f\3\f\3\f\3\f\3\f\7\f\u008d\n\f\f\f\16\f\u0090\13\f"+
+		"\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u0098\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3"+
+		"\16\3\16\3\16\3\16\3\16\7\16\u00a5\n\16\f\16\16\16\u00a8\13\16\3\17\3"+
+		"\17\3\17\3\17\3\17\3\17\5\17\u00b0\n\17\3\20\3\20\3\20\3\20\3\20\3\20"+
+		"\3\20\3\20\3\20\7\20\u00bb\n\20\f\20\16\20\u00be\13\20\3\21\3\21\3\21"+
+		"\3\21\3\21\3\21\3\21\3\21\3\21\7\21\u00c9\n\21\f\21\16\21\u00cc\13\21"+
+		"\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23"+
+		"\3\23\3\23\3\23\5\23\u00df\n\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24"+
+		"\3\24\3\24\3\24\3\24\3\24\3\24\5\24\u00ef\n\24\3\24\2\7\16\26\32\36 \25"+
+		"\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&\2\4\3\2\f\21\3\2\n\13\u0104"+
+		"\2(\3\2\2\2\4\65\3\2\2\2\6D\3\2\2\2\bO\3\2\2\2\nS\3\2\2\2\f]\3\2\2\2\16"+
+		"j\3\2\2\2\20u\3\2\2\2\22w\3\2\2\2\24\177\3\2\2\2\26\u0084\3\2\2\2\30\u0097"+
+		"\3\2\2\2\32\u0099\3\2\2\2\34\u00af\3\2\2\2\36\u00b1\3\2\2\2 \u00bf\3\2"+
+		"\2\2\"\u00cd\3\2\2\2$\u00de\3\2\2\2&\u00ee\3\2\2\2()\5\4\3\2)*\7\2\2\3"+
+		"*\3\3\2\2\2+\66\5\6\4\2,-\7 \2\2-.\5\4\3\2./\7!\2\2/\66\3\2\2\2\60\66"+
+		"\5\36\20\2\61\66\5\20\t\2\62\66\5\16\b\2\63\66\5\b\5\2\64\66\5\f\7\2\65"+
+		"+\3\2\2\2\65,\3\2\2\2\65\60\3\2\2\2\65\61\3\2\2\2\65\62\3\2\2\2\65\63"+
+		"\3\2\2\2\65\64\3\2\2\2\66\5\3\2\2\2\678\5\b\5\289\7\22\2\29:\5\4\3\2:"+
+		";\7\23\2\2;<\5\4\3\2<E\3\2\2\2=>\7\24\2\2>?\5\b\5\2?@\7\25\2\2@A\5\4\3"+
+		"\2AB\7\26\2\2BC\5\4\3\2CE\3\2\2\2D\67\3\2\2\2D=\3\2\2\2E\7\3\2\2\2FG\5"+
+		"\n\6\2GH\7\7\2\2HI\5\b\5\2IP\3\2\2\2JK\5\n\6\2KL\7\b\2\2LM\5\b\5\2MP\3"+
+		"\2\2\2NP\5\n\6\2OF\3\2\2\2OJ\3\2\2\2ON\3\2\2\2P\t\3\2\2\2QT\5\16\b\2R"+
+		"T\5\f\7\2SQ\3\2\2\2SR\3\2\2\2T\13\3\2\2\2UV\5&\24\2VW\7\"\2\2WX\5\n\6"+
+		"\2X^\3\2\2\2YZ\5&\24\2Z[\7#\2\2[\\\5\n\6\2\\^\3\2\2\2]U\3\2\2\2]Y\3\2"+
+		"\2\2^\r\3\2\2\2_`\b\b\1\2`a\7\t\2\2ab\7 \2\2bc\5\b\5\2cd\7!\2\2dk\3\2"+
+		"\2\2ef\7 \2\2fg\5\b\5\2gh\7!\2\2hk\3\2\2\2ik\5&\24\2j_\3\2\2\2je\3\2\2"+
+		"\2ji\3\2\2\2kr\3\2\2\2lm\f\6\2\2mn\5\22\n\2no\5\16\b\7oq\3\2\2\2pl\3\2"+
+		"\2\2qt\3\2\2\2rp\3\2\2\2rs\3\2\2\2s\17\3\2\2\2tr\3\2\2\2uv\5&\24\2v\21"+
+		"\3\2\2\2wx\t\2\2\2x\23\3\2\2\2yz\7 \2\2z{\5\26\f\2{|\7!\2\2|\u0080\3\2"+
+		"\2\2}~\7 \2\2~\u0080\7!\2\2\177y\3\2\2\2\177}\3\2\2\2\u0080\25\3\2\2\2"+
+		"\u0081\u0082\b\f\1\2\u0082\u0085\5&\24\2\u0083\u0085\5\6\4\2\u0084\u0081"+
+		"\3\2\2\2\u0084\u0083\3\2\2\2\u0085\u008e\3\2\2\2\u0086\u0087\f\5\2\2\u0087"+
+		"\u0088\7\5\2\2\u0088\u008d\5&\24\2\u0089\u008a\f\3\2\2\u008a\u008b\7\5"+
+		"\2\2\u008b\u008d\5\6\4\2\u008c\u0086\3\2\2\2\u008c\u0089\3\2\2\2\u008d"+
+		"\u0090\3\2\2\2\u008e\u008c\3\2\2\2\u008e\u008f\3\2\2\2\u008f\27\3\2\2"+
+		"\2\u0090\u008e\3\2\2\2\u0091\u0092\7\36\2\2\u0092\u0093\5\26\f\2\u0093"+
+		"\u0094\7\37\2\2\u0094\u0098\3\2\2\2\u0095\u0096\7\36\2\2\u0096\u0098\7"+
+		"\37\2\2\u0097\u0091\3\2\2\2\u0097\u0095\3\2\2\2\u0098\31\3\2\2\2\u0099"+
+		"\u009a\b\16\1\2\u009a\u009b\5&\24\2\u009b\u009c\7\23\2\2\u009c\u009d\5"+
+		"\4\3\2\u009d\u00a6\3\2\2\2\u009e\u009f\f\3\2\2\u009f\u00a0\7\5\2\2\u00a0"+
+		"\u00a1\5&\24\2\u00a1\u00a2\7\23\2\2\u00a2\u00a3\5\4\3\2\u00a3\u00a5\3"+
+		"\2\2\2\u00a4\u009e\3\2\2\2\u00a5\u00a8\3\2\2\2\u00a6\u00a4\3\2\2\2\u00a6"+
+		"\u00a7\3\2\2\2\u00a7\33\3\2\2\2\u00a8\u00a6\3\2\2\2\u00a9\u00aa\7\34\2"+
+		"\2\u00aa\u00ab\5\32\16\2\u00ab\u00ac\7\35\2\2\u00ac\u00b0\3\2\2\2\u00ad"+
+		"\u00ae\7\34\2\2\u00ae\u00b0\7\35\2\2\u00af\u00a9\3\2\2\2\u00af\u00ad\3"+
+		"\2\2\2\u00b0\35\3\2\2\2\u00b1\u00b2\b\20\1\2\u00b2\u00b3\5 \21\2\u00b3"+
+		"\u00bc\3\2\2\2\u00b4\u00b5\f\4\2\2\u00b5\u00b6\7\31\2\2\u00b6\u00bb\5"+
+		" \21\2\u00b7\u00b8\f\3\2\2\u00b8\u00b9\7\30\2\2\u00b9\u00bb\5 \21\2\u00ba"+
+		"\u00b4\3\2\2\2\u00ba\u00b7\3\2\2\2\u00bb\u00be\3\2\2\2\u00bc\u00ba\3\2"+
+		"\2\2\u00bc\u00bd\3\2\2\2\u00bd\37\3\2\2\2\u00be\u00bc\3\2\2\2\u00bf\u00c0"+
+		"\b\21\1\2\u00c0\u00c1\5$\23\2\u00c1\u00ca\3\2\2\2\u00c2\u00c3\f\4\2\2"+
+		"\u00c3\u00c4\7\33\2\2\u00c4\u00c9\5 \21\5\u00c5\u00c6\f\3\2\2\u00c6\u00c7"+
+		"\7\32\2\2\u00c7\u00c9\5 \21\4\u00c8\u00c2\3\2\2\2\u00c8\u00c5\3\2\2\2"+
+		"\u00c9\u00cc\3\2\2\2\u00ca\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb!\3"+
+		"\2\2\2\u00cc\u00ca\3\2\2\2\u00cd\u00ce\7*\2\2\u00ce\u00cf\5\24\13\2\u00cf"+
+		"#\3\2\2\2\u00d0\u00df\5\"\22\2\u00d1\u00df\7\'\2\2\u00d2\u00df\7&\2\2"+
+		"\u00d3\u00df\7)\2\2\u00d4\u00df\7(\2\2\u00d5\u00df\7*\2\2\u00d6\u00d7"+
+		"\7 \2\2\u00d7\u00d8\5\36\20\2\u00d8\u00d9\7!\2\2\u00d9\u00df\3\2\2\2\u00da"+
+		"\u00db\7 \2\2\u00db\u00dc\5\6\4\2\u00dc\u00dd\7!\2\2\u00dd\u00df\3\2\2"+
+		"\2\u00de\u00d0\3\2\2\2\u00de\u00d1\3\2\2\2\u00de\u00d2\3\2\2\2\u00de\u00d3"+
+		"\3\2\2\2\u00de\u00d4\3\2\2\2\u00de\u00d5\3\2\2\2\u00de\u00d6\3\2\2\2\u00de"+
+		"\u00da\3\2\2\2\u00df%\3\2\2\2\u00e0\u00ef\t\3\2\2\u00e1\u00ef\5\36\20"+
+		"\2\u00e2\u00ef\7+\2\2\u00e3\u00ef\5\30\r\2\u00e4\u00ef\5\34\17\2\u00e5"+
+		"\u00ef\7\27\2\2\u00e6\u00e7\7$\2\2\u00e7\u00e8\7 \2\2\u00e8\u00e9\7*\2"+
+		"\2\u00e9\u00ef\7!\2\2\u00ea\u00eb\7 \2\2\u00eb\u00ec\5\6\4\2\u00ec\u00ed"+
+		"\7!\2\2\u00ed\u00ef\3\2\2\2\u00ee\u00e0\3\2\2\2\u00ee\u00e1\3\2\2\2\u00ee"+
+		"\u00e2\3\2\2\2\u00ee\u00e3\3\2\2\2\u00ee\u00e4\3\2\2\2\u00ee\u00e5\3\2"+
+		"\2\2\u00ee\u00e6\3\2\2\2\u00ee\u00ea\3\2\2\2\u00ef\'\3\2\2\2\26\65DOS"+
+		"]jr\177\u0084\u008c\u008e\u0097\u00a6\u00af\u00ba\u00bc\u00c8\u00ca\u00de"+
+		"\u00ee";
 	public static final ATN _ATN =
 		new ATNDeserializer().deserialize(_serializedATN.toCharArray());
 	static {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/8340c0e2/metron-platform/metron-common/src/test/java/org/apache/metron/common/stellar/StellarTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/stellar/StellarTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/stellar/StellarTest.java
index 29df491..dabf293 100644
--- a/metron-platform/metron-common/src/test/java/org/apache/metron/common/stellar/StellarTest.java
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/stellar/StellarTest.java
@@ -41,6 +41,7 @@ import static org.apache.metron.common.dsl.functions.resolver.ClasspathFunctionR
 import static org.apache.metron.common.utils.StellarProcessorUtils.run;
 import static org.apache.metron.common.utils.StellarProcessorUtils.runPredicate;
 
+@SuppressWarnings("ALL")
 public class StellarTest {
 
   @Test
@@ -192,7 +193,14 @@ public class StellarTest {
       String query = "if 1 + 1 < 2 then 'one' else 'two'";
       Assert.assertEquals("two", run(query, new HashMap<>()));
     }
-
+    {
+      String query = "if 1 + 1 <= 2 AND 1 + 2 in [3] then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, new HashMap<>()));
+    }
+    {
+      String query = "if 1 + 1 <= 2 AND (1 + 2 in [3]) then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, new HashMap<>()));
+    }
     {
       String query = "if not(1 < 2) then 'one' else 'two'";
       Assert.assertEquals("two", run(query, new HashMap<>()));
@@ -210,6 +218,34 @@ public class StellarTest {
       Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
     }
     {
+      String query = "if one == very_nearly_one OR one == very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if one == very_nearly_one OR one != very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if one != very_nearly_one OR one == very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if 'foo' in ['foo'] OR one == very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if ('foo' in ['foo']) OR one == very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("one", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if not('foo' in ['foo']) OR one == very_nearly_one then 'one' else 'two'";
+      Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
+      String query = "if not('foo' in ['foo'] OR one == very_nearly_one) then 'one' else 'two'";
+      Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1, "very_nearly_one", 1.0000001)));
+    }
+    {
       String query = "1 < 2 ? 'one' : 'two'";
       Assert.assertEquals("one", run(query, new HashMap<>()));
     }
@@ -225,6 +261,30 @@ public class StellarTest {
       String query = "1 < 2 ? one*3 : 'two'";
       Assert.assertTrue(Math.abs(3 - (int) run(query, ImmutableMap.of("one", 1))) < 1e-6);
     }
+    {
+      String query = "1 < 2 AND 1 < 2 ? one*3 : 'two'";
+      Assert.assertTrue(Math.abs(3 - (int) run(query, ImmutableMap.of("one", 1))) < 1e-6);
+    }
+    {
+      String query = "1 < 2 AND 1 > 2 ? one*3 : 'two'";
+      Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1)));
+    }
+    {
+      String query = "1 > 2 AND 1 < 2 ? one*3 : 'two'";
+      Assert.assertEquals("two", run(query, ImmutableMap.of("one", 1)));
+    }
+    {
+      String query = "1 < 2 AND 'foo' in ['', 'foo'] ? one*3 : 'two'";
+      Assert.assertEquals(3, run(query, ImmutableMap.of("one", 1)));
+    }
+    {
+      String query = "1 < 2 AND ('foo' in ['', 'foo']) ? one*3 : 'two'";
+      Assert.assertEquals(3, run(query, ImmutableMap.of("one", 1)));
+    }
+    {
+      String query = "'foo' in ['', 'foo'] ? one*3 : 'two'";
+      Assert.assertEquals(3, run(query, ImmutableMap.of("one", 1)));
+    }
   }
 
   @Test
@@ -457,6 +517,26 @@ public class StellarTest {
   }
 
   @Test
+  public void inNestedInStatement() throws Exception {
+    final Map<String, String> variableMap = new HashMap<>();
+
+    Assert.assertTrue(runPredicate("('grok' not in 'foobar') == true", variableMap::get));
+    Assert.assertTrue(runPredicate("'grok' not in ('foobar' == true)", variableMap::get));
+    Assert.assertFalse(runPredicate("'grok' in 'grokbar' == true", variableMap::get));
+    Assert.assertTrue(runPredicate("false in 'grokbar' == true", variableMap::get));
+
+    Assert.assertTrue(runPredicate("('foo' in 'foobar') == true", variableMap::get));
+    Assert.assertFalse(runPredicate("'foo' in ('foobar' == true)", variableMap::get));
+    Assert.assertTrue(runPredicate("'grok' not in 'grokbar' == true", variableMap::get));
+    Assert.assertTrue(runPredicate("false in 'grokbar' == true", variableMap::get));
+    Assert.assertTrue(runPredicate("'foo' in ['foo'] AND 'bar' in ['bar']", variableMap::get));
+    Assert.assertTrue(runPredicate("('foo' in ['foo']) AND 'bar' in ['bar']", variableMap::get));
+    Assert.assertTrue(runPredicate("'foo' in ['foo'] AND ('bar' in ['bar'])", variableMap::get));
+    Assert.assertTrue(runPredicate("('foo' in ['foo']) AND ('bar' in ['bar'])", variableMap::get));
+    Assert.assertTrue(runPredicate("('foo' in ['foo'] AND 'bar' in ['bar'])", variableMap::get));
+  }
+
+  @Test
   public void testExists() throws Exception {
     final Map<String, String> variableMap = new HashMap<String, String>() {{
       put("foo", "casey");
@@ -494,6 +574,8 @@ public class StellarTest {
     }};
     Assert.assertTrue(runPredicate("IN_SUBNET(ip, '192.168.0.0/24')", v -> variableMap.get(v)));
     Assert.assertTrue(runPredicate("IN_SUBNET(ip, '192.168.0.0/24', '11.0.0.0/24')", v -> variableMap.get(v)));
+    Assert.assertTrue(runPredicate("IN_SUBNET(ip, '192.168.0.0/24', '11.0.0.0/24') in [true]", v -> variableMap.get(v)));
+    Assert.assertTrue(runPredicate("true in IN_SUBNET(ip, '192.168.0.0/24', '11.0.0.0/24')", v -> variableMap.get(v)));
     Assert.assertFalse(runPredicate("IN_SUBNET(ip_dst_addr, '192.168.0.0/24', '11.0.0.0/24')", v -> variableMap.get(v)));
     Assert.assertFalse(runPredicate("IN_SUBNET(other_ip, '192.168.0.0/24')", v -> variableMap.get(v)));
     Assert.assertFalse(runPredicate("IN_SUBNET(blah, '192.168.0.0/24')", v -> variableMap.get(v)));


[04/17] incubator-metron git commit: METRON-675: Make Threat Triage rules able to be assigned names and comments closes apache/incubator-metron#426

Posted by ce...@apache.org.
METRON-675: Make Threat Triage rules able to be assigned names and comments closes apache/incubator-metron#426


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/75d122d1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/75d122d1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/75d122d1

Branch: refs/heads/Metron_0.3.1
Commit: 75d122d1e4c3c7d7c4bd9e3fe3bdc67a133de463
Parents: ad8724e
Author: cstella <ce...@gmail.com>
Authored: Wed Feb 1 13:46:07 2017 -0500
Committer: cstella <ce...@gmail.com>
Committed: Wed Feb 1 13:46:07 2017 -0500

----------------------------------------------------------------------
 metron-analytics/metron-maas-service/README.md  |  9 +-
 metron-analytics/metron-statistics/README.md    |  9 +-
 .../enrichment/threatintel/RiskLevelRule.java   | 90 +++++++++++++++++++
 .../threatintel/ThreatTriageConfig.java         | 27 ++++--
 .../org/apache/metron/common/writer/test.json   | 11 ++-
 .../SensorEnrichmentUpdateConfigTest.java       |  9 +-
 metron-platform/metron-enrichment/README.md     | 29 +++++-
 .../config/zookeeper/enrichments/snort.json     |  9 +-
 .../enrichment/bolt/ThreatIntelJoinBolt.java    |  2 +-
 .../triage/ThreatTriageProcessor.java           |  7 +-
 .../bolt/ThreatIntelJoinBoltTest.java           | 27 ++++--
 .../threatintel/triage/ThreatTriageTest.java    | 54 ++++++++---
 .../main/config/zookeeper/enrichments/test.json |  9 +-
 metron-platform/metron-management/README.md     | 79 ++++++++--------
 .../management/ThreatTriageFunctions.java       | 86 ++++++++++++------
 .../management/ConfigurationFunctionsTest.java  |  2 +-
 .../management/ThreatTriageFunctionsTest.java   | 95 ++++++++++++--------
 17 files changed, 389 insertions(+), 165 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-analytics/metron-maas-service/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/README.md b/metron-analytics/metron-maas-service/README.md
index d1172b1..dd55558 100644
--- a/metron-analytics/metron-maas-service/README.md
+++ b/metron-analytics/metron-maas-service/README.md
@@ -169,9 +169,12 @@ Now that we have a deployed model, let's adjust the configurations for the Squid
   "threatIntel" : {
     "fieldMap":{},
     "triageConfig" : {
-      "riskLevelRules" : {
-        "is_malicious == 'malicious'" : 100
-      },
+      "riskLevelRules" : [
+        {
+          "rule" : "is_malicious == 'malicious'",
+          "score" : 100
+        }
+      ],
       "aggregator" : "MAX"
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-analytics/metron-statistics/README.md
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-statistics/README.md b/metron-analytics/metron-statistics/README.md
index 7b26c33..257fd0b 100644
--- a/metron-analytics/metron-statistics/README.md
+++ b/metron-analytics/metron-statistics/README.md
@@ -352,9 +352,12 @@ PROFILE_GET( 'sketchy_mad', 'global', 10, 'MINUTES') ), value)"
     "fieldMap": { },
     "fieldToTypeMap": { },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "parser_score > 3.5" : 10
-      },
+      "riskLevelRules" : [
+        {
+          "rule" : "parser_score > 3.5",
+          "score" : 10
+        }
+      ],
       "aggregator" : "MAX"
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/RiskLevelRule.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/RiskLevelRule.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/RiskLevelRule.java
new file mode 100644
index 0000000..7bf1d07
--- /dev/null
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/RiskLevelRule.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.common.configuration.enrichment.threatintel;
+
+public class RiskLevelRule {
+  String name;
+  String comment;
+  String rule;
+  Number score;
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  public String getRule() {
+    return rule;
+  }
+
+  public void setRule(String rule) {
+    this.rule = rule;
+  }
+
+  public Number getScore() {
+    return score;
+  }
+
+  public void setScore(Number score) {
+    this.score = score;
+  }
+
+  @Override
+  public String toString() {
+    return "RiskLevelRule{" +
+            "name='" + name + '\'' +
+            ", comment='" + comment + '\'' +
+            ", rule='" + rule + '\'' +
+            ", score=" + score +
+            '}';
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) return true;
+    if (o == null || getClass() != o.getClass()) return false;
+
+    RiskLevelRule that = (RiskLevelRule) o;
+
+    if (name != null ? !name.equals(that.name) : that.name != null) return false;
+    if (comment != null ? !comment.equals(that.comment) : that.comment != null) return false;
+    if (rule != null ? !rule.equals(that.rule) : that.rule != null) return false;
+    return score != null ? score.equals(that.score) : that.score == null;
+
+  }
+
+  @Override
+  public int hashCode() {
+    int result = name != null ? name.hashCode() : 0;
+    result = 31 * result + (comment != null ? comment.hashCode() : 0);
+    result = 31 * result + (rule != null ? rule.hashCode() : 0);
+    result = 31 * result + (score != null ? score.hashCode() : 0);
+    return result;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/ThreatTriageConfig.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/ThreatTriageConfig.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/ThreatTriageConfig.java
index 00f0087..c3f5e55 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/ThreatTriageConfig.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/configuration/enrichment/threatintel/ThreatTriageConfig.java
@@ -24,24 +24,35 @@ import org.apache.metron.common.aggregator.Aggregators;
 import org.apache.metron.common.stellar.StellarPredicateProcessor;
 import com.fasterxml.jackson.annotation.JsonIgnore;
 
-import java.util.HashMap;
-import java.util.Map;
+import java.util.*;
 
 public class ThreatTriageConfig {
-  private Map<String, Number> riskLevelRules = new HashMap<>();
+  private List<RiskLevelRule> riskLevelRules = new ArrayList<>();
   private Aggregators aggregator = Aggregators.MAX;
   private Map<String, Object> aggregationConfig = new HashMap<>();
 
-  public Map<String, Number> getRiskLevelRules() {
+  public List<RiskLevelRule> getRiskLevelRules() {
     return riskLevelRules;
   }
 
-  public void setRiskLevelRules(Map<String, Number> riskLevelRules) {
-    this.riskLevelRules = riskLevelRules;
+  public void setRiskLevelRules(List<RiskLevelRule> riskLevelRules) {
+    List<RiskLevelRule> rules = new ArrayList<>();
+    Set<String> ruleIndex = new HashSet<>();
     StellarPredicateProcessor processor = new StellarPredicateProcessor();
-    for(String rule : riskLevelRules.keySet()) {
-      processor.validate(rule);
+    for(RiskLevelRule rule : riskLevelRules) {
+      if(rule.getRule() == null || rule.getScore() == null) {
+        throw new IllegalStateException("Risk level rules must contain both a rule and a score.");
+      }
+      if(ruleIndex.contains(rule.getRule())) {
+        continue;
+      }
+      else {
+        ruleIndex.add(rule.getRule());
+      }
+      processor.validate(rule.getRule());
+      rules.add(rule);
     }
+    this.riskLevelRules = rules;
   }
 
   public Aggregators getAggregator() {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-common/src/main/java/org/apache/metron/common/writer/test.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/writer/test.json b/metron-platform/metron-common/src/main/java/org/apache/metron/common/writer/test.json
index 1defcd6..023cd63 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/writer/test.json
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/writer/test.json
@@ -19,10 +19,13 @@
       "ip_dst_addr" : ["malicious_ip"]
     },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "ip_src_addr == '31.24.30.31'" : "Test"
-      },
+      "riskLevelRules" : [
+        {
+          "rule" : "ip_src_addr == '31.24.30.31'",
+          "score" : 10
+        }
+      ],
       "aggregator" : "MAX"
     }
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentUpdateConfigTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentUpdateConfigTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentUpdateConfigTest.java
index 01a697b..65ec920 100644
--- a/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentUpdateConfigTest.java
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/configuration/SensorEnrichmentUpdateConfigTest.java
@@ -47,9 +47,12 @@ public class SensorEnrichmentUpdateConfigTest {
          ,"ip_src_addr" : [ "malicious_ip" ]
                           },
         "triageConfig" : {
-          "riskLevelRules" : {
-            "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))" : 10
-                             },
+          "riskLevelRules" : [
+            {
+              "rule" : "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))",
+              "score" : 10
+            }
+                             ],
           "aggregator" : "MAX"
                         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/README.md b/metron-platform/metron-enrichment/README.md
index ba518cb..d08929b 100644
--- a/metron-platform/metron-enrichment/README.md
+++ b/metron-platform/metron-enrichment/README.md
@@ -123,9 +123,27 @@ The `triageConfig` field is also a complex field and it bears some description:
 
 | Field            | Description                                                                                                                                             | Example                                                                  |
 |------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------|
-| `riskLevelRules` | The mapping of Stellar (see above) queries to a score.                                                                                                  | `"riskLevelRules" : { "IN_SUBNET(ip_dst_addr, '192.168.0.0/24')" : 10 }` |
+| `riskLevelRules` | This is a list of rules (represented as Stellar expressions) associated with scores with optional names and comments                                    |  see below|
 | `aggregator`     | An aggregation function that takes all non-zero scores representing the matching queries from `riskLevelRules` and aggregates them into a single score. | `"MAX"`                                                                  |
 
+A risk level rule is of the following format:
+* `name` : The name of the threat triage rule
+* `comment` : A comment describing the rule
+* `rule` : The rule, represented as a Stellar statement
+* `score` : Associated threat triage score for the rule
+
+An example of a rule is as follows:
+```
+    "riskLevelRules" : [ 
+        { 
+          "name" : "is internal"
+        , "comment" : "determines if the destination is internal."
+        , "rule" : "IN_SUBNET(ip_dst_addr, '192.168.0.0/24')"
+        , "score" : 10 
+        }
+                       ]
+```
+
 The supported aggregation functions are:
 * `MAX` : The max of all of the associated values for matching queries
 * `MIN` : The min of all of the associated values for matching queries
@@ -177,9 +195,12 @@ An example configuration for the YAF sensor is as follows:
       ]
     },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "ip_src_addr == '10.0.2.3' or ip_dst_addr == '10.0.2.3'" : 10
-      },
+      "riskLevelRules" : [ 
+        {
+          "rule" : "ip_src_addr == '10.0.2.3' or ip_dst_addr == '10.0.2.3'",
+          "score" : 10
+        }
+      ],
       "aggregator" : "MAX"
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/src/main/config/zookeeper/enrichments/snort.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/config/zookeeper/enrichments/snort.json b/metron-platform/metron-enrichment/src/main/config/zookeeper/enrichments/snort.json
index 5bf49d7..8cd8197 100644
--- a/metron-platform/metron-enrichment/src/main/config/zookeeper/enrichments/snort.json
+++ b/metron-platform/metron-enrichment/src/main/config/zookeeper/enrichments/snort.json
@@ -17,9 +17,12 @@
       "ip_dst_addr" : ["malicious_ip"]
     },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))" : 10
-      },
+      "riskLevelRules" : [
+        {
+          "rule" : "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))",
+          "score" : 10
+        }
+      ],
       "aggregator" : "MAX"
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
index 6d0ea44..6584a27 100644
--- a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBolt.java
@@ -136,7 +136,7 @@ public class ThreatIntelJoinBolt extends EnrichmentJoinBolt {
         ThreatTriageProcessor threatTriageProcessor = new ThreatTriageProcessor(config, functionResolver, stellarContext);
         Double triageLevel = threatTriageProcessor.apply(ret);
         if(LOG.isDebugEnabled()) {
-          String rules = Joiner.on('\n').join(triageConfig.getRiskLevelRules().entrySet());
+          String rules = Joiner.on('\n').join(triageConfig.getRiskLevelRules());
           LOG.debug("Marked " + sourceType + " as triage level " + triageLevel + " with rules " + rules);
         }
         if(triageLevel != null && triageLevel > 0) {

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/src/main/java/org/apache/metron/threatintel/triage/ThreatTriageProcessor.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/threatintel/triage/ThreatTriageProcessor.java b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/threatintel/triage/ThreatTriageProcessor.java
index 824e94c..0c88437 100644
--- a/metron-platform/metron-enrichment/src/main/java/org/apache/metron/threatintel/triage/ThreatTriageProcessor.java
+++ b/metron-platform/metron-enrichment/src/main/java/org/apache/metron/threatintel/triage/ThreatTriageProcessor.java
@@ -20,6 +20,7 @@ package org.apache.metron.threatintel.triage;
 
 import com.google.common.base.Function;
 import org.apache.metron.common.configuration.enrichment.SensorEnrichmentConfig;
+import org.apache.metron.common.configuration.enrichment.threatintel.RiskLevelRule;
 import org.apache.metron.common.configuration.enrichment.threatintel.ThreatIntelConfig;
 import org.apache.metron.common.configuration.enrichment.threatintel.ThreatTriageConfig;
 import org.apache.metron.common.dsl.*;
@@ -55,9 +56,9 @@ public class ThreatTriageProcessor implements Function<Map, Double> {
     List<Number> scores = new ArrayList<>();
     StellarPredicateProcessor predicateProcessor = new StellarPredicateProcessor();
     VariableResolver resolver = new MapVariableResolver(input, sensorConfig.getConfiguration(), threatIntelConfig.getConfig());
-    for(Map.Entry<String, Number> kv : threatTriageConfig.getRiskLevelRules().entrySet()) {
-      if(predicateProcessor.parse(kv.getKey(), resolver, functionResolver, context)) {
-        scores.add(kv.getValue());
+    for(RiskLevelRule rule : threatTriageConfig.getRiskLevelRules()) {
+      if(predicateProcessor.parse(rule.getRule(), resolver, functionResolver, context)) {
+        scores.add(rule.getScore());
       }
     }
     return threatTriageConfig.getAggregator().aggregate(scores, threatTriageConfig.getAggregationConfig());

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
index d942d9b..60687d8 100644
--- a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/enrichment/bolt/ThreatIntelJoinBoltTest.java
@@ -89,9 +89,12 @@ public class ThreatIntelJoinBoltTest extends BaseEnrichmentBoltTest {
 
   /**
    * {
-   *  "riskLevelRules" : {
-   *    "enrichedField1 == 'enrichedValue1'" : 10
-   *  },
+   *  "riskLevelRules" : [
+   *   {
+   *    "rule" : "enrichedField1 == 'enrichedValue1'",
+   *    "score" : 10
+   *   }
+   *  ],
    *  "aggregator" : "MAX"
    * }
    */
@@ -105,9 +108,12 @@ public class ThreatIntelJoinBoltTest extends BaseEnrichmentBoltTest {
 
   /**
    * {
-   *  "riskLevelRules" : {
-   *    "enrichedField1 == 'enrichedValue1": 10
-   *  },
+   *  "riskLevelRules" : [
+   *  {
+   *    "rule" : "enrichedField1 == 'enrichedValue1",
+   *    "score" : 10
+   *  }
+   *  ],
    *  "aggregator" : "MAX"
    * }
    */
@@ -126,9 +132,12 @@ public class ThreatIntelJoinBoltTest extends BaseEnrichmentBoltTest {
 
   /**
    * {
-   *   "riskLevelRules": {
-   *      "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))": 10
-   *   },
+   *   "riskLevelRules": [
+   *   {
+   *      "rule" : "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))",
+   *      "score" : 10
+   *   }
+   *   ],
    *   "aggregator": "MAX"
    * }
    */

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-enrichment/src/test/java/org/apache/metron/threatintel/triage/ThreatTriageTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/threatintel/triage/ThreatTriageTest.java b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/threatintel/triage/ThreatTriageTest.java
index 92191b6..d3389af 100644
--- a/metron-platform/metron-enrichment/src/test/java/org/apache/metron/threatintel/triage/ThreatTriageTest.java
+++ b/metron-platform/metron-enrichment/src/test/java/org/apache/metron/threatintel/triage/ThreatTriageTest.java
@@ -34,12 +34,26 @@ public class ThreatTriageTest {
    * {
    *  "threatIntel": {
    *    "triageConfig": {
-   *      "riskLevelRules" : {
-   *        "user.type in [ 'admin', 'power' ] and asset.type == 'web'" : 10,
-   *        "asset.type == 'web'" : 5,
-   *        "user.type == 'normal'  and asset.type == 'web'" : 0,
-   *        "user.type in whitelist" : -1
-   *      },
+   *      "riskLevelRules" : [
+   *        {
+   *          "name" : "rule 1",
+   *          "rule" : "user.type in [ 'admin', 'power' ] and asset.type == 'web'",
+   *          "score" : 10
+   *        },
+   *        {
+   *         "comment" : "web type!",
+   *         "rule" : "asset.type == 'web'",
+   *         "score" : 5
+   *        },
+   *        {
+   *          "rule" : "user.type == 'normal'  and asset.type == 'web'",
+   *          "score" : 0
+   *        },
+   *        {
+   *          "rule" : "user.type in whitelist",
+   *          "score" : -1
+   *        }
+   *      ],
    *      "aggregator" : "MAX"
    *    },
    *    "config": {
@@ -115,11 +129,20 @@ public class ThreatTriageTest {
    * {
    *  "threatIntel": {
    *  "triageConfig": {
-   *    "riskLevelRules" : {
-   *      "user.type in [ 'admin', 'power' ] and asset.type == 'web'" : 10,
-   *      "asset.type == 'web'" : 5,
-   *      "user.type == 'normal' and asset.type == 'web'" : 0
-   *     },
+   *    "riskLevelRules" : [
+   *      {
+   *        "rule" : "user.type in [ 'admin', 'power' ] and asset.type == 'web'",
+   *        "score" : 10
+   *      },
+   *      {
+   *        "rule" : "asset.type == 'web'",
+   *        "score" : 5
+   *      },
+   *      {
+   *        "rule" : "user.type == 'normal' and asset.type == 'web'",
+   *        "score" : 0
+   *      }
+   *     ],
    *     "aggregator" : "POSITIVE_MEAN"
    *    }
    *  }
@@ -167,9 +190,12 @@ public class ThreatTriageTest {
    * {
    *    "threatIntel" : {
    *      "triageConfig": {
-   *        "riskLevelRules": {
-   *          "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))" : 10
-   *        },
+   *        "riskLevelRules": [
+   *          {
+   *            "rule" : "not(IN_SUBNET(ip_dst_addr, '192.168.0.0/24'))",
+   *            "score" : 10
+   *          }
+   *        ],
    *        "aggregator" : "MAX"
    *      }
    *    }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-integration-test/src/main/config/zookeeper/enrichments/test.json
----------------------------------------------------------------------
diff --git a/metron-platform/metron-integration-test/src/main/config/zookeeper/enrichments/test.json b/metron-platform/metron-integration-test/src/main/config/zookeeper/enrichments/test.json
index d99f741..77e0808 100644
--- a/metron-platform/metron-integration-test/src/main/config/zookeeper/enrichments/test.json
+++ b/metron-platform/metron-integration-test/src/main/config/zookeeper/enrichments/test.json
@@ -59,9 +59,12 @@
       ]
     },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "ip_src_addr == '10.0.2.3' or ip_dst_addr == '10.0.2.3'" : 10
-      },
+      "riskLevelRules" : [
+        {
+          "rule" : "ip_src_addr == '10.0.2.3' or ip_dst_addr == '10.0.2.3'",
+          "score": 10
+        }
+      ],
       "aggregator" : "MAX"
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-management/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-management/README.md b/metron-platform/metron-management/README.md
index cf922ba..f3fea80 100644
--- a/metron-platform/metron-management/README.md
+++ b/metron-platform/metron-management/README.md
@@ -166,7 +166,7 @@ The functions are split roughly into a few sections:
   * Returns: The String representation of the config in zookeeper
 
 
-### Enrichment Functions
+### Indexing Functions
 
 * `INDEXING_SET_BATCH`
   * Description: Set batch size
@@ -189,6 +189,9 @@ The functions are split roughly into a few sections:
     * writer - The writer to update (e.g. elasticsearch, solr or hdfs)
     * sensor - sensor name
   * Returns: The String representation of the config in zookeeper
+
+### Enrichment Functions
+
 * `ENRICHMENT_STELLAR_TRANSFORM_ADD`
   * Description: Add stellar field transformation.
   * Input:
@@ -219,6 +222,7 @@ The functions are split roughly into a few sections:
   * Input:
     * sensorConfig - Sensor config to add transformation to.
     * stellarTransforms - A Map associating stellar rules to scores
+    * triageRules - Map (or list of Maps) representing a triage rule.  It must contain 'rule' and 'score' keys, the stellar expression for the rule and triage score respectively.  It may contain 'name' and 'comment', the name of the rule and comment associated with the rule respectively."
   * Returns: The String representation of the threat triage rules
 * `THREAT_TRIAGE_PRINT`
   * Description: Retrieve stellar enrichment transformations.
@@ -229,7 +233,7 @@ The functions are split roughly into a few sections:
   * Description: Remove stellar threat triage rule(s).
   * Input:
     * sensorConfig - Sensor config to add transformation to.
-    * stellarTransforms - A list of stellar rules to remove
+    * rules - A list of stellar rules or rule names to remove
   * Returns: The String representation of the enrichment config
 * `THREAT_TRIAGE_SET_AGGREGATOR`
   * Description: Set the threat triage aggregator.
@@ -529,8 +533,6 @@ Functions loaded, you may refer to functions now...
 [Stellar]>>> # Just to make sure it looks right, we can view the JSON
 [Stellar]>>> squid_enrichment_config
 {
-  "index" : "squid",
-  "batchSize" : 0,
   "enrichment" : {
     "fieldMap" : { },
     "fieldToTypeMap" : { },
@@ -541,16 +543,13 @@ Functions loaded, you may refer to functions now...
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : { },
+      "riskLevelRules" : [ ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }
   },
   "configuration" : { }
 }
-[Stellar]>>> # Wait, that batch size looks terrible.  That is because it did not exist in zookeeper, so it is the default.
-[Stellar]>>> # We can correct it, thankfully. 
-[Stellar]>>> squid_enrichment_config := INDEXING_SET_BATCH( squid_enrichment_config, 100)
 [Stellar]>>> # Now that we have a config, we can add an enrichment to the Stellar adapter
 [Stellar]>>> # We should make sure that the current enrichment does not have any already
 [Stellar]>>> ?ENRICHMENT_STELLAR_TRANSFORM_PRINT
@@ -659,7 +658,7 @@ Returns: The String representation of the config in zookeeper
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : { },
+      "riskLevelRules" : [ ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }
@@ -689,7 +688,7 @@ Returns: The String representation of the config in zookeeper
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : { },
+      "riskLevelRules" : [ ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }
@@ -741,7 +740,7 @@ Returns: The String representation of the config in zookeeper
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : { },
+      "riskLevelRules" : [ ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }
@@ -765,8 +764,6 @@ Please note that functions are loading lazily in the background and will be unav
 26751 [Thread-1] INFO  o.r.Reflections - Reflections took 24407 ms to scan 22 urls, producing 17898 keys and 121520 values 
 26828 [Thread-1] INFO  o.a.m.c.d.FunctionResolverSingleton - Found 84 Stellar Functions...
 Functions loaded, you may refer to functions now...
-[Stellar]>>> # Just as in the previous example, we should adjust the batch size
-[Stellar]>>> squid_enrichment_config := INDEXING_SET_BATCH( squid_enrichment_config, 100)
 [Stellar]>>> # We should not have any threat triage rules
 [Stellar]>>> THREAT_TRIAGE_PRINT(squid_enrichment_config)
 \u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
@@ -849,20 +846,19 @@ Returns: A Map associated with the indicator and enrichment type.  Empty otherwi
 [Stellar]>>> non_us := whois_info.home_country != 'US'
 [Stellar]>>> is_local := IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')
 [Stellar]>>> is_both := whois_info.home_country != 'US' && IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')
-[Stellar]>>> rules := { SHELL_GET_EXPRESSION('non_us') : 10, SHELL_GET_EXPRESSION('is_local') : 20, SHELL_GET_EXPRESSION('is_both') : 50 }
+[Stellar]>>> rules := [ { 'name' : 'is non-us', 'rule' : SHELL_GET_EXPRESSION('non_us'), 'score' : 10 } , { 'name' : 'is local', 'rule' : SHELL_GET_EXPRESSION('is_local '), 'score' : 20 } , { 'name' : 'both non-us and local', 'comment' : 'union of both rules.',  'rule' : SHELL_GET_EXPRESSION('is_both'), 'score' : 50 } ]  
 [Stellar]>>> # Now that we have our rules staged, we can add them to our config.
 [Stellar]>>> squid_enrichment_config_new := THREAT_TRIAGE_ADD( squid_enrichment_config_new, rules )
 [Stellar]>>> THREAT_TRIAGE_PRINT(squid_enrichment_config_new)
-\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
-\u2551 Triage Rule                                                                                                       \u2502 Score \u2551
-\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
-\u2551 whois_info.home_country != 'US' && IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21') \u2502 50    \u2551
-\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
-\u2551 IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')                                    \u2502 20    \u2551
-\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
-\u2551 whois_info.home_country != 'US'                                                                                   \u2502 10    \u2551
-\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
-
+\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
+\u2551 Name                  \u2502 Comment              \u2502 Triage Rule                                                                                                       \u2502 Score \u2551
+\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
+\u2551 is non-us             \u2502                      \u2502 whois_info.home_country != 'US'                                                                                   \u2502 10    \u2551
+\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
+\u2551 is local              \u2502                      \u2502 IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')                                    \u2502 20    \u2551
+\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
+\u2551 both non-us and local \u2502 union of both rules. \u2502 whois_info.home_country != 'US' && IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21') \u2502 50    \u2551
+\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
 
 Aggregation: MAX
 [Stellar]>>> # Looks good, we can push the configs up
@@ -870,8 +866,6 @@ Aggregation: MAX
 [Stellar]>>> # And admire the resulting JSON that you did not have to edit directly.
 [Stellar]>>> CONFIG_GET('ENRICHMENT', 'squid')
 {
-  "index" : "squid",
-  "batchSize" : 100,
   "enrichment" : {
     "fieldMap" : {
       "stellar" : {
@@ -894,11 +888,20 @@ Aggregation: MAX
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : {
-        "whois_info.home_country != 'US' && IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')" : 50.0,
-        "IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')" : 20.0,
-        "whois_info.home_country != 'US'" : 10.0
-      },
+      "riskLevelRules" : [ {
+        "name" : "is non-us",
+        "rule" : "whois_info.home_country != 'US'",
+        "score" : 10.0
+      }, {
+        "name" : "is local",
+        "rule" : "IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')",
+        "score" : 20.0
+      }, {
+        "name" : "both non-us and local",
+        "comment" : "union of both rules.",
+        "rule" : "whois_info.home_country != 'US' && IN_SUBNET( if IS_IP(ip_src_addr) then ip_src_addr else NULL, '192.168.0.0/21')",
+        "score" : 50.0
+      } ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }
@@ -909,19 +912,17 @@ Aggregation: MAX
 [Stellar]>>> squid_enrichment_config_new := THREAT_TRIAGE_REMOVE( squid_enrichment_config_new, [ SHELL_GET_EXPRESSION('non_us') , SHELL_GET_EXPRESSION('is_local') , SHELL_GET_EXPRES 
 SION('is_both') ] )
 [Stellar]>>> THREAT_TRIAGE_PRINT(squid_enrichment_config_new)
-\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
-\u2551 Triage Rule \u2502 Score \u2551
-\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
-\u2551 (empty)             \u2551
-\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
+\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
+\u2551 Name \u2502 Comment \u2502 Triage Rule \u2502 Score \u2551
+\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
+\u2551 (empty)                              \u2551
+\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
 
 [Stellar]>>> # and push configs
 [Stellar]>>> CONFIG_PUT('ENRICHMENT', squid_enrichment_config_new, 'squid')
 [Stellar]>>> # And admire the resulting JSON that is devoid of threat triage rules.
 [Stellar]>>> CONFIG_GET('ENRICHMENT', 'squid')
 {
-  "index" : "squid",
-  "batchSize" : 100,
   "enrichment" : {
     "fieldMap" : {
       "stellar" : {
@@ -944,7 +945,7 @@ SION('is_both') ] )
     "fieldToTypeMap" : { },
     "config" : { },
     "triageConfig" : {
-      "riskLevelRules" : { },
+      "riskLevelRules" : [ ],
       "aggregator" : "MAX",
       "aggregationConfig" : { }
     }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-management/src/main/java/org/apache/metron/management/ThreatTriageFunctions.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-management/src/main/java/org/apache/metron/management/ThreatTriageFunctions.java b/metron-platform/metron-management/src/main/java/org/apache/metron/management/ThreatTriageFunctions.java
index 966f281..4a28cce 100644
--- a/metron-platform/metron-management/src/main/java/org/apache/metron/management/ThreatTriageFunctions.java
+++ b/metron-platform/metron-management/src/main/java/org/apache/metron/management/ThreatTriageFunctions.java
@@ -19,9 +19,12 @@ package org.apache.metron.management;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.collect.ArrayListMultimap;
+import com.google.common.collect.Multimap;
 import com.jakewharton.fliptables.FlipTable;
 import org.apache.log4j.Logger;
 import org.apache.metron.common.configuration.enrichment.SensorEnrichmentConfig;
+import org.apache.metron.common.configuration.enrichment.threatintel.RiskLevelRule;
 import org.apache.metron.common.configuration.enrichment.threatintel.ThreatIntelConfig;
 import org.apache.metron.common.configuration.enrichment.threatintel.ThreatTriageConfig;
 import org.apache.metron.common.dsl.Context;
@@ -32,9 +35,7 @@ import org.apache.metron.common.utils.ConversionUtils;
 import org.apache.metron.common.utils.JSONUtils;
 
 import java.io.IOException;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import static org.apache.metron.common.configuration.ConfigurationType.ENRICHMENT;
 import static org.apache.metron.management.EnrichmentConfigFunctions.getConfig;
@@ -70,17 +71,19 @@ public class ThreatTriageFunctions {
       if(triageConfig == null) {
         return "";
       }
-      Map<String, Number> triageRules = triageConfig.getRiskLevelRules();
+      List<RiskLevelRule> triageRules = triageConfig.getRiskLevelRules();
       if(triageRules == null) {
-        triageRules = new LinkedHashMap<>();
+        triageRules = new ArrayList<>();
       }
-      String[] headers = new String[] {"Triage Rule", "Score"};
-      String[][] data = new String[triageRules.size()][2];
+      String[] headers = new String[] {"Name", "Comment", "Triage Rule", "Score"};
+      String[][] data = new String[triageRules.size()][4];
       int i = 0;
-      for(Map.Entry<String, Number> kv : triageRules.entrySet()) {
-        double d = kv.getValue().doubleValue();
+      for(RiskLevelRule rule : triageRules) {
+        double d = rule.getScore().doubleValue();
         String val = d == (long)d ? String.format("%d", (long)d) : String.format("%s", d);
-        data[i++]  = new String[] {kv.getKey(), val};
+        String name = Optional.ofNullable(rule.getName()).orElse("");
+        String comment = Optional.ofNullable(rule.getComment()).orElse("");
+        data[i++]  = new String[] {name, comment, rule.getRule(), val};
       }
       String ret = FlipTable.of(headers, data);
       if(!triageRules.isEmpty()) {
@@ -107,7 +110,9 @@ public class ThreatTriageFunctions {
           ,name = "ADD"
           ,description = "Add a threat triage rule."
           ,params = {"sensorConfig - Sensor config to add transformation to."
-                    ,"stellarTransforms - A Map associating stellar rules to scores"
+                    ,"triageRules - A Map (or list of Maps) representing a triage rule.  It must contain 'rule' and 'score' keys, " +
+                      "the stellar expression for the rule and triage score respectively.  " +
+                      "It may contain 'name' and 'comment', the name of the rule and comment associated with the rule respectively."
                     }
           ,returns = "The String representation of the threat triage rules"
           )
@@ -133,19 +138,36 @@ public class ThreatTriageFunctions {
         triageConfig = new ThreatTriageConfig();
         tiConfig.setTriageConfig(triageConfig);
       }
-      Map<String, Number> triageRules = triageConfig.getRiskLevelRules();
+      List<RiskLevelRule> triageRules = triageConfig.getRiskLevelRules();
       if(triageRules == null) {
-        triageRules = new LinkedHashMap<>();
-        triageConfig.setRiskLevelRules(triageRules);
+        triageRules = new ArrayList<>();
+      }
+      Object newRuleObj = args.get(1);
+      List<Map<String, Object>> newRules = new ArrayList<>();
+      if(newRuleObj != null && newRuleObj instanceof List) {
+        newRules = (List<Map<String, Object>>) newRuleObj;
+      }
+      else if(newRuleObj != null && newRuleObj instanceof Map) {
+        newRules.add((Map<String, Object>) newRuleObj);
+      }
+      else if(newRuleObj != null) {
+        throw new IllegalStateException("triageRule must be either a Map representing a single rule or a List of rules.");
       }
-      Map<String, Object> newRules = (Map<String, Object>) args.get(1);
-      for(Map.Entry<String, Object> kv : newRules.entrySet()) {
-        if(kv.getKey() == null || kv.getKey().equals("null")) {
-          continue;
+      for(Map<String, Object> newRule : newRules) {
+        if(!(newRule == null || !newRule.containsKey("rule") || !newRule.containsKey("score"))) {
+          RiskLevelRule ruleToAdd = new RiskLevelRule();
+          ruleToAdd.setRule((String) newRule.get("rule"));
+          ruleToAdd.setScore(ConversionUtils.convert(newRule.get("score"), Double.class));
+          if (newRule.containsKey("name")) {
+            ruleToAdd.setName((String) newRule.get("name"));
+          }
+          if (newRule.containsKey("comment")) {
+            ruleToAdd.setComment((String) newRule.get("comment"));
+          }
+          triageRules.add(ruleToAdd);
         }
-        Double ret = ConversionUtils.convert(kv.getValue(), Double.class);
-        triageConfig.getRiskLevelRules().put(kv.getKey(), ret);
       }
+      triageConfig.setRiskLevelRules(triageRules);
       try {
         return JSONUtils.INSTANCE.toJSON(configObj, true);
       } catch (JsonProcessingException e) {
@@ -171,7 +193,7 @@ public class ThreatTriageFunctions {
           ,name = "REMOVE"
           ,description = "Remove stellar threat triage rule(s)."
           ,params = {"sensorConfig - Sensor config to add transformation to."
-                    ,"stellarTransforms - A list of stellar rules to remove"
+                    ,"rules - A list of stellar rules or rule names to remove"
                     }
           ,returns = "The String representation of the enrichment config"
           )
@@ -197,14 +219,22 @@ public class ThreatTriageFunctions {
         triageConfig = new ThreatTriageConfig();
         tiConfig.setTriageConfig(triageConfig);
       }
-      Map<String, Number> triageRules = triageConfig.getRiskLevelRules();
+      List<RiskLevelRule> triageRules = triageConfig.getRiskLevelRules();
       if(triageRules == null) {
-        triageRules = new LinkedHashMap<>();
+        triageRules = new ArrayList<>();
         triageConfig.setRiskLevelRules(triageRules);
       }
-      List<String> rulesToRemove = (List<String>) args.get(1);
-      for(String rule : rulesToRemove) {
-        triageConfig.getRiskLevelRules().remove(rule);
+
+      Set<String> toRemove = new HashSet<>(Optional.ofNullable((List<String>) args.get(1)).orElse(new ArrayList<>()));
+      for (Iterator<RiskLevelRule> it = triageRules.iterator();it.hasNext();){
+        RiskLevelRule rule = it.next();
+        boolean remove = toRemove.contains(rule.getRule());
+        if(!remove && rule.getName() != null) {
+          remove = toRemove.contains(rule.getName());
+        }
+        if(remove) {
+          it.remove();
+        }
       }
       try {
         return JSONUtils.INSTANCE.toJSON(configObj, true);
@@ -260,9 +290,9 @@ public class ThreatTriageFunctions {
         triageConfig = new ThreatTriageConfig();
         tiConfig.setTriageConfig(triageConfig);
       }
-      Map<String, Number> triageRules = triageConfig.getRiskLevelRules();
+      List<RiskLevelRule> triageRules = triageConfig.getRiskLevelRules();
       if(triageRules == null) {
-        triageRules = new LinkedHashMap<>();
+        triageRules = new ArrayList<>();
         triageConfig.setRiskLevelRules(triageRules);
       }
       String aggregator = (String) args.get(1);

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-management/src/test/java/org/apache/metron/management/ConfigurationFunctionsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-management/src/test/java/org/apache/metron/management/ConfigurationFunctionsTest.java b/metron-platform/metron-management/src/test/java/org/apache/metron/management/ConfigurationFunctionsTest.java
index 794f208..ee6a362 100644
--- a/metron-platform/metron-management/src/test/java/org/apache/metron/management/ConfigurationFunctionsTest.java
+++ b/metron-platform/metron-management/src/test/java/org/apache/metron/management/ConfigurationFunctionsTest.java
@@ -131,7 +131,7 @@ public class ConfigurationFunctionsTest {
         "fieldToTypeMap" : { },
         "config" : { },
         "triageConfig" : {
-          "riskLevelRules" : { },
+          "riskLevelRules" : [ ],
           "aggregator" : "MAX",
           "aggregationConfig" : { }
         }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/75d122d1/metron-platform/metron-management/src/test/java/org/apache/metron/management/ThreatTriageFunctionsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-management/src/test/java/org/apache/metron/management/ThreatTriageFunctionsTest.java b/metron-platform/metron-management/src/test/java/org/apache/metron/management/ThreatTriageFunctionsTest.java
index 0383a54..0c4505e 100644
--- a/metron-platform/metron-management/src/test/java/org/apache/metron/management/ThreatTriageFunctionsTest.java
+++ b/metron-platform/metron-management/src/test/java/org/apache/metron/management/ThreatTriageFunctionsTest.java
@@ -20,6 +20,7 @@ package org.apache.metron.management;
 import com.google.common.collect.ImmutableMap;
 import org.adrianwalker.multilinestring.Multiline;
 import org.apache.metron.common.configuration.enrichment.SensorEnrichmentConfig;
+import org.apache.metron.common.configuration.enrichment.threatintel.RiskLevelRule;
 import org.apache.metron.common.dsl.Context;
 import org.apache.metron.common.dsl.ParseException;
 import org.apache.metron.common.dsl.StellarFunctions;
@@ -30,6 +31,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import static org.apache.metron.management.EnrichmentConfigFunctionsTest.emptyTransformationsConfig;
@@ -54,7 +56,7 @@ public class ThreatTriageFunctionsTest {
             .build();
   }
 
-  public static Map<String, Number> getTriageRules(String config) {
+  public static List<RiskLevelRule> getTriageRules(String config) {
     SensorEnrichmentConfig sensorConfig = (SensorEnrichmentConfig) ENRICHMENT.deserialize(config);
     return sensorConfig.getThreatIntel().getTriageConfig().getRiskLevelRules();
   }
@@ -81,41 +83,48 @@ public class ThreatTriageFunctionsTest {
   public void testAddEmpty() {
 
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 } )"
             , toMap("config", configStr
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(1, triageRules.size());
-    Assert.assertEquals(10.0, triageRules.get(variables.get("less").getExpression()).doubleValue(), 1e-6 );
+    RiskLevelRule rule = triageRules.get(0);
+    Assert.assertEquals(variables.get("less").getExpression(), rule.getRule() );
+    Assert.assertEquals(10.0, rule.getScore().doubleValue(), 1e-6 );
   }
 
   @Test
   public void testAddHasExisting() {
 
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 } )"
             , toMap("config", configStr
             )
     );
 
     newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('greater') : 20 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule' : SHELL_GET_EXPRESSION('greater'), 'score' : 20 } )"
             , toMap("config",newConfig
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(2, triageRules.size());
-    Assert.assertEquals(10.0, triageRules.get(variables.get("less").getExpression()).doubleValue(), 1e-6 );
-    Assert.assertEquals(20.0, triageRules.get(variables.get("greater").getExpression()).doubleValue(), 1e-6 );
+    RiskLevelRule less = triageRules.get(0);
+    Assert.assertEquals(variables.get("less").getExpression(), less.getRule() );
+    Assert.assertEquals(10.0, less.getScore().doubleValue(), 1e-6 );
+
+    RiskLevelRule greater = triageRules.get(1);
+    Assert.assertEquals(variables.get("greater").getExpression(), greater.getRule() );
+    Assert.assertEquals(20.0, greater.getScore().doubleValue(), 1e-6 );
   }
 
-  @Test
+  @Test(expected=ParseException.class)
   public void testAddMalformed() {
     Object o = run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('foo') : 10 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule': SHELL_GET_EXPRESSION('foo'), 'score' : 10 } )"
             , toMap("config", configStr
             )
     );
@@ -125,26 +134,28 @@ public class ThreatTriageFunctionsTest {
   @Test
   public void testAddDuplicate() {
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 } )"
             , toMap("config", configStr
             )
     );
 
     newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10 } )"
+            "THREAT_TRIAGE_ADD(config, { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 } )"
             , toMap("config",newConfig
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(1, triageRules.size());
-    Assert.assertEquals(10.0, triageRules.get(variables.get("less").getExpression()).doubleValue(), 1e-6 );
+    RiskLevelRule rule = triageRules.get(0);
+    Assert.assertEquals(variables.get("less").getExpression(), rule.getRule() );
+    Assert.assertEquals(10.0, rule.getScore().doubleValue(), 1e-6 );
   }
 
   @Test
   public void testRemove() {
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10, SHELL_GET_EXPRESSION('greater') : 20 } )"
+            "THREAT_TRIAGE_ADD(config, [ { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 }, { 'rule' : SHELL_GET_EXPRESSION('greater'), 'score' : 20 } ] )"
             , toMap("config", configStr
             )
     );
@@ -155,17 +166,18 @@ public class ThreatTriageFunctionsTest {
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(1, triageRules.size());
-    Assert.assertEquals(10.0, triageRules.get(variables.get("less").getExpression()).doubleValue(), 1e-6 );
+    RiskLevelRule rule = triageRules.get(0);
+    Assert.assertEquals(variables.get("less").getExpression(), rule.getRule() );
+    Assert.assertEquals(10.0, rule.getScore().doubleValue(), 1e-6 );
   }
 
   @Test
   public void testRemoveMultiple() {
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10, SHELL_GET_EXPRESSION('greater') : 20 } )"
-            , toMap("config", configStr
-            )
+            "THREAT_TRIAGE_ADD(config, [ { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 }, { 'rule' : SHELL_GET_EXPRESSION('greater'), 'score' : 20 } ] )"
+            , toMap("config", configStr )
     );
 
     newConfig = (String) run(
@@ -174,7 +186,7 @@ public class ThreatTriageFunctionsTest {
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(0, triageRules.size());
   }
 
@@ -182,7 +194,7 @@ public class ThreatTriageFunctionsTest {
   public void testRemoveMissing() {
 
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10, SHELL_GET_EXPRESSION('greater') : 20 } )"
+            "THREAT_TRIAGE_ADD(config, [ { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 }, { 'rule' : SHELL_GET_EXPRESSION('greater'), 'score' : 20 } ] )"
             , toMap("config", configStr
             )
     );
@@ -193,20 +205,25 @@ public class ThreatTriageFunctionsTest {
             )
     );
 
-    Map<String, Number> triageRules = getTriageRules(newConfig);
+    List<RiskLevelRule> triageRules = getTriageRules(newConfig);
     Assert.assertEquals(2, triageRules.size());
-    Assert.assertEquals(10.0, triageRules.get(variables.get("less").getExpression()).doubleValue(), 1e-6 );
-    Assert.assertEquals(20.0, triageRules.get(variables.get("greater").getExpression()).doubleValue(), 1e-6 );
+    RiskLevelRule less = triageRules.get(0);
+    Assert.assertEquals(variables.get("less").getExpression(), less.getRule() );
+    Assert.assertEquals(10.0, less.getScore().doubleValue(), 1e-6 );
+
+    RiskLevelRule greater = triageRules.get(1);
+    Assert.assertEquals(variables.get("greater").getExpression(), greater.getRule() );
+    Assert.assertEquals(20.0, greater.getScore().doubleValue(), 1e-6 );
   }
 
   /**
-\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
-\u2551 Triage Rule \u2502 Score \u2551
-\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
-\u2551 1 > 2       \u2502 20    \u2551
-\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
-\u2551 1 < 2       \u2502 10    \u2551
-\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
+\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
+\u2551 Name \u2502 Comment \u2502 Triage Rule \u2502 Score \u2551
+\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u256a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
+\u2551      \u2502         \u2502 1 < 2       \u2502 10    \u2551
+\u255f\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u253c\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2562
+\u2551      \u2502         \u2502 1 > 2       \u2502 20    \u2551
+\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
 
 
 Aggregation: MAX*/
@@ -217,7 +234,7 @@ Aggregation: MAX*/
   public void testPrint() {
 
     String newConfig = (String) run(
-            "THREAT_TRIAGE_ADD(config, { SHELL_GET_EXPRESSION('less') : 10, SHELL_GET_EXPRESSION('greater') : 20 } )"
+            "THREAT_TRIAGE_ADD(config, [ { 'rule' : SHELL_GET_EXPRESSION('less'), 'score' : 10 }, { 'rule' : SHELL_GET_EXPRESSION('greater'), 'score' : 20 } ] )"
             , toMap("config", configStr
             )
     );
@@ -231,11 +248,11 @@ Aggregation: MAX*/
   }
 
   /**
-\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
-\u2551 Triage Rule \u2502 Score \u2551
-\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
-\u2551 (empty)             \u2551
-\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
+\u2554\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2564\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2557
+\u2551 Name \u2502 Comment \u2502 Triage Rule \u2502 Score \u2551
+\u2560\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2567\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2563
+\u2551 (empty)                              \u2551
+\u255a\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u2550\u255d
    */
   @Multiline
   static String testPrintEmptyExpected;


[05/17] incubator-metron git commit: METRON-687 Create String Formatting Function for Stellar (nickwallen) closes apache/incubator-metron#434

Posted by ce...@apache.org.
METRON-687 Create String Formatting Function for Stellar (nickwallen) closes apache/incubator-metron#434


Project: http://git-wip-us.apache.org/repos/asf/incubator-metron/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-metron/commit/fd77ec32
Tree: http://git-wip-us.apache.org/repos/asf/incubator-metron/tree/fd77ec32
Diff: http://git-wip-us.apache.org/repos/asf/incubator-metron/diff/fd77ec32

Branch: refs/heads/Metron_0.3.1
Commit: fd77ec329e96490e8401e300ac4104bf3f4a9901
Parents: 75d122d
Author: nickwallen <ni...@nickallen.org>
Authored: Fri Feb 3 10:27:45 2017 -0500
Committer: Nick Allen <ni...@nickallen.org>
Committed: Fri Feb 3 10:27:45 2017 -0500

----------------------------------------------------------------------
 metron-platform/metron-common/README.md         |  8 +++++
 .../common/dsl/functions/StringFunctions.java   | 21 ++++++++++++
 .../dsl/functions/StringFunctionsTest.java      | 35 ++++++++++++++++++++
 3 files changed, 64 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/fd77ec32/metron-platform/metron-common/README.md
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/README.md b/metron-platform/metron-common/README.md
index ace1a0c..c24ae73 100644
--- a/metron-platform/metron-common/README.md
+++ b/metron-platform/metron-common/README.md
@@ -96,6 +96,7 @@ The `!=` operator is the negation of the above.
 | [ `ENRICHMENT_GET`](#enrichment_get)                                                               |
 | [ `FILL_LEFT`](#fill_left)                                                                         |
 | [ `FILL_RIGHT`](#fill_right)                                                                       |
+| [ `FORMAT`](#format)                                                                       |
 | [ `HLLP_CARDINALITY`](../../metron-analytics/metron-statistics#hllp_cardinality)                   |
 | [ `HLLP_INIT`](../../metron-analytics/metron-statistics#hllp_init)                                 |
 | [ `HLLP_MERGE`](../../metron-analytics/metron-statistics#hllp_merge)                               |
@@ -270,6 +271,13 @@ The `!=` operator is the negation of the above.
     * len - the required length
   * Returns: Last element of the list
 
+### `FORMAT`
+  * Description: Returns a formatted string using the specified format string and arguments. Uses Java's string formatting conventions.
+  * Input:
+    * format - string
+    * arguments... - object(s)
+  * Returns: A formatted string.
+
 ### `GEO_GET`
   * Description: Look up an IPV4 address and returns geographic information about it
   * Input:

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/fd77ec32/metron-platform/metron-common/src/main/java/org/apache/metron/common/dsl/functions/StringFunctions.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/main/java/org/apache/metron/common/dsl/functions/StringFunctions.java b/metron-platform/metron-common/src/main/java/org/apache/metron/common/dsl/functions/StringFunctions.java
index fa2b55b..1086da3 100644
--- a/metron-platform/metron-common/src/main/java/org/apache/metron/common/dsl/functions/StringFunctions.java
+++ b/metron-platform/metron-common/src/main/java/org/apache/metron/common/dsl/functions/StringFunctions.java
@@ -322,4 +322,25 @@ public class StringFunctions {
       return ret;
     }
   }
+
+  @Stellar( name="FORMAT"
+          , description = "Returns a formatted string using the specified format string and arguments. Uses Java's string formatting conventions."
+          , params = { "format - string", "arguments... - object(s)" }
+          , returns = "A formatted string."
+  )
+  public static class Format extends BaseStellarFunction {
+
+    @Override
+    public Object apply(List<Object> args) {
+
+      if(args.size() == 0) {
+        throw new IllegalArgumentException("[FORMAT] missing argument: format string");
+      }
+
+      String format = ConversionUtils.convert(args.get(0), String.class);
+      Object[] formatArgs = args.subList(1, args.size()).toArray();
+
+      return String.format(format, formatArgs);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/fd77ec32/metron-platform/metron-common/src/test/java/org/apache/metron/common/dsl/functions/StringFunctionsTest.java
----------------------------------------------------------------------
diff --git a/metron-platform/metron-common/src/test/java/org/apache/metron/common/dsl/functions/StringFunctionsTest.java b/metron-platform/metron-common/src/test/java/org/apache/metron/common/dsl/functions/StringFunctionsTest.java
index 18d2eb2..d8854f8 100644
--- a/metron-platform/metron-common/src/test/java/org/apache/metron/common/dsl/functions/StringFunctionsTest.java
+++ b/metron-platform/metron-common/src/test/java/org/apache/metron/common/dsl/functions/StringFunctionsTest.java
@@ -21,10 +21,13 @@ package org.apache.metron.common.dsl.functions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import org.apache.commons.collections.map.HashedMap;
+import org.apache.commons.collections.map.SingletonMap;
 import org.apache.metron.common.dsl.ParseException;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.Calendar;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -172,4 +175,36 @@ public class StringFunctionsTest {
      */
     Assert.assertEquals(1.5, (Double)run("STRING_ENTROPY(foo)", ImmutableMap.of("foo", "aaaaaaaaaabbbbbccccc")), 0.0);
   }
+
+  @Test
+  public void testFormat() throws Exception {
+
+    Map<String, Object> vars = ImmutableMap.of(
+            "cal", new Calendar.Builder().setDate(2017, 02, 02).build(),
+            "x", 234,
+            "y", 3);
+
+    Assert.assertEquals("no args",        run("FORMAT('no args')", vars));
+    Assert.assertEquals("234.0",          run("FORMAT('%.1f', TO_DOUBLE(234))", vars));
+    Assert.assertEquals("000234",         run("FORMAT('%06d', 234)", vars));
+    Assert.assertEquals("03 2,2017",      run("FORMAT('%1$tm %1$te,%1$tY', cal)", vars));
+    Assert.assertEquals("234 > 3",        run("FORMAT('%d > %d', x, y)", vars));
+    Assert.assertEquals("missing: null",  run("FORMAT('missing: %d', missing)", vars));
+  }
+
+  /**
+   * FORMAT - Not passing a format string will throw an exception
+   */
+  @Test(expected = ParseException.class)
+  public void testFormatWithNoArguments() throws Exception {
+    run("FORMAT()", Collections.emptyMap());
+  }
+
+  /**
+   * FORMAT - Forgetting to pass an argument required by the format string will throw an exception.
+   */
+  @Test(expected = ParseException.class)
+  public void testFormatWithMissingArguments() throws Exception {
+    run("FORMAT('missing arg: %d')", Collections.emptyMap());
+  }
 }