You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@apex.apache.org by th...@apache.org on 2017/11/05 08:43:50 UTC

[apex-site] 02/02: from 0c948eb6b24c203e9786bf4c2f3488dd2a27c0c1

This is an automated email from the ASF dual-hosted git repository.

thw pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/apex-site.git

commit 9f0c541dc47c0260678d8049ba225f6ad3176c68
Author: Apex Dev <de...@apex.apache.org>
AuthorDate: Sun Nov 5 01:43:13 2017 -0700

    from 0c948eb6b24c203e9786bf4c2f3488dd2a27c0c1
---
 content/docs/malhar-3.8/__init__.py                |    0
 .../malhar-3.8/__pycache__/__init__.cpython-35.pyc |  Bin 0 -> 159 bytes
 content/docs/malhar-3.8/apis/calcite/index.html    |  561 ++++++
 .../docs/malhar-3.8/apis/images/calcite-apex.png   |  Bin 0 -> 186300 bytes
 content/docs/malhar-3.8/apis/images/image_1.jpg    |  Bin 0 -> 16919 bytes
 content/docs/malhar-3.8/apis/images/image_2.jpg    |  Bin 0 -> 26550 bytes
 content/docs/malhar-3.8/base.html                  |  118 ++
 content/docs/malhar-3.8/breadcrumbs.html           |   25 +
 content/docs/malhar-3.8/css/highlight.css          |  124 ++
 content/docs/malhar-3.8/css/theme.css              |   12 +
 content/docs/malhar-3.8/css/theme_extra.css        |  154 ++
 content/docs/malhar-3.8/favicon.ico                |  Bin 0 -> 25597 bytes
 .../docs/malhar-3.8/fonts/fontawesome-webfont.eot  |  Bin 0 -> 37405 bytes
 .../docs/malhar-3.8/fonts/fontawesome-webfont.svg  |  399 ++++
 .../docs/malhar-3.8/fonts/fontawesome-webfont.ttf  |  Bin 0 -> 79076 bytes
 .../docs/malhar-3.8/fonts/fontawesome-webfont.woff |  Bin 0 -> 43572 bytes
 content/docs/malhar-3.8/footer.html                |   23 +
 .../docs/malhar-3.8/images/malhar-operators.png    |  Bin 0 -> 109734 bytes
 content/docs/malhar-3.8/img/favicon.ico            |  Bin 0 -> 1150 bytes
 content/docs/malhar-3.8/index.html                 |  389 ++++
 content/docs/malhar-3.8/js/highlight.pack.js       |    2 +
 content/docs/malhar-3.8/js/jquery-2.1.1.min.js     |    4 +
 content/docs/malhar-3.8/js/modernizr-2.8.3.min.js  |    1 +
 content/docs/malhar-3.8/js/theme.js                |   55 +
 content/docs/malhar-3.8/mkdocs/js/lunr.min.js      |    7 +
 content/docs/malhar-3.8/mkdocs/js/mustache.min.js  |    1 +
 content/docs/malhar-3.8/mkdocs/js/require.js       |   36 +
 .../mkdocs/js/search-results-template.mustache     |    4 +
 content/docs/malhar-3.8/mkdocs/js/search.js        |   88 +
 content/docs/malhar-3.8/mkdocs/js/text.js          |  390 ++++
 content/docs/malhar-3.8/mkdocs/search_index.json   | 2044 ++++++++++++++++++++
 .../index.html                                     |  594 ++++++
 .../malhar-3.8/operators/block_reader/index.html   |  541 ++++++
 .../operators/csvParserOperator/index.html         |  626 ++++++
 .../malhar-3.8/operators/csvformatter/index.html   |  493 +++++
 .../docs/malhar-3.8/operators/deduper/index.html   |  855 ++++++++
 .../docs/malhar-3.8/operators/enricher/index.html  |  641 ++++++
 .../malhar-3.8/operators/file_output/index.html    |  476 +++++
 .../malhar-3.8/operators/file_splitter/index.html  |  493 +++++
 .../docs/malhar-3.8/operators/filter/index.html    |  449 +++++
 .../operators/fixedWidthParserOperator/index.html  |  613 ++++++
 .../operators/fsInputOperator/index.html           |  825 ++++++++
 .../operators/ftpInputOperator/index.html          |  447 +++++
 .../operators/images/blockreader/classdiagram.png  |  Bin 0 -> 48613 bytes
 .../operators/images/blockreader/flowdiagram.png   |  Bin 0 -> 48160 bytes
 .../images/blockreader/fsreaderexample.png         |  Bin 0 -> 29927 bytes
 .../images/blockreader/totalBacklogProcessing.png  |  Bin 0 -> 55944 bytes
 .../operators/images/csvParser/CSVParser.png       |  Bin 0 -> 47140 bytes
 .../operators/images/deduper/image00.png           |  Bin 0 -> 8612 bytes
 .../operators/images/deduper/image01.png           |  Bin 0 -> 23903 bytes
 .../operators/images/deduper/image02.png           |  Bin 0 -> 25300 bytes
 .../operators/images/deduper/image03.png           |  Bin 0 -> 10901 bytes
 .../operators/images/deduper/image04.png           |  Bin 0 -> 17387 bytes
 .../operators/images/fileoutput/FileRotation.png   |  Bin 0 -> 26067 bytes
 .../operators/images/fileoutput/diagram1.png       |  Bin 0 -> 30754 bytes
 .../operators/images/filesplitter/baseexample.png  |  Bin 0 -> 14493 bytes
 .../operators/images/filesplitter/classdiagram.png |  Bin 0 -> 14513 bytes
 .../operators/images/filesplitter/inputexample.png |  Bin 0 -> 16012 bytes
 .../operators/images/filesplitter/sequence.png     |  Bin 0 -> 17020 bytes
 .../images/fixedWidthParser/fixedWidthParser.png   |  Bin 0 -> 91569 bytes
 .../images/fsInput/operatorsClassDiagram.png       |  Bin 0 -> 71104 bytes
 .../images/ftpInputOperator/classdiagram.png       |  Bin 0 -> 27557 bytes
 .../images/jdbcinput/operatorsClassDiagram.png     |  Bin 0 -> 49841 bytes
 .../images/jdbcoutput/operatorsClassDiagrams.png   |  Bin 0 -> 136942 bytes
 .../images/jsonFormatter/JsonFormatter.png         |  Bin 0 -> 29253 bytes
 .../operators/images/jsonParser/JsonParser.png     |  Bin 0 -> 57233 bytes
 .../operators/images/kafkainput/image00.png        |  Bin 0 -> 36143 bytes
 .../images/regexparser/logcapturedgroups.png       |  Bin 0 -> 86956 bytes
 .../images/regexparser/regexcapturedgroups.png     |  Bin 0 -> 45038 bytes
 .../operators/images/s3output/s3outputmodule.png   |  Bin 0 -> 51067 bytes
 .../images/windowedOperator/allowed-lateness.png   |  Bin 0 -> 17901 bytes
 .../images/windowedOperator/session-windows-1.png  |  Bin 0 -> 15139 bytes
 .../images/windowedOperator/session-windows-2.png  |  Bin 0 -> 14238 bytes
 .../images/windowedOperator/session-windows-3.png  |  Bin 0 -> 11613 bytes
 .../images/windowedOperator/session-windows-4.png  |  Bin 0 -> 15176 bytes
 .../windowedOperator/sliding-time-windows.png      |  Bin 0 -> 15855 bytes
 .../images/windowedOperator/time-windows.png       |  Bin 0 -> 8184 bytes
 .../operators/images/xmlParser/XmlParser.png       |  Bin 0 -> 22196 bytes
 .../operators/jdbcPollInputOperator/index.html     |  668 +++++++
 .../operators/jmsInputOperator/index.html          |  532 +++++
 .../malhar-3.8/operators/jsonFormatter/index.html  |  478 +++++
 .../malhar-3.8/operators/jsonParser/index.html     |  475 +++++
 .../operators/kafkaInputOperator/index.html        |  770 ++++++++
 .../malhar-3.8/operators/regexparser/index.html    |  647 +++++++
 .../malhar-3.8/operators/s3outputmodule/index.html |  578 ++++++
 .../docs/malhar-3.8/operators/transform/index.html |  591 ++++++
 .../operators/windowedOperator/index.html          |  540 ++++++
 .../operators/xmlParserOperator/index.html         |  471 +++++
 content/docs/malhar-3.8/search.html                |  312 +++
 content/docs/malhar-3.8/searchbox.html             |    5 +
 content/docs/malhar-3.8/sitemap.xml                |  158 ++
 content/docs/malhar-3.8/toc.html                   |   23 +
 content/docs/malhar-3.8/versions.html              |   15 +
 93 files changed, 17753 insertions(+)

diff --git a/content/docs/malhar-3.8/__init__.py b/content/docs/malhar-3.8/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/content/docs/malhar-3.8/__pycache__/__init__.cpython-35.pyc b/content/docs/malhar-3.8/__pycache__/__init__.cpython-35.pyc
new file mode 100644
index 0000000..4e9201e
Binary files /dev/null and b/content/docs/malhar-3.8/__pycache__/__init__.cpython-35.pyc differ
diff --git a/content/docs/malhar-3.8/apis/calcite/index.html b/content/docs/malhar-3.8/apis/calcite/index.html
new file mode 100644
index 0000000..8949f5f
--- /dev/null
+++ b/content/docs/malhar-3.8/apis/calcite/index.html
@@ -0,0 +1,561 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>SQL - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "SQL";
+    var mkdocs_page_input_path = "apis/calcite.md";
+    var mkdocs_page_url = "/apis/calcite/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">SQL</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#apex-calcite-integration">Apex-Calcite Integration</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#sql-apis-for-apache-apex">SQL APIs for Apache Apex</a></li>
+                
+                    <li><a class="toctree-l4" href="#example-1-pure-style-sql-application">Example 1: Pure Style SQL Application</a></li>
+                
+                    <li><a class="toctree-l4" href="#example-2-fusion-style-sql-application">Example 2: Fusion Style SQL Application</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#ongoing-efforts">Ongoing efforts</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../operators/xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>APIs &raquo;</li>
+        
+      
+    
+    <li>SQL</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <p>Apache Apex is a unified stream and batch processing engine that enables application developers to process data at very high throughput with low latency. Although the different types of data have different processing needs, SQL remains a popular and a generic way for processing data. To ensure that existing ETL developers and developers who are well versed with Database applications adopt stream processing application development with ease, integration of SQL with Apex [...]
+<h1 id="apex-calcite-integration">Apex-Calcite Integration</h1>
+<p>Apache Calcite is a highly customizable engine for parsing and planning queries on relational data from various data sources; it provides storage independent optimization of queries and ways to integrate them into other frameworks which would like to take advantage and expose SQL capability to their users. For details, please read at <a href="https://calcite.apache.org/">Apache Calcite Website</a>. </p>
+<p>Particularly in SQL on Apex, Calcite processes a query and then creates relational algebra to create processing pipelines. These relational algebra processing pipelines are converted to a DAG with a set of operators to perform business logic on streaming data.</p>
+<p><img alt="image alt text" src="../images/calcite-apex.png" /></p>
+<p>Above figure explains how SQL query gets converted to Apex DAG.</p>
+<ol>
+<li>User specified query is processed by Calcite Query planner; this involves parsing and optimizing the query to generate Relation Expression Tree. </li>
+<li>This Relation Expression Tree is received by Apache Apex’s SQL module to finally convert to an Apex DAG having series of operators.</li>
+</ol>
+<p>One peculiarity of Calcite queries is that the data source and destination need not be RDBMS systems; in the above example, <code>File</code> refers to a file in the filesystem and <code>Kafka</code> to a Kafka message broker. Calcite allows Apex to register table sources and destinations as anything which can return a row type results. So a “scan” relational expression gets converted to “KafkaInputOperator + ParseOperator”, a result of which is series of POJOs reflecting a Row Type.  [...]
+<p>For more details about the integration, click <a href="https://github.com/apache/apex-malhar/tree/master/sql">here</a>.</p>
+<h1 id="sql-apis-for-apache-apex">SQL APIs for Apache Apex</h1>
+<p>Listed below are the Java APIs which can be used by SQL/Apex users to create a DAG in the implementation of the <em>populateDAG</em> method of the <code>StreamingApplication</code> interface.</p>
+<table>
+<thead>
+<tr>
+<th>API</th>
+<th align="center">Description</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><strong>SQLExecEnvironment.getEnvironment()</strong></td>
+<td align="center">Creates a new SQL execution environment</td>
+</tr>
+<tr>
+<td><strong>SQLExecEnvironment.registerTable(tableName, endpointInstance)</strong></td>
+<td align="center">Registers a new abstract table with existing environment. <em>endpointInstance</em> is an object of type <a href="https://github.com/apache/apex-malhar/blob/master/sql/src/main/java/org/apache/apex/malhar/sql/table/Endpoint.java">Endpoint</a> which defines a table.</td>
+</tr>
+<tr>
+<td><strong>SQLExecEnvironment.registerFunction(sqlFunctionName, holderClass, staticFunctionName)</strong></td>
+<td align="center">Registers a new User Defined Scalar function</td>
+</tr>
+<tr>
+<td><strong>SQLExecEnvironment.executeSQL(dag, sqlStatement)</strong></td>
+<td align="center">Creates a DAG for a particular SQL statement</td>
+</tr>
+</tbody>
+</table>
+<p>Usage of above APIs is described in detail in following sections.</p>
+<h2 id="example-1-pure-style-sql-application">Example 1: Pure Style SQL Application</h2>
+<p>With Apache Calcite Integration, you can use SQL queries across different data sources and provide UDFs (User Defined Functions) as per your business logic. This example will use a Kafka topic as the source and a HDFS file as the destination.
+Following application code will be used to explain APIs. Actual source code can be found <a href="https://github.com/apache/apex-malhar/blob/master/demos/sql/src/main/java/org/apache/apex/malhar/sql/sample/PureStyleSQLApplication.java">here</a>.</p>
+<pre><code class="java">  public class PureStyleSQLApplication implements StreamingApplication
+  {
+    @Override
+    public void populateDAG(DAG dag, Configuration conf)
+    {
+       // Create new SQLExecEnvironment
+       SQLExecEnvironment sqlEnv = SQLExecEnvironment.getEnvironment();
+
+      // This is a string that defines a schema and is discussed in more detail in &quot;Registering tables with SQLExecEnvironment&quot; section 
+      String inputSchemaString = &quot;...&quot;;
+
+      // similar to inputSchemaString, we also need to define outputSchemaString
+      String outputSchemaString = &quot;...&quot;;
+
+       // Register KafkaEnpoint as &quot;ORDERS&quot; table with kafka topic and data format as CSV
+       sqlEnv = sqlEnv.registerTable( 
+                                    &quot;ORDERS&quot;, 
+                                    new KafkaEndpoint(&quot;localhost:9090&quot;, 
+                                                      &quot;inputTopic&quot;, 
+                                                      new CSVMessageFormat(inputSchemaString))
+                                  );
+
+       // Register FileEndpoint as &quot;SALES&quot; table with file path and data format as CSV
+       sqlEnv = sqlEnv.registerTable( 
+                                    &quot;SALES&quot;, 
+                                    new FileEndpoint(&quot;/tmp/output&quot;, 
+                                                     &quot;out.file&quot;, 
+                                                     new CSVMessageFormat(outputSchemaString))
+                                  );
+
+       // Register scalar SQL UDF 
+       sqlEnv = sqlEnv.registerFunction(&quot;APEXCONCAT&quot;, PureStyleSQLApplication.class, &quot;apex_concat_str&quot;);
+
+       // Converting SQL statement to DAG 
+       String sql = &quot;INSERT INTO SALES 
+                       SELECT STREAM ROWTIME, FLOOR(ROWTIME TO DAY), APEXCONCAT('OILPAINT', SUBSTRING(PRODUCT, 6, 7)) 
+                       FROM ORDERS 
+                       WHERE ID &gt; 3 AND PRODUCT LIKE 'paint%'&quot;;
+       sqlEnv.executeSQL(dag, sql);
+    }// populateDAG finished
+
+    public static String apex_concat_str(String s1, String s2)
+    {
+        return s1 + s2;
+    } 
+  }
+</code></pre>
+
+<h3 id="constructing-sqlexecenvironment">Constructing SQLExecEnvironment</h3>
+<p>The class <em><strong>SQLExecEnvironment</strong></em> provides a starting point and a simple way to define metadata needed for running a SQL statement; a new instance of this class is returned by the <code>getEnvironment</code> static method.  </p>
+<pre><code class="java">  // Creates SQLExecEnvironment instance by using static method getEnvironment
+  SQLExecEnvironment sqlEnv = SQLExecEnvironment.getEnvironment();
+</code></pre>
+
+<h3 id="registering-tables-with-sqlexecenvironment">Registering tables with SQLExecEnvironment</h3>
+<p>Next, we need to register tables which can be used in a query. For this purpose, we can use <em>registerTable</em> method from SQLExecEnvironment.</p>
+<pre><code class="java">  // Register KafkaEnpoint as &quot;ORDERS&quot; table with kafka topic and data format as CSV
+  sqlEnv = sqlEnv.registerTable( 
+                              &quot;ORDERS&quot;, 
+                              new KafkaEndpoint(&quot;localhost:9090&quot;, 
+                                                &quot;inputTopic&quot;, 
+                                                new CSVMessageFormat(inputSchemaString))
+                            );
+
+  // Register FileEndpoint as &quot;SALES&quot; table with file path and data format as CSV
+  sqlEnv = sqlEnv.registerTable( 
+                              &quot;SALES&quot;, 
+                              new FileEndpoint(&quot;/tmp/output&quot;, 
+                                               &quot;out.file&quot;, 
+                                               new CSVMessageFormat(inputSchemaString))
+                            );
+</code></pre>
+
+<p><strong><em>"registerTable"</em></strong> method takes the name of the table and an instance of endpoint as parameters. Endpoint signifies data storage mechanism and type of source/destination for the data. These endpoints require different types of configurations and possibly data formats. The data format is defined using an implementation of the <code>MessageFormat</code> interface; the <code>CSVMessageFormat</code> implementation can be configured with a schema string as follows:</p>
+<pre><code class="sql">{
+  &quot;separator&quot;: &quot;,&quot;,
+  &quot;quoteChar&quot;: &quot;\&quot;&quot;,
+  &quot;fields&quot;: [
+    {
+      &quot;name&quot;: &quot;RowTime&quot;,
+      &quot;type&quot;: &quot;Date&quot;,
+      &quot;constraints&quot;: {
+        &quot;format&quot;: &quot;dd/MM/yyyy hh:mm:ss Z&quot;
+      }
+    },
+    {
+      &quot;name&quot;: &quot;id&quot;,
+      &quot;type&quot;: &quot;Integer&quot;
+    },
+    {
+      &quot;name&quot;: &quot;Product&quot;,
+      &quot;type&quot;: &quot;String&quot;
+    },
+    {
+      &quot;name&quot;: &quot;units&quot;,
+      &quot;type&quot;: &quot;Integer&quot;
+    }
+  ]
+}
+</code></pre>
+
+<p>The schema string is a JSON string defining a separator character, quote character for fields with String type and a list of fields where, for each field, its name, type and any additional constraints are specified.</p>
+<p>Following data endpoints are supported: </p>
+<ul>
+<li><strong>KafkaEnpoint</strong>
+: To define a Kafka Endpoint we need to specify the Kafka broker (as host:port), topic name and MessageFormat as seen in line 1 in the code above.</li>
+<li><strong>FileEndpoint</strong>
+: It needs to be configured with the filesystem path, file name and MessageFormat as in line 2 in the code above. </li>
+<li><strong>StreamEndpoint</strong> 
+: This allows us to connect existing operator output or input ports to the SQL query as a data source or sink respectively. StreamEndpoint needs immediate downstream operator's input port or immediate upstream operator's output port and the field mapping for CSV data or POJO class. This will be explained in detail in next <a href="#fusion-style-sql-application">example</a>.</li>
+</ul>
+<h3 id="using-user-defined-functions-udf-in-a-sql-query">Using User Defined Functions (UDF) in a SQL query</h3>
+<p>We can use our own scalar UDF, implemented in Java, in a SQL statement for data manipulation but first, we need to register the function with the execution environment by using the <code>registerFunction</code> method.</p>
+<pre><code class="java">  sqlEnv = sqlEnv.registerFunction(&quot;APEXCONCAT&quot;, PureStyleSQLApplication.class, &quot;apex_concat_str&quot;);
+</code></pre>
+
+<p>In above code, <strong><em>registerFunction</em></strong> takes the UDF name to be used in SQL, JAVA class which implements the static method and name of that method as parameters. 
+The static method <code>apex_concat_str</code> takes two String objects as input parameters from the SQL query.</p>
+<pre><code class="java">  public static String apex_concat_str(String s1, String s2)
+  {
+    return s1 + s2;
+  }
+</code></pre>
+
+<p>The scalar UDF "APEXCONCAT" that was registered above can be used in SQL as described below. FLOOR and SUBSTRING are standard SQL scalar functions supported by Apache Calcite.</p>
+<pre><code class="sql">INSERT INTO SALES 
+       SELECT STREAM ROWTIME, FLOOR(ROWTIME TO DAY), APEXCONCAT('OILPAINT', SUBSTRING(PRODUCT, 6, 7)) 
+       FROM ORDERS 
+       WHERE ID &gt; 3 AND PRODUCT LIKE 'paint%'
+</code></pre>
+
+<p>To read about all functions and operators supported by Apache Calcite, click <a href="https://calcite.apache.org/docs/reference.html#operators-and-functions">here</a>.</p>
+<h3 id="executing-sql-query">Executing SQL Query</h3>
+<p>Finally to execute the query we need to use <strong><em>executeSQL</em></strong> function with a DAG and SQL statement as parameters.</p>
+<pre><code class="java">  // Converting SQL statement to DAG 
+  String sql = &quot;INSERT INTO SALES 
+                SELECT STREAM ROWTIME, FLOOR(ROWTIME TO DAY), APEXCONCAT('OILPAINT', SUBSTRING(PRODUCT, 6, 7)) 
+                FROM ORDERS 
+                WHERE ID &gt; 3 AND PRODUCT LIKE 'paint%'&quot;;
+  sqlEnv.executeSQL(dag, sql);
+</code></pre>
+
+<p>When executeSQL method is called, the query goes through various phases like conversion to relational algebra, optimization and planning in Calcite to generate Relation Expression Tree. 
+The generated Relation Expression Tree is consumed by Apex SQL and converted to a DAG using operators available in Apache Malhar. In the above example, the ORDERS and SALES tables will be converted to the operators KafkaInputOperator and FileOutputFormatter respectively, paired with the CSVParser formatter in both cases.</p>
+<p>A <em>WHERE</em> clause is used in this query; it defines the desired filter for rows and is converted to a <em>FilterTransformOperator</em> in the DAG. Similarly, the projection defining desired columns is converted into another instance of the <em>FilterTransformOperator</em>. The DAG created for this application will look like this:</p>
+<p><img alt="image alt text" src="../images/image_2.jpg" title="DAG for Pure Style SQL Application" /></p>
+<p><a name="fusion-style-sql-application"></a></p>
+<h2 id="example-2-fusion-style-sql-application">Example 2: Fusion Style SQL Application</h2>
+<p>As described in Pure Style SQL application, we can use different data sources as source and sink while developing Apex Applications with Calcite. This example will describe how we can develop Apex application with Apex stream as abstract table for SQL query. Actual source code can be found <a href="https://github.com/apache/apex-malhar/blob/master/demos/sql/src/main/java/org/apache/apex/malhar/sql/sample/FusionStyleSQLApplication.java">here</a>.</p>
+<pre><code class="java">  // Define Kafka Input Operator for reading data from Kafka
+  KafkaSinglePortInputOperator kafkaInput = dag.addOperator(&quot;KafkaInput&quot;, 
+                                                           KafkaSinglePortInputOperator.class);
+
+  kafkaInput.setInitialOffset(&quot;EARLIEST&quot;);
+
+  // Add CSVParser
+  CsvParser csvParser = dag.addOperator(&quot;CSVParser&quot;, CsvParser.class);
+  dag.addStream(&quot;KafkaToCSV&quot;, kafkaInput.outputPort, csvParser.in);
+</code></pre>
+
+<p>Once we define DAG with KafkaInputOperator and CSVParser, it can parse data from Kafka topic. Upto this point, this is a regular Apex application without SQL. After this, we can register the output of CSVParser as a table using <a href="https://github.com/apache/apex-malhar/blob/master/sql/src/main/java/org/apache/apex/malhar/sql/table/StreamEndpoint.java">StreamEndpoint</a> to run a SQL statement. This way we can develop applications in fusion style where the DAG is part SQL and part [...]
+<p>The following code will describe how we can define StreamEndpoint. </p>
+<pre><code class="java">  SQLExecEnvironment sqlEnv = sqlEnv.getEnvironment();
+  Map&lt;String, Class&gt; fieldMapping = ImmutableMap.&lt;String, Class&gt;of(&quot;RowTime&quot;, Date.class,
+                                                                 &quot;id&quot;, Integer.class,
+                                                                 &quot;Product&quot;, String.class,
+                                                                 &quot;units&quot;, Integer.class);
+  sqlEnv = sqlEnv.registerTable(&quot;FROMCSV&quot;, new StreamEndpoint(csvParser.out, fieldMapping));
+</code></pre>
+
+<p>To read existing data stream, we need to register it as a table with SQL execution environment with the name of the table and StreamEndpoint. StreamEndpoint can serve as input table or output table in SQL. For input table configuration we need to initialise StreamEndpoint with immediate upstream operator's output port and fieldMapping or POJO class for input tuple(as shown above). For output table configuration, we need to initialise StreamEndpoint with immediate downstream operator's [...]
+<p>When executeSQL method is called, the specified SQL is converted to DAG as described in the previous section. Both examples read CSV data from Kafka. But in the pure style SQL example the <code>KafkaInputOperator</code> and <code>CSVParser</code> in the DAG are created implicitly by the use of the KafkaEndpoint usage while in the fusion style example, they are explicitly defined as part of the DAG which is then extended with other operators as shown in the image below. </p>
+<p><img alt="image alt text" src="../images/image_2.jpg" title="DAG for Fusion Style SQL Application" /></p>
+<p>For all Apex-Calcite integration examples, click <a href="https://github.com/apache/apex-malhar/tree/master/demos/sql">here</a>. </p>
+<h1 id="ongoing-efforts">Ongoing efforts</h1>
+<p>Apache Apex-Calcite integration provides support for basic queries and efforts are underway to extend support for aggregations, sorting and other features using Tumbling, Hopping and Session Windows.
+Support for JSON, XML and JDBC endpoint are also planned. The goal of this integration is to make developing a streaming application using SQL easy so that SQL Developers don't have to write any java code at all.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../../operators/block_reader/" class="btn btn-neutral float-right" title="Block Reader">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../.." class="btn btn-neutral" title="Apache Apex Malhar"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../.." style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../../operators/block_reader/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/apis/images/calcite-apex.png b/content/docs/malhar-3.8/apis/images/calcite-apex.png
new file mode 100644
index 0000000..674b2b8
Binary files /dev/null and b/content/docs/malhar-3.8/apis/images/calcite-apex.png differ
diff --git a/content/docs/malhar-3.8/apis/images/image_1.jpg b/content/docs/malhar-3.8/apis/images/image_1.jpg
new file mode 100644
index 0000000..1779430
Binary files /dev/null and b/content/docs/malhar-3.8/apis/images/image_1.jpg differ
diff --git a/content/docs/malhar-3.8/apis/images/image_2.jpg b/content/docs/malhar-3.8/apis/images/image_2.jpg
new file mode 100644
index 0000000..5f6c0b4
Binary files /dev/null and b/content/docs/malhar-3.8/apis/images/image_2.jpg differ
diff --git a/content/docs/malhar-3.8/base.html b/content/docs/malhar-3.8/base.html
new file mode 100644
index 0000000..87c9f8a
--- /dev/null
+++ b/content/docs/malhar-3.8/base.html
@@ -0,0 +1,118 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  {% if page_description %}<meta name="description" content="{{ page_description }}">{% endif %}
+  {% if site_author %}<meta name="author" content="{{ site_author }}">{% endif %}
+  {% block htmltitle %}
+  <title>{% if page_title %}{{ page_title }} - {% endif %}{{ site_name }}</title>
+  {% endblock %}
+
+  {% if favicon %}<link rel="shortcut icon" href="{{ favicon }}">
+  {% else %}<link rel="shortcut icon" href="{{ base_url }}/img/favicon.ico">{% endif %}
+
+  {# CSS #}
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="{{ base_url }}/css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="{{ base_url }}/css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="{{ base_url }}/css/highlight.css">
+  {%- for path in extra_css %}
+  <link href="{{ path }}" rel="stylesheet">
+  {%- endfor %}
+
+  {% if current_page %}
+  <script>
+    // Current page data
+    var mkdocs_page_name = {{ page_title|tojson|safe }};
+    var mkdocs_page_input_path = {{ current_page.input_path|tojson|safe }};
+    var mkdocs_page_url = {{ current_page.abs_url|tojson|safe }};
+  </script>
+  {% endif %}
+  <script src="{{ base_url }}/js/jquery-2.1.1.min.js"></script>
+  <script src="{{ base_url }}/js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="{{ base_url }}/js/highlight.pack.js"></script>
+  <script src="{{ base_url }}/js/theme.js"></script>
+
+  {%- block extrahead %} {% endblock %}
+
+  {%- for path in extra_javascript %}
+  <script src="{{ path }}"></script>
+  {%- endfor %}
+
+  {% if google_analytics %}
+  <script>
+      (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
+      (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
+      m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
+      })(window,document,'script','//www.google-analytics.com/analytics.js','ga');
+
+      ga('create', '{{ google_analytics[0] }}', '{{ google_analytics[1] }}');
+      ga('send', 'pageview');
+  </script>
+  {% endif %}
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    {# SIDE NAV, TOGGLES ON MOBILE #}
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="{{ homepage_url }}" class="icon icon-home"> {{ site_name }}</a>
+        {% include "searchbox.html" %}
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          {% for nav_item in nav %}
+            <li>{% include "toc.html" %}<li>
+          {% endfor %}
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #}
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="{{ homepage_url }}">{{ site_name }}</a>
+      </nav>
+
+      {# PAGE CONTENT #}
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          {% include "breadcrumbs.html" %}
+          <div role="main">
+            <div class="section">
+              {% block content %}
+                {{ content }}
+              {% endblock %}
+            </div>
+          </div>
+	  {%- block footer %}
+          {% include "footer.html" %}
+	  {% endblock %}
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+{% include "versions.html" %}
+
+</body>
+</html>
+{% if current_page and current_page.is_homepage %}
+<!--
+MkDocs version : {{ mkdocs_version }}
+Build Date UTC : {{ build_date_utc }}
+-->
+{% endif %}
diff --git a/content/docs/malhar-3.8/breadcrumbs.html b/content/docs/malhar-3.8/breadcrumbs.html
new file mode 100644
index 0000000..01960e6
--- /dev/null
+++ b/content/docs/malhar-3.8/breadcrumbs.html
@@ -0,0 +1,25 @@
+<div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="{{ homepage_url }}">Docs</a> &raquo;</li>
+    {% if current_page %}
+      {% for doc in current_page.ancestors %}
+        {% if doc.link %}
+          <li><a href="{{ doc.link|e }}">{{ doc.title }}</a> &raquo;</li>
+        {% else %}
+          <li>{{ doc.title }} &raquo;</li>
+        {% endif %}
+      {% endfor %}
+    {% endif %}
+    {% if current_page %}<li>{{ current_page.title }}</li>{% endif %}
+    <li class="wy-breadcrumbs-aside">
+      {% if repo_url %}
+        {% if repo_name == 'GitHub' %}
+          <a href="{{ repo_url }}" class="icon icon-github"> Edit on GitHub</a>
+        {% elif repo_name == 'Bitbucket' %}
+          <a href="{{ repo_url }}" class="icon icon-bitbucket"> Edit on BitBucket</a>
+        {% endif %}
+      {% endif %}
+    </li>
+  </ul>
+  <hr/>
+</div>
diff --git a/content/docs/malhar-3.8/css/highlight.css b/content/docs/malhar-3.8/css/highlight.css
new file mode 100644
index 0000000..0ae40a7
--- /dev/null
+++ b/content/docs/malhar-3.8/css/highlight.css
@@ -0,0 +1,124 @@
+/*
+This is the GitHub theme for highlight.js
+
+github.com style (c) Vasily Polovnyov <va...@whiteants.net>
+
+*/
+
+.hljs {
+  display: block;
+  overflow-x: auto;
+  color: #333;
+  -webkit-text-size-adjust: none;
+}
+
+.hljs-comment,
+.diff .hljs-header,
+.hljs-javadoc {
+  color: #998;
+  font-style: italic;
+}
+
+.hljs-keyword,
+.css .rule .hljs-keyword,
+.hljs-winutils,
+.nginx .hljs-title,
+.hljs-subst,
+.hljs-request,
+.hljs-status {
+  color: #333;
+  font-weight: bold;
+}
+
+.hljs-number,
+.hljs-hexcolor,
+.ruby .hljs-constant {
+  color: #008080;
+}
+
+.hljs-string,
+.hljs-tag .hljs-value,
+.hljs-phpdoc,
+.hljs-dartdoc,
+.tex .hljs-formula {
+  color: #d14;
+}
+
+.hljs-title,
+.hljs-id,
+.scss .hljs-preprocessor {
+  color: #900;
+  font-weight: bold;
+}
+
+.hljs-list .hljs-keyword,
+.hljs-subst {
+  font-weight: normal;
+}
+
+.hljs-class .hljs-title,
+.hljs-type,
+.vhdl .hljs-literal,
+.tex .hljs-command {
+  color: #458;
+  font-weight: bold;
+}
+
+.hljs-tag,
+.hljs-tag .hljs-title,
+.hljs-rule .hljs-property,
+.django .hljs-tag .hljs-keyword {
+  color: #000080;
+  font-weight: normal;
+}
+
+.hljs-attribute,
+.hljs-variable,
+.lisp .hljs-body,
+.hljs-name {
+  color: #008080;
+}
+
+.hljs-regexp {
+  color: #009926;
+}
+
+.hljs-symbol,
+.ruby .hljs-symbol .hljs-string,
+.lisp .hljs-keyword,
+.clojure .hljs-keyword,
+.scheme .hljs-keyword,
+.tex .hljs-special,
+.hljs-prompt {
+  color: #990073;
+}
+
+.hljs-built_in {
+  color: #0086b3;
+}
+
+.hljs-preprocessor,
+.hljs-pragma,
+.hljs-pi,
+.hljs-doctype,
+.hljs-shebang,
+.hljs-cdata {
+  color: #999;
+  font-weight: bold;
+}
+
+.hljs-deletion {
+  background: #fdd;
+}
+
+.hljs-addition {
+  background: #dfd;
+}
+
+.diff .hljs-change {
+  background: #0086b3;
+}
+
+.hljs-chunk {
+  color: #aaa;
+}
diff --git a/content/docs/malhar-3.8/css/theme.css b/content/docs/malhar-3.8/css/theme.css
new file mode 100644
index 0000000..3e564a4
--- /dev/null
+++ b/content/docs/malhar-3.8/css/theme.css
@@ -0,0 +1,12 @@
+/*
+ * This file is copied from the upstream ReadTheDocs Sphinx
+ * theme. To aid upgradability this file should *not* be edited.
+ * modifications we need should be included in theme_extra.css.
+ *
+ * https://github.com/rtfd/readthedocs.org/blob/master/media/css/sphinx_rtd_theme.css
+ */
+
+*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}audio:not([controls]){display:none}[hidden]{display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:hover,a:active{outline:0}abbr[ [...]
+ *  Font Awesome 4.1.0 by @davegandy - http://fontawesome.io - @fontawesome
+ *  License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License)
+ */@font-face{font-family:'FontAwesome';src:url("../fonts/fontawesome-webfont.eot?v=4.1.0");src:url("../fonts/fontawesome-webfont.eot?#iefix&v=4.1.0") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff?v=4.1.0") format("woff"),url("../fonts/fontawesome-webfont.ttf?v=4.1.0") format("truetype"),url("../fonts/fontawesome-webfont.svg?v=4.1.0#fontawesomeregular") format("svg");font-weight:normal;font-style:normal}.fa,.rst-content .admonition-title,.rst-content h1 .headerlink,. [...]
diff --git a/content/docs/malhar-3.8/css/theme_extra.css b/content/docs/malhar-3.8/css/theme_extra.css
new file mode 100644
index 0000000..9845d00
--- /dev/null
+++ b/content/docs/malhar-3.8/css/theme_extra.css
@@ -0,0 +1,154 @@
+/*
+ * Sphinx doesn't have support for section dividers like we do in
+ * MkDocs, this styles the section titles in the nav
+ *
+ * https://github.com/mkdocs/mkdocs/issues/175
+ */
+.wy-menu-vertical span {
+    line-height: 18px;
+    padding: 0.4045em 1.618em;
+    display: block;
+    position: relative;
+    font-size: 90%;
+    color: #838383;
+}
+
+.wy-menu-vertical .subnav a {
+    padding: 0.4045em 2.427em;
+}
+
+/*
+ * Long navigations run off the bottom of the screen as the nav
+ * area doesn't scroll.
+ *
+ * https://github.com/mkdocs/mkdocs/pull/202
+ */
+.wy-nav-side {
+    height: 100%;
+    overflow-y: auto;
+}
+
+/*
+ * readthedocs theme hides nav items when the window height is
+ * too small to contain them.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/#348
+ */
+.wy-menu-vertical ul {
+  margin-bottom: 2em;
+}
+
+/*
+ * Fix wrapping in the code highlighting
+ *
+ * https://github.com/mkdocs/mkdocs/issues/233
+ */
+code {
+    white-space: pre;
+    padding: 2px 5px;
+}
+
+/*
+ * Wrap inline code samples otherwise they shoot of the side and
+ * can't be read at all.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/313
+ */
+p code {
+    word-wrap: break-word;
+}
+
+/**
+ * Make code blocks display as blocks and give them the appropriate
+ * font size and padding.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/855
+ */
+pre code {
+  display: block;
+  padding: 12px;
+  font-size: 12px;
+}
+
+/*
+ * Fix link colors when the link text is inline code.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/718
+ */
+a code {
+    color: #2980B9;
+}
+a:hover code {
+    color: #3091d1;
+}
+a:visited code {
+    color: #9B59B6;
+}
+
+/*
+ * The CSS classes from highlight.js seem to clash with the
+ * ReadTheDocs theme causing some code to be incorrectly made
+ * bold and italic.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/411
+ */
+code.cs, code.c {
+    font-weight: inherit;
+    font-style: inherit;
+}
+
+/*
+ * Fix some issues with the theme and non-highlighted code
+ * samples. Without and highlighting styles attached the
+ * formatting is broken.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/319
+ */
+.no-highlight {
+  display: block;
+  padding: 0.5em;
+  color: #333;
+}
+
+
+/*
+ * Additions specific to the search functionality provided by MkDocs
+ */
+
+#mkdocs-search-results article h3
+{
+    margin-top: 23px;
+    border-top: 1px solid #E1E4E5;
+    padding-top: 24px;
+}
+
+#mkdocs-search-results article:first-child h3 {
+    border-top: none;
+}
+
+#mkdocs-search-query{
+    width: 100%;
+    border-radius: 50px;
+    padding: 6px 12px;
+    border-color: #D1D4D5;
+}
+
+.wy-menu-vertical li ul {
+    display: inherit;
+}
+
+.wy-menu-vertical li ul.subnav ul.subnav{
+    padding-left: 1em;
+}
+
+
+/*
+ * Improve inline code blocks within admonitions.
+ *
+ * https://github.com/mkdocs/mkdocs/issues/656
+ */
+ div.admonition code {
+  color: #404040;
+  border: 1px solid rgba(0, 0, 0, 0.2);
+  background: rgba(255, 255, 255, 0.7);
+}
diff --git a/content/docs/malhar-3.8/favicon.ico b/content/docs/malhar-3.8/favicon.ico
new file mode 100644
index 0000000..c0b3dae
Binary files /dev/null and b/content/docs/malhar-3.8/favicon.ico differ
diff --git a/content/docs/malhar-3.8/fonts/fontawesome-webfont.eot b/content/docs/malhar-3.8/fonts/fontawesome-webfont.eot
new file mode 100755
index 0000000..0662cb9
Binary files /dev/null and b/content/docs/malhar-3.8/fonts/fontawesome-webfont.eot differ
diff --git a/content/docs/malhar-3.8/fonts/fontawesome-webfont.svg b/content/docs/malhar-3.8/fonts/fontawesome-webfont.svg
new file mode 100755
index 0000000..2edb4ec
--- /dev/null
+++ b/content/docs/malhar-3.8/fonts/fontawesome-webfont.svg
@@ -0,0 +1,399 @@
+<?xml version="1.0" standalone="no"?>
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
+<svg xmlns="http://www.w3.org/2000/svg">
+<metadata></metadata>
+<defs>
+<font id="fontawesomeregular" horiz-adv-x="1536" >
+<font-face units-per-em="1792" ascent="1536" descent="-256" />
+<missing-glyph horiz-adv-x="448" />
+<glyph unicode=" "  horiz-adv-x="448" />
+<glyph unicode="&#x09;" horiz-adv-x="448" />
+<glyph unicode="&#xa0;" horiz-adv-x="448" />
+<glyph unicode="&#xa8;" horiz-adv-x="1792" />
+<glyph unicode="&#xa9;" horiz-adv-x="1792" />
+<glyph unicode="&#xae;" horiz-adv-x="1792" />
+<glyph unicode="&#xb4;" horiz-adv-x="1792" />
+<glyph unicode="&#xc6;" horiz-adv-x="1792" />
+<glyph unicode="&#x2000;" horiz-adv-x="768" />
+<glyph unicode="&#x2001;" />
+<glyph unicode="&#x2002;" horiz-adv-x="768" />
+<glyph unicode="&#x2003;" />
+<glyph unicode="&#x2004;" horiz-adv-x="512" />
+<glyph unicode="&#x2005;" horiz-adv-x="384" />
+<glyph unicode="&#x2006;" horiz-adv-x="256" />
+<glyph unicode="&#x2007;" horiz-adv-x="256" />
+<glyph unicode="&#x2008;" horiz-adv-x="192" />
+<glyph unicode="&#x2009;" horiz-adv-x="307" />
+<glyph unicode="&#x200a;" horiz-adv-x="85" />
+<glyph unicode="&#x202f;" horiz-adv-x="307" />
+<glyph unicode="&#x205f;" horiz-adv-x="384" />
+<glyph unicode="&#x2122;" horiz-adv-x="1792" />
+<glyph unicode="&#x221e;" horiz-adv-x="1792" />
+<glyph unicode="&#x2260;" horiz-adv-x="1792" />
+<glyph unicode="&#xe000;" horiz-adv-x="500" d="M0 0z" />
+<glyph unicode="&#xf000;" horiz-adv-x="1792" d="M1699 1350q0 -35 -43 -78l-632 -632v-768h320q26 0 45 -19t19 -45t-19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45t45 19h320v768l-632 632q-43 43 -43 78q0 23 18 36.5t38 17.5t43 4h1408q23 0 43 -4t38 -17.5t18 -36.5z" />
+<glyph unicode="&#xf001;" d="M1536 1312v-1120q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v537l-768 -237v-709q0 -50 -34 -89t-86 -60.5t-103.5 -32t-96.5 -10.5t-96.5 10.5t-103.5 32t-86 60.5t-34 89 t34 89t86 60.5t103.5 32t96.5 10.5q105 0 192 -39v967q0 31 19 56.5t49 35.5l832 256q12 4 28 4q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf002;" horiz-adv-x="1664" d="M1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -52 -38 -90t-90 -38q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5 t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150 -225t55.5 -273.5q0 -220 -124 -399l343 -343q37 -37 37 -90z" />
+<glyph unicode="&#xf003;" horiz-adv-x="1792" d="M1664 32v768q-32 -36 -69 -66q-268 -206 -426 -338q-51 -43 -83 -67t-86.5 -48.5t-102.5 -24.5h-1h-1q-48 0 -102.5 24.5t-86.5 48.5t-83 67q-158 132 -426 338q-37 30 -69 66v-768q0 -13 9.5 -22.5t22.5 -9.5h1472q13 0 22.5 9.5t9.5 22.5zM1664 1083v11v13.5t-0.5 13 t-3 12.5t-5.5 9t-9 7.5t-14 2.5h-1472q-13 0 -22.5 -9.5t-9.5 -22.5q0 -168 147 -284q193 -152 401 -317q6 -5 35 -29.5t46 -37.5t44.5 -31.5t50.5 -27.5t43 -9h1h1q20 0 43 9t50.5 27.5t44.5 31.5t46 37.5t35 [...]
+<glyph unicode="&#xf004;" horiz-adv-x="1792" d="M896 -128q-26 0 -44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -68.5t76 -68q36 36 76 68t95.5 68.5t120 58t126.5 21.5q224 0 351 -124t127 -344q0 -221 -229 -450l-623 -600 q-18 -18 -44 -18z" />
+<glyph unicode="&#xf005;" horiz-adv-x="1664" d="M1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -21 -10.5 -35.5t-30.5 -14.5q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455 l502 -73q56 -9 56 -46z" />
+<glyph unicode="&#xf006;" horiz-adv-x="1664" d="M1137 532l306 297l-422 62l-189 382l-189 -382l-422 -62l306 -297l-73 -421l378 199l377 -199zM1664 889q0 -22 -26 -48l-363 -354l86 -500q1 -7 1 -20q0 -50 -41 -50q-19 0 -40 12l-449 236l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500 l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41t49 -41l225 -455l502 -73q56 -9 56 -46z" />
+<glyph unicode="&#xf007;" horiz-adv-x="1408" d="M1408 131q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5t43 97.5t62 81t85.5 53.5t111.5 20q9 0 42 -21.5t74.5 -48t108 -48t133.5 -21.5t133.5 21.5t108 48t74.5 48t42 21.5q61 0 111.5 -20t85.5 -53.5t62 -81 t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM1088 1024q0 -159 -112.5 -271.5t-271.5 -112.5t-271.5 112.5t-112.5 271.5t112.5 271.5t271.5 112.5t271.5 -112.5t112.5 -271.5z" />
+<glyph unicode="&#xf008;" horiz-adv-x="1920" d="M384 -64v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 320v128q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM384 704v128q0 26 -19 45t-45 19h-128 q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1408 -64v512q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-512q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM384 1088v128q0 26 -19 [...]
+<glyph unicode="&#xf009;" horiz-adv-x="1664" d="M768 512v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM768 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 512v-384q0 -52 -38 -90t-90 -38 h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90zM1664 1280v-384q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v384q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" />
+<glyph unicode="&#xf00a;" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 288v-192q0 -40 -28 -68t-68 -28h-320 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1152 800v-192q0 -40  [...]
+<glyph unicode="&#xf00b;" horiz-adv-x="1792" d="M512 288v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM512 800v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 288v-192q0 -40 -28 -68t-68 -28h-960 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h960q40 0 68 -28t28 -68zM512 1312v-192q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h320q40 0 68 -28t28 -68zM1792 800v-192q0 -40  [...]
+<glyph unicode="&#xf00c;" horiz-adv-x="1792" d="M1671 970q0 -40 -28 -68l-724 -724l-136 -136q-28 -28 -68 -28t-68 28l-136 136l-362 362q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -295l656 657q28 28 68 28t68 -28l136 -136q28 -28 28 -68z" />
+<glyph unicode="&#xf00d;" horiz-adv-x="1408" d="M1298 214q0 -40 -28 -68l-136 -136q-28 -28 -68 -28t-68 28l-294 294l-294 -294q-28 -28 -68 -28t-68 28l-136 136q-28 28 -28 68t28 68l294 294l-294 294q-28 28 -28 68t28 68l136 136q28 28 68 28t68 -28l294 -294l294 294q28 28 68 28t68 -28l136 -136q28 -28 28 -68 t-28 -68l-294 -294l294 -294q28 -28 28 -68z" />
+<glyph unicode="&#xf00e;" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-224q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v224h-224q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h224v224q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5v-224h224 q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5zM1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343  [...]
+<glyph unicode="&#xf010;" horiz-adv-x="1664" d="M1024 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-576q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h576q13 0 22.5 -9.5t9.5 -22.5zM1152 704q0 185 -131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5t316.5 131.5t131.5 316.5z M1664 -128q0 -53 -37.5 -90.5t-90.5 -37.5q-54 0 -90 38l-343 342q-179 -124 -399 -124q-143 0 -273.5 55.5t-225 150t-150 225t-55.5 273.5t55.5 273.5t150 225t225 150t273.5 55.5t273.5 -55.5t225 -150t150  [...]
+<glyph unicode="&#xf011;" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61t-298 61t-245 164t-164 245t-61 298q0 182 80.5 343t226.5 270q43 32 95.5 25t83.5 -50q32 -42 24.5 -94.5t-49.5 -84.5q-98 -74 -151.5 -181t-53.5 -228q0 -104 40.5 -198.5t109.5 -163.5t163.5 -109.5 t198.5 -40.5t198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5q0 121 -53.5 228t-151.5 181q-42 32 -49.5 84.5t24.5 94.5q31 43 84 50t95 -25q146 -109 226.5 -270t80.5 -343zM896 1408v-640q0 -52 -38 -90t-90 -38t-90 38t-38 90v640q0  [...]
+<glyph unicode="&#xf012;" horiz-adv-x="1792" d="M256 96v-192q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM640 224v-320q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1024 480v-576q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23 v576q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1408 864v-960q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v960q0 14 9 23t23 9h192q14 0 23 -9t9 -23zM1792 1376v-1472q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t [...]
+<glyph unicode="&#xf013;" d="M1024 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1536 749v-222q0 -12 -8 -23t-20 -13l-185 -28q-19 -54 -39 -91q35 -50 107 -138q10 -12 10 -25t-9 -23q-27 -37 -99 -108t-94 -71q-12 0 -26 9l-138 108q-44 -23 -91 -38 q-16 -136 -29 -186q-7 -28 -36 -28h-222q-14 0 -24.5 8.5t-11.5 21.5l-28 184q-49 16 -90 37l-141 -107q-10 -9 -25 -9q-14 0 -25 11q-126 114 -165 168q-7 10 -7 23q0 12 8 23q15 21 51 66.5t54 70.5q-27 50 -41 99l-183 27q-13 2 -21 12.5 [...]
+<glyph unicode="&#xf014;" horiz-adv-x="1408" d="M512 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM768 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1024 800v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1152 76v948h-896v-948q0 -22 7 -40.5t14.5 -27t10.5 -8.5h832q3 0 10.5 8.5t14.5 27t7 40.5zM480 1152h448l-48 117q-7 9 -17 11h-317q-10 -2 -17 -11zM1408 1120v-64q [...]
+<glyph unicode="&#xf015;" horiz-adv-x="1664" d="M1408 544v-480q0 -26 -19 -45t-45 -19h-384v384h-256v-384h-384q-26 0 -45 19t-19 45v480q0 1 0.5 3t0.5 3l575 474l575 -474q1 -2 1 -6zM1631 613l-62 -74q-8 -9 -21 -11h-3q-13 0 -21 7l-692 577l-692 -577q-12 -8 -24 -7q-13 2 -21 11l-62 74q-8 10 -7 23.5t11 21.5 l719 599q32 26 76 26t76 -26l244 -204v195q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-408l219 -182q10 -8 11 -21.5t-7 -23.5z" />
+<glyph unicode="&#xf016;" horiz-adv-x="1280" d="M128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280zM768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z " />
+<glyph unicode="&#xf017;" d="M896 992v-448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v352q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf018;" horiz-adv-x="1920" d="M1111 540v4l-24 320q-1 13 -11 22.5t-23 9.5h-186q-13 0 -23 -9.5t-11 -22.5l-24 -320v-4q-1 -12 8 -20t21 -8h244q12 0 21 8t8 20zM1870 73q0 -73 -46 -73h-704q13 0 22 9.5t8 22.5l-20 256q-1 13 -11 22.5t-23 9.5h-272q-13 0 -23 -9.5t-11 -22.5l-20 -256 q-1 -13 8 -22.5t22 -9.5h-704q-46 0 -46 73q0 54 26 116l417 1044q8 19 26 33t38 14h339q-13 0 -23 -9.5t-11 -22.5l-15 -192q-1 -14 8 -23t22 -9h166q13 0 22 9t8 23l-15 192q-1 13 -11 22.5t-23 9.5h339q20 0 38 -14t2 [...]
+<glyph unicode="&#xf019;" horiz-adv-x="1664" d="M1280 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 416v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h465l135 -136 q58 -56 136 -56t136 56l136 136h464q40 0 68 -28t28 -68zM1339 985q17 -41 -14 -70l-448 -448q-18 -19 -45 -19t-45 19l-448 448q-31 29 -14 70q17 39 59 39h256v448q0 26 19 45t45 19h256q26 0 45 -19t19 -45v-448h256q4 [...]
+<glyph unicode="&#xf01a;" d="M1120 608q0 -12 -10 -24l-319 -319q-11 -9 -23 -9t-23 9l-320 320q-15 16 -7 35q8 20 30 20h192v352q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-352h192q14 0 23 -9t9 -23zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273 t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf01b;" d="M1118 660q-8 -20 -30 -20h-192v-352q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v352h-192q-14 0 -23 9t-9 23q0 12 10 24l319 319q11 9 23 9t23 -9l320 -320q15 -16 7 -35zM768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198 t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf01c;" d="M1023 576h316q-1 3 -2.5 8t-2.5 8l-212 496h-708l-212 -496q-1 -2 -2.5 -8t-2.5 -8h316l95 -192h320zM1536 546v-482q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v482q0 62 25 123l238 552q10 25 36.5 42t52.5 17h832q26 0 52.5 -17t36.5 -42l238 -552 q25 -61 25 -123z" />
+<glyph unicode="&#xf01d;" d="M1184 640q0 -37 -32 -55l-544 -320q-15 -9 -32 -9q-16 0 -32 8q-32 19 -32 56v640q0 37 32 56q33 18 64 -1l544 -320q32 -18 32 -55zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf01e;" d="M1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l138 138q-148 137 -349 137q-104 0 -198.5 -40.5t-163.5 -109.5t-109.5 -163.5t-40.5 -198.5t40.5 -198.5t109.5 -163.5t163.5 -109.5t198.5 -40.5q119 0 225 52t179 147q7 10 23 12q14 0 25 -9 l137 -138q9 -8 9.5 -20.5t-7.5 -22.5q-109 -132 -264 -204.5t-327 -72.5q-156 0 -298 61t-245 164t-164 245t-61 298t61 298t164 245t245 164t298 61q147 0 284.5 -55.5t244.5 -156.5l130 129q29 31 70 14q39 -17 39 -59z" />
+<glyph unicode="&#xf021;" d="M1511 480q0 -5 -1 -7q-64 -268 -268 -434.5t-478 -166.5q-146 0 -282.5 55t-243.5 157l-129 -129q-19 -19 -45 -19t-45 19t-19 45v448q0 26 19 45t45 19h448q26 0 45 -19t19 -45t-19 -45l-137 -137q71 -66 161 -102t187 -36q134 0 250 65t186 179q11 17 53 117 q8 23 30 23h192q13 0 22.5 -9.5t9.5 -22.5zM1536 1280v-448q0 -26 -19 -45t-45 -19h-448q-26 0 -45 19t-19 45t19 45l138 138q-148 137 -349 137q-134 0 -250 -65t-186 -179q-11 -17 -53 -117q-8 -23 -30 -23h-199q-13 0 -22.5 9.5t-9.5 2 [...]
+<glyph unicode="&#xf022;" horiz-adv-x="1792" d="M384 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 608v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M384 864v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM1536 352v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-13 0 -22.5 9.5t-9.5  [...]
+<glyph unicode="&#xf023;" horiz-adv-x="1152" d="M320 768h512v192q0 106 -75 181t-181 75t-181 -75t-75 -181v-192zM1152 672v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v192q0 184 132 316t316 132t316 -132t132 -316v-192h32q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf024;" horiz-adv-x="1792" d="M320 1280q0 -72 -64 -110v-1266q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v1266q-64 38 -64 110q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -25 -12.5 -38.5t-39.5 -27.5q-215 -116 -369 -116q-61 0 -123.5 22t-108.5 48 t-115.5 48t-142.5 22q-192 0 -464 -146q-17 -9 -33 -9q-26 0 -45 19t-19 45v742q0 32 31 55q21 14 79 43q236 120 421 120q107 0 200 -29t219 -88q38 -19 88 -19q54 0 117.5 21t110 47t88 47t54.5 21q26 0 45 -1 [...]
+<glyph unicode="&#xf025;" horiz-adv-x="1664" d="M1664 650q0 -166 -60 -314l-20 -49l-185 -33q-22 -83 -90.5 -136.5t-156.5 -53.5v-32q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v576q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-32q71 0 130 -35.5t93 -95.5l68 12q29 95 29 193q0 148 -88 279t-236.5 209t-315.5 78 t-315.5 -78t-236.5 -209t-88 -279q0 -98 29 -193l68 -12q34 60 93 95.5t130 35.5v32q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-576q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v32q-88 0 -156.5 53.5t-90.5 136.5l-185  [...]
+<glyph unicode="&#xf026;" horiz-adv-x="768" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45z" />
+<glyph unicode="&#xf027;" horiz-adv-x="1152" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142z" />
+<glyph unicode="&#xf028;" horiz-adv-x="1664" d="M768 1184v-1088q0 -26 -19 -45t-45 -19t-45 19l-333 333h-262q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h262l333 333q19 19 45 19t45 -19t19 -45zM1152 640q0 -76 -42.5 -141.5t-112.5 -93.5q-10 -5 -25 -5q-26 0 -45 18.5t-19 45.5q0 21 12 35.5t29 25t34 23t29 35.5 t12 57t-12 57t-29 35.5t-34 23t-29 25t-12 35.5q0 27 19 45.5t45 18.5q15 0 25 -5q70 -27 112.5 -93t42.5 -142zM1408 640q0 -153 -85 -282.5t-225 -188.5q-13 -5 -25 -5q-27 0 -46 19t-19 45q0 39 39 59q56  [...]
+<glyph unicode="&#xf029;" horiz-adv-x="1408" d="M384 384v-128h-128v128h128zM384 1152v-128h-128v128h128zM1152 1152v-128h-128v128h128zM128 129h384v383h-384v-383zM128 896h384v384h-384v-384zM896 896h384v384h-384v-384zM640 640v-640h-640v640h640zM1152 128v-128h-128v128h128zM1408 128v-128h-128v128h128z M1408 640v-384h-384v128h-128v-384h-128v640h384v-128h128v128h128zM640 1408v-640h-640v640h640zM1408 1408v-640h-640v640h640z" />
+<glyph unicode="&#xf02a;" horiz-adv-x="1792" d="M63 0h-63v1408h63v-1408zM126 1h-32v1407h32v-1407zM220 1h-31v1407h31v-1407zM377 1h-31v1407h31v-1407zM534 1h-62v1407h62v-1407zM660 1h-31v1407h31v-1407zM723 1h-31v1407h31v-1407zM786 1h-31v1407h31v-1407zM943 1h-63v1407h63v-1407zM1100 1h-63v1407h63v-1407z M1226 1h-63v1407h63v-1407zM1352 1h-63v1407h63v-1407zM1446 1h-63v1407h63v-1407zM1635 1h-94v1407h94v-1407zM1698 1h-32v1407h32v-1407zM1792 0h-63v1408h63v-1408z" />
+<glyph unicode="&#xf02b;" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91z" />
+<glyph unicode="&#xf02c;" horiz-adv-x="1920" d="M448 1088q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1515 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-53 0 -90 37l-715 716q-38 37 -64.5 101t-26.5 117v416q0 52 38 90t90 38h416q53 0 117 -26.5t102 -64.5 l715 -714q37 -39 37 -91zM1899 512q0 -53 -37 -90l-491 -492q-39 -37 -91 -37q-36 0 -59 14t-53 45l470 470q37 37 37 90q0 52 -37 91l-715 714q-38 38 -102 64.5t-117 26.5h224q53 0 117 -26.5t102 -64.5l7 [...]
+<glyph unicode="&#xf02d;" horiz-adv-x="1664" d="M1639 1058q40 -57 18 -129l-275 -906q-19 -64 -76.5 -107.5t-122.5 -43.5h-923q-77 0 -148.5 53.5t-99.5 131.5q-24 67 -2 127q0 4 3 27t4 37q1 8 -3 21.5t-3 19.5q2 11 8 21t16.5 23.5t16.5 23.5q23 38 45 91.5t30 91.5q3 10 0.5 30t-0.5 28q3 11 17 28t17 23 q21 36 42 92t25 90q1 9 -2.5 32t0.5 28q4 13 22 30.5t22 22.5q19 26 42.5 84.5t27.5 96.5q1 8 -3 25.5t-2 26.5q2 8 9 18t18 23t17 21q8 12 16.5 30.5t15 35t16 36t19.5 32t26.5 23.5t36 11.5t47.5 -5.5l-1 -3q38 9 51 [...]
+<glyph unicode="&#xf02e;" horiz-adv-x="1280" d="M1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289q0 34 19.5 62t52.5 41q21 9 44 9h1048z" />
+<glyph unicode="&#xf02f;" horiz-adv-x="1664" d="M384 0h896v256h-896v-256zM384 640h896v384h-160q-40 0 -68 28t-28 68v160h-640v-640zM1536 576q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 576v-416q0 -13 -9.5 -22.5t-22.5 -9.5h-224v-160q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68 v160h-224q-13 0 -22.5 9.5t-9.5 22.5v416q0 79 56.5 135.5t135.5 56.5h64v544q0 40 28 68t68 28h672q40 0 88 -20t76 -48l152 -152q28 -28 48 -76t20 -88v-256h64q79 0 135.5 -56.5t56.5 -135.5z" />
+<glyph unicode="&#xf030;" horiz-adv-x="1920" d="M960 864q119 0 203.5 -84.5t84.5 -203.5t-84.5 -203.5t-203.5 -84.5t-203.5 84.5t-84.5 203.5t84.5 203.5t203.5 84.5zM1664 1280q106 0 181 -75t75 -181v-896q0 -106 -75 -181t-181 -75h-1408q-106 0 -181 75t-75 181v896q0 106 75 181t181 75h224l51 136 q19 49 69.5 84.5t103.5 35.5h512q53 0 103.5 -35.5t69.5 -84.5l51 -136h224zM960 128q185 0 316.5 131.5t131.5 316.5t-131.5 316.5t-316.5 131.5t-316.5 -131.5t-131.5 -316.5t131.5 -316.5t316.5 -131.5z" />
+<glyph unicode="&#xf031;" horiz-adv-x="1664" d="M725 977l-170 -450q73 -1 153.5 -2t119 -1.5t52.5 -0.5l29 2q-32 95 -92 241q-53 132 -92 211zM21 -128h-21l2 79q22 7 80 18q89 16 110 31q20 16 48 68l237 616l280 724h75h53l11 -21l205 -480q103 -242 124 -297q39 -102 96 -235q26 -58 65 -164q24 -67 65 -149 q22 -49 35 -57q22 -19 69 -23q47 -6 103 -27q6 -39 6 -57q0 -14 -1 -26q-80 0 -192 8q-93 8 -189 8q-79 0 -135 -2l-200 -11l-58 -2q0 45 4 78l131 28q56 13 68 23q12 12 12 27t-6 32l-47 114l-92 228l-450 2q-29 - [...]
+<glyph unicode="&#xf032;" horiz-adv-x="1408" d="M555 15q76 -32 140 -32q131 0 216 41t122 113q38 70 38 181q0 114 -41 180q-58 94 -141 126q-80 32 -247 32q-74 0 -101 -10v-144l-1 -173l3 -270q0 -15 12 -44zM541 761q43 -7 109 -7q175 0 264 65t89 224q0 112 -85 187q-84 75 -255 75q-52 0 -130 -13q0 -44 2 -77 q7 -122 6 -279l-1 -98q0 -43 1 -77zM0 -128l2 94q45 9 68 12q77 12 123 31q17 27 21 51q9 66 9 194l-2 497q-5 256 -9 404q-1 87 -11 109q-1 4 -12 12q-18 12 -69 15q-30 2 -114 13l-4 83l260 6l380 13l45 1q5 0 [...]
+<glyph unicode="&#xf033;" horiz-adv-x="1024" d="M0 -126l17 85q4 1 77 20q76 19 116 39q29 37 41 101l27 139l56 268l12 64q8 44 17 84.5t16 67t12.5 46.5t9 30.5t3.5 11.5l29 157l16 63l22 135l8 50v38q-41 22 -144 28q-28 2 -38 4l19 103l317 -14q39 -2 73 -2q66 0 214 9q33 2 68 4.5t36 2.5q-2 -19 -6 -38 q-7 -29 -13 -51q-55 -19 -109 -31q-64 -16 -101 -31q-12 -31 -24 -88q-9 -44 -13 -82q-44 -199 -66 -306l-61 -311l-38 -158l-43 -235l-12 -45q-2 -7 1 -27q64 -15 119 -21q36 -5 66 -10q-1 -29 -7 -58q-7 -31 -9 -41q- [...]
+<glyph unicode="&#xf034;" horiz-adv-x="1792" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l215 -1h293l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -42.5 2t-103.5 -1t-111 -1 q-34 0 -67 -5q-10 -97 -8 -136l1 -152v-332l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-88  [...]
+<glyph unicode="&#xf035;" d="M81 1407l54 -27q20 -5 211 -5h130l19 3l115 1l446 -1h318l34 -2q14 -1 28 7t21 16l7 8l42 1q15 0 28 -1v-104.5t1 -131.5l1 -100l-1 -58q0 -32 -4 -51q-39 -15 -68 -18q-25 43 -54 128q-8 24 -15.5 62.5t-11.5 65.5t-6 29q-13 15 -27 19q-7 2 -58.5 2t-138.5 -1t-128 -1 q-94 0 -127 -5q-10 -97 -8 -136l1 -152v52l3 -359l-1 -147q-1 -46 11 -85q49 -25 89 -32q2 0 18 -5t44 -13t43 -12q30 -8 50 -18q5 -45 5 -50q0 -10 -3 -29q-14 -1 -34 -1q-110 0 -187 10q-72 8 -238 8q-82 0 -233 -13q-45 -5 -7 [...]
+<glyph unicode="&#xf036;" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf037;" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1408 576v-128q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h896q26 0 45 -19t19 -45zM1664 960v-128q0 -26 -19 -45t-45 -19 h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1280 1344v-128q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h640q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf038;" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1280q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1280q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1536q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1536q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1152q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf039;" horiz-adv-x="1792" d="M1792 192v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 576v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 960v-128q0 -26 -19 -45 t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-128q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf03a;" horiz-adv-x="1792" d="M256 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM256 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5 t9.5 -22.5zM256 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-192q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h192q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 - [...]
+<glyph unicode="&#xf03b;" horiz-adv-x="1792" d="M384 992v-576q0 -13 -9.5 -22.5t-22.5 -9.5q-14 0 -23 9l-288 288q-9 9 -9 23t9 23l288 288q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22 [...]
+<glyph unicode="&#xf03c;" horiz-adv-x="1792" d="M352 704q0 -14 -9 -23l-288 -288q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v576q0 13 9.5 22.5t22.5 9.5q14 0 23 -9l288 -288q9 -9 9 -23zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1728q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1728q13 0 22.5 -9.5 t9.5 -22.5zM1792 608v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1088q13 0 22.5 -9.5t9.5 -22.5zM1792 992v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1088q- [...]
+<glyph unicode="&#xf03d;" horiz-adv-x="1792" d="M1792 1184v-1088q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-403 403v-166q0 -119 -84.5 -203.5t-203.5 -84.5h-704q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h704q119 0 203.5 -84.5t84.5 -203.5v-165l403 402q18 19 45 19q12 0 25 -5 q39 -17 39 -59z" />
+<glyph unicode="&#xf03e;" horiz-adv-x="1920" d="M640 960q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1664 576v-448h-1408v192l320 320l160 -160l512 512zM1760 1280h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5v1216 q0 13 -9.5 22.5t-22.5 9.5zM1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf040;" d="M363 0l91 91l-235 235l-91 -91v-107h128v-128h107zM886 928q0 22 -22 22q-10 0 -17 -7l-542 -542q-7 -7 -7 -17q0 -22 22 -22q10 0 17 7l542 542q7 7 7 17zM832 1120l416 -416l-832 -832h-416v416zM1515 1024q0 -53 -37 -90l-166 -166l-416 416l166 165q36 38 90 38 q53 0 91 -38l235 -234q37 -39 37 -91z" />
+<glyph unicode="&#xf041;" horiz-adv-x="1024" d="M768 896q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1024 896q0 -109 -33 -179l-364 -774q-16 -33 -47.5 -52t-67.5 -19t-67.5 19t-46.5 52l-365 774q-33 70 -33 179q0 212 150 362t362 150t362 -150t150 -362z" />
+<glyph unicode="&#xf042;" d="M768 96v1088q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf043;" horiz-adv-x="1024" d="M512 384q0 36 -20 69q-1 1 -15.5 22.5t-25.5 38t-25 44t-21 50.5q-4 16 -21 16t-21 -16q-7 -23 -21 -50.5t-25 -44t-25.5 -38t-15.5 -22.5q-20 -33 -20 -69q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1024 512q0 -212 -150 -362t-362 -150t-362 150t-150 362 q0 145 81 275q6 9 62.5 90.5t101 151t99.5 178t83 201.5q9 30 34 47t51 17t51.5 -17t33.5 -47q28 -93 83 -201.5t99.5 -178t101 -151t62.5 -90.5q81 -127 81 -275z" />
+<glyph unicode="&#xf044;" horiz-adv-x="1792" d="M888 352l116 116l-152 152l-116 -116v-56h96v-96h56zM1328 1072q-16 16 -33 -1l-350 -350q-17 -17 -1 -33t33 1l350 350q17 17 1 33zM1408 478v-190q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-14 -14 -32 -8q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v126q0 13 9 22l64 64q15 15 35 7t20 -29zM1312 12 [...]
+<glyph unicode="&#xf045;" horiz-adv-x="1664" d="M1408 547v-259q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h255v0q13 0 22.5 -9.5t9.5 -22.5q0 -27 -26 -32q-77 -26 -133 -60q-10 -4 -16 -4h-112q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832 q66 0 113 47t47 113v214q0 19 18 29q28 13 54 37q16 16 35 8q21 -9 21 -29zM1645 1043l-384 -384q-18 -19 -45 -19q-12 0 -25 5q-39 17 -39 59v192h-160q-323 0 -438 -131q-119 -137 -74 -473q3 -23 -20 -34 [...]
+<glyph unicode="&#xf046;" horiz-adv-x="1664" d="M1408 606v-318q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q63 0 117 -25q15 -7 18 -23q3 -17 -9 -29l-49 -49q-10 -10 -23 -10q-3 0 -9 2q-23 6 -45 6h-832q-66 0 -113 -47t-47 -113v-832 q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v254q0 13 9 22l64 64q10 10 23 10q6 0 12 -3q20 -8 20 -29zM1639 1095l-814 -814q-24 -24 -57 -24t-57 24l-430 430q-24 24 -24 57t24 57l110 110q24 24 57 24t57 -24l263  [...]
+<glyph unicode="&#xf047;" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-384v-384h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v384h-384v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45 t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h384v384h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45t-19 -45t-45 -19h-128v-384h384v128q0 26 19 45t45 19t45 -19l256  [...]
+<glyph unicode="&#xf048;" horiz-adv-x="1024" d="M979 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19z" />
+<glyph unicode="&#xf049;" horiz-adv-x="1792" d="M1747 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-9 9 -13 19v-678q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-678q4 11 13 19l710 710 q19 19 32 13t13 -32v-710q4 11 13 19z" />
+<glyph unicode="&#xf04a;" horiz-adv-x="1664" d="M1619 1395q19 19 32 13t13 -32v-1472q0 -26 -13 -32t-32 13l-710 710q-8 9 -13 19v-710q0 -26 -13 -32t-32 13l-710 710q-19 19 -19 45t19 45l710 710q19 19 32 13t13 -32v-710q5 11 13 19z" />
+<glyph unicode="&#xf04b;" horiz-adv-x="1408" d="M1384 609l-1328 -738q-23 -13 -39.5 -3t-16.5 36v1472q0 26 16.5 36t39.5 -3l1328 -738q23 -13 23 -31t-23 -31z" />
+<glyph unicode="&#xf04c;" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45zM640 1344v-1408q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h512q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf04d;" d="M1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf04e;" horiz-adv-x="1664" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q19 -19 19 -45t-19 -45l-710 -710q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" />
+<glyph unicode="&#xf050;" horiz-adv-x="1792" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v710q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19l-710 -710 q-19 -19 -32 -13t-13 32v710q-5 -10 -13 -19z" />
+<glyph unicode="&#xf051;" horiz-adv-x="1024" d="M45 -115q-19 -19 -32 -13t-13 32v1472q0 26 13 32t32 -13l710 -710q8 -8 13 -19v678q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-1408q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v678q-5 -10 -13 -19z" />
+<glyph unicode="&#xf052;" horiz-adv-x="1538" d="M14 557l710 710q19 19 45 19t45 -19l710 -710q19 -19 13 -32t-32 -13h-1472q-26 0 -32 13t13 32zM1473 0h-1408q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1408q26 0 45 -19t19 -45v-256q0 -26 -19 -45t-45 -19z" />
+<glyph unicode="&#xf053;" horiz-adv-x="1152" d="M742 -37l-652 651q-37 37 -37 90.5t37 90.5l652 651q37 37 90.5 37t90.5 -37l75 -75q37 -37 37 -90.5t-37 -90.5l-486 -486l486 -485q37 -38 37 -91t-37 -90l-75 -75q-37 -37 -90.5 -37t-90.5 37z" />
+<glyph unicode="&#xf054;" horiz-adv-x="1152" d="M1099 704q0 -52 -37 -91l-652 -651q-37 -37 -90 -37t-90 37l-76 75q-37 39 -37 91q0 53 37 90l486 486l-486 485q-37 39 -37 91q0 53 37 90l76 75q36 38 90 38t90 -38l652 -651q37 -37 37 -90z" />
+<glyph unicode="&#xf055;" d="M1216 576v128q0 26 -19 45t-45 19h-256v256q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-256h-256q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h256v-256q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v256h256q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5 t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf056;" d="M1216 576v128q0 26 -19 45t-45 19h-768q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h768q26 0 45 19t19 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5 t103 -385.5z" />
+<glyph unicode="&#xf057;" d="M1149 414q0 26 -19 45l-181 181l181 181q19 19 19 45q0 27 -19 46l-90 90q-19 19 -46 19q-26 0 -45 -19l-181 -181l-181 181q-19 19 -45 19q-27 0 -46 -19l-90 -90q-19 -19 -19 -46q0 -26 19 -45l181 -181l-181 -181q-19 -19 -19 -45q0 -27 19 -46l90 -90q19 -19 46 -19 q26 0 45 19l181 181l181 -181q19 -19 45 -19q27 0 46 19l90 90q19 19 19 46zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 - [...]
+<glyph unicode="&#xf058;" d="M1284 802q0 28 -18 46l-91 90q-19 19 -45 19t-45 -19l-408 -407l-226 226q-19 19 -45 19t-45 -19l-91 -90q-18 -18 -18 -46q0 -27 18 -45l362 -362q19 -19 45 -19q27 0 46 19l543 543q18 18 18 45zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf059;" d="M896 160v192q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h192q14 0 23 9t9 23zM1152 832q0 88 -55.5 163t-138.5 116t-170 41q-243 0 -371 -213q-15 -24 8 -42l132 -100q7 -6 19 -6q16 0 25 12q53 68 86 92q34 24 86 24q48 0 85.5 -26t37.5 -59 q0 -38 -20 -61t-68 -45q-63 -28 -115.5 -86.5t-52.5 -125.5v-36q0 -14 9 -23t23 -9h192q14 0 23 9t9 23q0 19 21.5 49.5t54.5 49.5q32 18 49 28.5t46 35t44.5 48t28 60.5t12.5 81zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-38 [...]
+<glyph unicode="&#xf05a;" d="M1024 160v160q0 14 -9 23t-23 9h-96v512q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h96v-320h-96q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23t23 -9h448q14 0 23 9t9 23zM896 1056v160q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-160q0 -14 9 -23 t23 -9h192q14 0 23 9t9 23zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf05b;" d="M1197 512h-109q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h109q-32 108 -112.5 188.5t-188.5 112.5v-109q0 -26 -19 -45t-45 -19h-128q-26 0 -45 19t-19 45v109q-108 -32 -188.5 -112.5t-112.5 -188.5h109q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-109 q32 -108 112.5 -188.5t188.5 -112.5v109q0 26 19 45t45 19h128q26 0 45 -19t19 -45v-109q108 32 188.5 112.5t112.5 188.5zM1536 704v-128q0 -26 -19 -45t-45 -19h-143q-37 -161 -154.5 -278.5t-278.5 -154.5v-143q0 -26 -19 -45t-45 -19h [...]
+<glyph unicode="&#xf05c;" d="M1097 457l-146 -146q-10 -10 -23 -10t-23 10l-137 137l-137 -137q-10 -10 -23 -10t-23 10l-146 146q-10 10 -10 23t10 23l137 137l-137 137q-10 10 -10 23t10 23l146 146q10 10 23 10t23 -10l137 -137l137 137q10 10 23 10t23 -10l146 -146q10 -10 10 -23t-10 -23 l-137 -137l137 -137q10 -10 10 -23t-10 -23zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385. [...]
+<glyph unicode="&#xf05d;" d="M1171 723l-422 -422q-19 -19 -45 -19t-45 19l-294 294q-19 19 -19 45t19 45l102 102q19 19 45 19t45 -19l147 -147l275 275q19 19 45 19t45 -19l102 -102q19 -19 19 -45t-19 -45zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198 t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf05e;" d="M1312 643q0 161 -87 295l-754 -753q137 -89 297 -89q111 0 211.5 43.5t173.5 116.5t116 174.5t43 212.5zM313 344l755 754q-135 91 -300 91q-148 0 -273 -73t-198 -199t-73 -274q0 -162 89 -299zM1536 643q0 -157 -61 -300t-163.5 -246t-245 -164t-298.5 -61t-298.5 61 t-245 164t-163.5 246t-61 300t61 299.5t163.5 245.5t245 164t298.5 61t298.5 -61t245 -164t163.5 -245.5t61 -299.5z" />
+<glyph unicode="&#xf060;" d="M1536 640v-128q0 -53 -32.5 -90.5t-84.5 -37.5h-704l293 -294q38 -36 38 -90t-38 -90l-75 -76q-37 -37 -90 -37q-52 0 -91 37l-651 652q-37 37 -37 90q0 52 37 91l651 650q38 38 91 38q52 0 90 -38l75 -74q38 -38 38 -91t-38 -91l-293 -293h704q52 0 84.5 -37.5 t32.5 -90.5z" />
+<glyph unicode="&#xf061;" d="M1472 576q0 -54 -37 -91l-651 -651q-39 -37 -91 -37q-51 0 -90 37l-75 75q-38 38 -38 91t38 91l293 293h-704q-52 0 -84.5 37.5t-32.5 90.5v128q0 53 32.5 90.5t84.5 37.5h704l-293 294q-38 36 -38 90t38 90l75 75q38 38 90 38q53 0 91 -38l651 -651q37 -35 37 -90z" />
+<glyph unicode="&#xf062;" horiz-adv-x="1664" d="M1611 565q0 -51 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-294 293v-704q0 -52 -37.5 -84.5t-90.5 -32.5h-128q-53 0 -90.5 32.5t-37.5 84.5v704l-294 -293q-36 -38 -90 -38t-90 38l-75 75q-38 38 -38 90q0 53 38 91l651 651q35 37 90 37q54 0 91 -37l651 -651 q37 -39 37 -91z" />
+<glyph unicode="&#xf063;" horiz-adv-x="1664" d="M1611 704q0 -53 -37 -90l-651 -652q-39 -37 -91 -37q-53 0 -90 37l-651 652q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l294 -294v704q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-704l294 294q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" />
+<glyph unicode="&#xf064;" horiz-adv-x="1792" d="M1792 896q0 -26 -19 -45l-512 -512q-19 -19 -45 -19t-45 19t-19 45v256h-224q-98 0 -175.5 -6t-154 -21.5t-133 -42.5t-105.5 -69.5t-80 -101t-48.5 -138.5t-17.5 -181q0 -55 5 -123q0 -6 2.5 -23.5t2.5 -26.5q0 -15 -8.5 -25t-23.5 -10q-16 0 -28 17q-7 9 -13 22 t-13.5 30t-10.5 24q-127 285 -127 451q0 199 53 333q162 403 875 403h224v256q0 26 19 45t45 19t45 -19l512 -512q19 -19 19 -45z" />
+<glyph unicode="&#xf065;" d="M755 480q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23zM1536 1344v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332 q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf066;" d="M768 576v-448q0 -26 -19 -45t-45 -19t-45 19l-144 144l-332 -332q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l332 332l-144 144q-19 19 -19 45t19 45t45 19h448q26 0 45 -19t19 -45zM1523 1248q0 -13 -10 -23l-332 -332l144 -144q19 -19 19 -45t-19 -45 t-45 -19h-448q-26 0 -45 19t-19 45v448q0 26 19 45t45 19t45 -19l144 -144l332 332q10 10 23 10t23 -10l114 -114q10 -10 10 -23z" />
+<glyph unicode="&#xf067;" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-416v-416q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v416h-416q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h416v416q0 40 28 68t68 28h192q40 0 68 -28t28 -68v-416h416q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf068;" horiz-adv-x="1408" d="M1408 800v-192q0 -40 -28 -68t-68 -28h-1216q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h1216q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf069;" horiz-adv-x="1664" d="M1482 486q46 -26 59.5 -77.5t-12.5 -97.5l-64 -110q-26 -46 -77.5 -59.5t-97.5 12.5l-266 153v-307q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v307l-266 -153q-46 -26 -97.5 -12.5t-77.5 59.5l-64 110q-26 46 -12.5 97.5t59.5 77.5l266 154l-266 154 q-46 26 -59.5 77.5t12.5 97.5l64 110q26 46 77.5 59.5t97.5 -12.5l266 -153v307q0 52 38 90t90 38h128q52 0 90 -38t38 -90v-307l266 153q46 26 97.5 12.5t77.5 -59.5l64 -110q26 -46 12.5 -97.5t-59.5 -77.5l-266 -154z" />
+<glyph unicode="&#xf06a;" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM896 161v190q0 14 -9 23.5t-22 9.5h-192q-13 0 -23 -10t-10 -23v-190q0 -13 10 -23t23 -10h192 q13 0 22 9.5t9 23.5zM894 505l18 621q0 12 -10 18q-10 8 -24 8h-220q-14 0 -24 -8q-10 -6 -10 -18l17 -621q0 -10 10 -17.5t24 -7.5h185q14 0 23.5 7.5t10.5 17.5z" />
+<glyph unicode="&#xf06b;" d="M928 180v56v468v192h-320v-192v-468v-56q0 -25 18 -38.5t46 -13.5h192q28 0 46 13.5t18 38.5zM472 1024h195l-126 161q-26 31 -69 31q-40 0 -68 -28t-28 -68t28 -68t68 -28zM1160 1120q0 40 -28 68t-68 28q-43 0 -69 -31l-125 -161h194q40 0 68 28t28 68zM1536 864v-320 q0 -14 -9 -23t-23 -9h-96v-416q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v416h-96q-14 0 -23 9t-9 23v320q0 14 9 23t23 9h440q-93 0 -158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5q107 0 168 -77l128 -165l128 165q61 [...]
+<glyph unicode="&#xf06c;" horiz-adv-x="1792" d="M1280 832q0 26 -19 45t-45 19q-172 0 -318 -49.5t-259.5 -134t-235.5 -219.5q-19 -21 -19 -45q0 -26 19 -45t45 -19q24 0 45 19q27 24 74 71t67 66q137 124 268.5 176t313.5 52q26 0 45 19t19 45zM1792 1030q0 -95 -20 -193q-46 -224 -184.5 -383t-357.5 -268 q-214 -108 -438 -108q-148 0 -286 47q-15 5 -88 42t-96 37q-16 0 -39.5 -32t-45 -70t-52.5 -70t-60 -32q-30 0 -51 11t-31 24t-27 42q-2 4 -6 11t-5.5 10t-3 9.5t-1.5 13.5q0 35 31 73.5t68 65.5t68 56t31 48q0 4 -14 3 [...]
+<glyph unicode="&#xf06d;" horiz-adv-x="1408" d="M1408 -160v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-1344q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h1344q13 0 22.5 -9.5t9.5 -22.5zM1152 896q0 -78 -24.5 -144t-64 -112.5t-87.5 -88t-96 -77.5t-87.5 -72t-64 -81.5t-24.5 -96.5q0 -96 67 -224l-4 1l1 -1 q-90 41 -160 83t-138.5 100t-113.5 122.5t-72.5 150.5t-27.5 184q0 78 24.5 144t64 112.5t87.5 88t96 77.5t87.5 72t64 81.5t24.5 96.5q0 94 -66 224l3 -1l-1 1q90 -41 160 -83t138.5 -100t113.5 -122.5t72.5 -150. [...]
+<glyph unicode="&#xf06e;" horiz-adv-x="1792" d="M1664 576q-152 236 -381 353q61 -104 61 -225q0 -185 -131.5 -316.5t-316.5 -131.5t-316.5 131.5t-131.5 316.5q0 121 61 225q-229 -117 -381 -353q133 -205 333.5 -326.5t434.5 -121.5t434.5 121.5t333.5 326.5zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5 t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1792 576q0 -34 -20 -69q-140 -230 -376.5 -368.5t-499.5 -138.5t-499.5 139t-376.5 368q-20 35 -20 69t20 69q140 229 376.5 [...]
+<glyph unicode="&#xf070;" horiz-adv-x="1792" d="M555 201l78 141q-87 63 -136 159t-49 203q0 121 61 225q-229 -117 -381 -353q167 -258 427 -375zM944 960q0 20 -14 34t-34 14q-125 0 -214.5 -89.5t-89.5 -214.5q0 -20 14 -34t34 -14t34 14t14 34q0 86 61 147t147 61q20 0 34 14t14 34zM1307 1151q0 -7 -1 -9 q-105 -188 -315 -566t-316 -567l-49 -89q-10 -16 -28 -16q-12 0 -134 70q-16 10 -16 28q0 12 44 87q-143 65 -263.5 173t-208.5 245q-20 31 -20 69t20 69q153 235 380 371t496 136q89 0 180 -17l54 97q10 16 28 16q5 0 [...]
+<glyph unicode="&#xf071;" horiz-adv-x="1792" d="M1024 161v190q0 14 -9.5 23.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -23.5v-190q0 -14 9.5 -23.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 23.5zM1022 535l18 459q0 12 -10 19q-13 11 -24 11h-220q-11 0 -24 -11q-10 -7 -10 -21l17 -457q0 -10 10 -16.5t24 -6.5h185 q14 0 23.5 6.5t10.5 16.5zM1008 1469l768 -1408q35 -63 -2 -126q-17 -29 -46.5 -46t-63.5 -17h-1536q-34 0 -63.5 17t-46.5 46q-37 63 -2 126l768 1408q17 31 47 49t65 18t65 -18t47 -49z" />
+<glyph unicode="&#xf072;" horiz-adv-x="1408" d="M1376 1376q44 -52 12 -148t-108 -172l-161 -161l160 -696q5 -19 -12 -33l-128 -96q-7 -6 -19 -6q-4 0 -7 1q-15 3 -21 16l-279 508l-259 -259l53 -194q5 -17 -8 -31l-96 -96q-9 -9 -23 -9h-2q-15 2 -24 13l-189 252l-252 189q-11 7 -13 23q-1 13 9 25l96 97q9 9 23 9 q6 0 8 -1l194 -53l259 259l-508 279q-14 8 -17 24q-2 16 9 27l128 128q14 13 30 8l665 -159l160 160q76 76 172 108t148 -12z" />
+<glyph unicode="&#xf073;" horiz-adv-x="1664" d="M128 -128h288v288h-288v-288zM480 -128h320v288h-320v-288zM128 224h288v320h-288v-320zM480 224h320v320h-320v-320zM128 608h288v288h-288v-288zM864 -128h320v288h-320v-288zM480 608h320v288h-320v-288zM1248 -128h288v288h-288v-288zM864 224h320v320h-320v-320z M512 1088v288q0 13 -9.5 22.5t-22.5 9.5h-64q-13 0 -22.5 -9.5t-9.5 -22.5v-288q0 -13 9.5 -22.5t22.5 -9.5h64q13 0 22.5 9.5t9.5 22.5zM1248 224h288v320h-288v-320zM864 608h320v288h-320v-288zM1248 608h28 [...]
+<glyph unicode="&#xf074;" horiz-adv-x="1792" d="M666 1055q-60 -92 -137 -273q-22 45 -37 72.5t-40.5 63.5t-51 56.5t-63 35t-81.5 14.5h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224q250 0 410 -225zM1792 256q0 -14 -9 -23l-320 -320q-9 -9 -23 -9q-13 0 -22.5 9.5t-9.5 22.5v192q-32 0 -85 -0.5t-81 -1t-73 1 t-71 5t-64 10.5t-63 18.5t-58 28.5t-59 40t-55 53.5t-56 69.5q59 93 136 273q22 -45 37 -72.5t40.5 -63.5t51 -56.5t63 -35t81.5 -14.5h256v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23zM1792 1152q0 -1 [...]
+<glyph unicode="&#xf075;" horiz-adv-x="1792" d="M1792 640q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22q-17 -2 -30.5 9t-17.5 29v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26 76q-157 89 -247.5 220t-90.5 281 q0 130 71 248.5t191 204.5t286 136.5t348 50.5q244 0 450 -85.5t326 -233t120 -321.5z" />
+<glyph unicode="&#xf076;" d="M1536 704v-128q0 -201 -98.5 -362t-274 -251.5t-395.5 -90.5t-395.5 90.5t-274 251.5t-98.5 362v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-128q0 -52 23.5 -90t53.5 -57t71 -30t64 -13t44 -2t44 2t64 13t71 30t53.5 57t23.5 90v128q0 26 19 45t45 19h384 q26 0 45 -19t19 -45zM512 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 -19t19 -45zM1536 1344v-384q0 -26 -19 -45t-45 -19h-384q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h384q26 0 45 [...]
+<glyph unicode="&#xf077;" horiz-adv-x="1664" d="M1611 320q0 -53 -37 -90l-75 -75q-38 -38 -91 -38q-54 0 -90 38l-486 485l-486 -485q-36 -38 -90 -38t-90 38l-75 75q-38 36 -38 90q0 53 38 91l651 651q37 37 90 37q52 0 91 -37l650 -651q38 -38 38 -91z" />
+<glyph unicode="&#xf078;" horiz-adv-x="1664" d="M1611 832q0 -53 -37 -90l-651 -651q-38 -38 -91 -38q-54 0 -90 38l-651 651q-38 36 -38 90q0 53 38 91l74 75q39 37 91 37q53 0 90 -37l486 -486l486 486q37 37 90 37q52 0 91 -37l75 -75q37 -39 37 -91z" />
+<glyph unicode="&#xf079;" horiz-adv-x="1920" d="M1280 32q0 -13 -9.5 -22.5t-22.5 -9.5h-960q-8 0 -13.5 2t-9 7t-5.5 8t-3 11.5t-1 11.5v13v11v160v416h-192q-26 0 -45 19t-19 45q0 24 15 41l320 384q19 22 49 22t49 -22l320 -384q15 -17 15 -41q0 -26 -19 -45t-45 -19h-192v-384h576q16 0 25 -11l160 -192q7 -11 7 -21 zM1920 448q0 -24 -15 -41l-320 -384q-20 -23 -49 -23t-49 23l-320 384q-15 17 -15 41q0 26 19 45t45 19h192v384h-576q-16 0 -25 12l-160 192q-7 9 -7 20q0 13 9.5 22.5t22.5 9.5h960q8 0 13.5 -2t9 -7t5.5  [...]
+<glyph unicode="&#xf07a;" horiz-adv-x="1664" d="M640 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1536 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1664 1088v-512q0 -24 -16 -42.5t-41 -21.5 l-1044 -122q1 -7 4.5 -21.5t6 -26.5t2.5 -22q0 -16 -24 -64h920q26 0 45 -19t19 -45t-19 -45t-45 -19h-1024q-26 0 -45 19t-19 45q0 14 11 39.5t29.5 59.5t20.5 38l-177 823h-204q-26 0 -45 19t-19 45t19 45t45 19h [...]
+<glyph unicode="&#xf07b;" horiz-adv-x="1664" d="M1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
+<glyph unicode="&#xf07c;" horiz-adv-x="1920" d="M1879 584q0 -31 -31 -66l-336 -396q-43 -51 -120.5 -86.5t-143.5 -35.5h-1088q-34 0 -60.5 13t-26.5 43q0 31 31 66l336 396q43 51 120.5 86.5t143.5 35.5h1088q34 0 60.5 -13t26.5 -43zM1536 928v-160h-832q-94 0 -197 -47.5t-164 -119.5l-337 -396l-5 -6q0 4 -0.5 12.5 t-0.5 12.5v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 -66t66 -158z" />
+<glyph unicode="&#xf07d;" horiz-adv-x="768" d="M704 1216q0 -26 -19 -45t-45 -19h-128v-1024h128q26 0 45 -19t19 -45t-19 -45l-256 -256q-19 -19 -45 -19t-45 19l-256 256q-19 19 -19 45t19 45t45 19h128v1024h-128q-26 0 -45 19t-19 45t19 45l256 256q19 19 45 19t45 -19l256 -256q19 -19 19 -45z" />
+<glyph unicode="&#xf07e;" horiz-adv-x="1792" d="M1792 640q0 -26 -19 -45l-256 -256q-19 -19 -45 -19t-45 19t-19 45v128h-1024v-128q0 -26 -19 -45t-45 -19t-45 19l-256 256q-19 19 -19 45t19 45l256 256q19 19 45 19t45 -19t19 -45v-128h1024v128q0 26 19 45t45 19t45 -19l256 -256q19 -19 19 -45z" />
+<glyph unicode="&#xf080;" horiz-adv-x="1920" d="M512 512v-384h-256v384h256zM896 1024v-896h-256v896h256zM1280 768v-640h-256v640h256zM1664 1152v-1024h-256v1024h256zM1792 32v1216q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-1216q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5z M1920 1248v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf081;" d="M1280 926q-56 -25 -121 -34q68 40 93 117q-65 -38 -134 -51q-61 66 -153 66q-87 0 -148.5 -61.5t-61.5 -148.5q0 -29 5 -48q-129 7 -242 65t-192 155q-29 -50 -29 -106q0 -114 91 -175q-47 1 -100 26v-2q0 -75 50 -133.5t123 -72.5q-29 -8 -51 -8q-13 0 -39 4 q21 -63 74.5 -104t121.5 -42q-116 -90 -261 -90q-26 0 -50 3q148 -94 322 -94q112 0 210 35.5t168 95t120.5 137t75 162t24.5 168.5q0 18 -1 27q63 45 105 109zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t [...]
+<glyph unicode="&#xf082;" d="M1307 618l23 219h-198v109q0 49 15.5 68.5t71.5 19.5h110v219h-175q-152 0 -218 -72t-66 -213v-131h-131v-219h131v-635h262v635h175zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960 q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf083;" horiz-adv-x="1792" d="M928 704q0 14 -9 23t-23 9q-66 0 -113 -47t-47 -113q0 -14 9 -23t23 -9t23 9t9 23q0 40 28 68t68 28q14 0 23 9t9 23zM1152 574q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM128 0h1536v128h-1536v-128zM1280 574q0 159 -112.5 271.5 t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM256 1216h384v128h-384v-128zM128 1024h1536v118v138h-828l-64 -128h-644v-128zM1792 1280v-1280q0 -53 -37.5 - [...]
+<glyph unicode="&#xf084;" horiz-adv-x="1792" d="M832 1024q0 80 -56 136t-136 56t-136 -56t-56 -136q0 -42 19 -83q-41 19 -83 19q-80 0 -136 -56t-56 -136t56 -136t136 -56t136 56t56 136q0 42 -19 83q41 -19 83 -19q80 0 136 56t56 136zM1683 320q0 -17 -49 -66t-66 -49q-9 0 -28.5 16t-36.5 33t-38.5 40t-24.5 26 l-96 -96l220 -220q28 -28 28 -68q0 -42 -39 -81t-81 -39q-40 0 -68 28l-671 671q-176 -131 -365 -131q-163 0 -265.5 102.5t-102.5 265.5q0 160 95 313t248 248t313 95q163 0 265.5 -102.5t102.5 -265.5q0 -189  [...]
+<glyph unicode="&#xf085;" horiz-adv-x="1920" d="M896 640q0 106 -75 181t-181 75t-181 -75t-75 -181t75 -181t181 -75t181 75t75 181zM1664 128q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 1152q0 52 -38 90t-90 38t-90 -38t-38 -90q0 -53 37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1280 731v-185q0 -10 -7 -19.5t-16 -10.5l-155 -24q-11 -35 -32 -76q34 -48 90 -115q7 -10 7 -20q0 -12 -7 -19q-23 -30 -82.5 -89.5t-78.5 -59.5q-11 0 -21 7l-115 90q-37 -19 -77 -31q-11  [...]
+<glyph unicode="&#xf086;" horiz-adv-x="1792" d="M1408 768q0 -139 -94 -257t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q-124 72 -195 177t-71 224 q0 139 94 257t256.5 186.5t353.5 68.5t353.5 -68.5t256.5 -186.5t94 -257zM1792 512q0 -120 -71 -224.5t-195 -176.5q10 -24 20.5 -44t25 -38.5t22.5 -29t26 -29.5t23 -25q1 -1 4 -4.5t4.5 -5t4 -5t3.5 -5.5l2.5 [...]
+<glyph unicode="&#xf087;" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 768q0 51 -39 89.5t-89 38.5h-352q0 58 48 159.5t48 160.5q0 98 -32 145t-128 47q-26 -26 -38 -85t-30.5 -125.5t-59.5 -109.5q-22 -23 -77 -91q-4 -5 -23 -30t-31.5 -41t-34.5 -42.5 t-40 -44t-38.5 -35.5t-40 -27t-35.5 -9h-32v-640h32q13 0 31.5 -3t33 -6.5t38 -11t35 -11.5t35.5 -12.5t29 -10.5q211 -73 342 -73h121q192 0 192 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 4 [...]
+<glyph unicode="&#xf088;" d="M256 1088q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 512q0 35 -21.5 81t-53.5 47q15 17 25 47.5t10 55.5q0 69 -53 119q18 32 18 69t-17.5 73.5t-47.5 52.5q5 30 5 56q0 85 -49 126t-136 41h-128q-131 0 -342 -73q-5 -2 -29 -10.5 t-35.5 -12.5t-35 -11.5t-38 -11t-33 -6.5t-31.5 -3h-32v-640h32q16 0 35.5 -9t40 -27t38.5 -35.5t40 -44t34.5 -42.5t31.5 -41t23 -30q55 -68 77 -91q41 -43 59.5 -109.5t30.5 -125.5t38 -85q96 0 128 47t32 145q0 59 -48 160.5t-48 159.5h [...]
+<glyph unicode="&#xf089;" horiz-adv-x="896" d="M832 1504v-1339l-449 -236q-22 -12 -40 -12q-21 0 -31.5 14.5t-10.5 35.5q0 6 2 20l86 500l-364 354q-25 27 -25 48q0 37 56 46l502 73l225 455q19 41 49 41z" />
+<glyph unicode="&#xf08a;" horiz-adv-x="1792" d="M1664 940q0 81 -21.5 143t-55 98.5t-81.5 59.5t-94 31t-98 8t-112 -25.5t-110.5 -64t-86.5 -72t-60 -61.5q-18 -22 -49 -22t-49 22q-24 28 -60 61.5t-86.5 72t-110.5 64t-112 25.5t-98 -8t-94 -31t-81.5 -59.5t-55 -98.5t-21.5 -143q0 -168 187 -355l581 -560l580 559 q188 188 188 356zM1792 940q0 -221 -229 -450l-623 -600q-18 -18 -44 -18t-44 18l-624 602q-10 8 -27.5 26t-55.5 65.5t-68 97.5t-53.5 121t-23.5 138q0 220 127 344t351 124q62 0 126.5 -21.5t120 -58t95.5 -6 [...]
+<glyph unicode="&#xf08b;" horiz-adv-x="1664" d="M640 96q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-119 0 -203.5 84.5t-84.5 203.5v704q0 119 84.5 203.5t203.5 84.5h320q13 0 22.5 -9.5t9.5 -22.5q0 -4 1 -20t0.5 -26.5t-3 -23.5t-10 -19.5t-20.5 -6.5h-320q-66 0 -113 -47t-47 -113v-704 q0 -66 47 -113t113 -47h288h11h13t11.5 -1t11.5 -3t8 -5.5t7 -9t2 -13.5zM1568 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t4 [...]
+<glyph unicode="&#xf08c;" d="M237 122h231v694h-231v-694zM483 1030q-1 52 -36 86t-93 34t-94.5 -34t-36.5 -86q0 -51 35.5 -85.5t92.5 -34.5h1q59 0 95 34.5t36 85.5zM1068 122h231v398q0 154 -73 233t-193 79q-136 0 -209 -117h2v101h-231q3 -66 0 -694h231v388q0 38 7 56q15 35 45 59.5t74 24.5 q116 0 116 -157v-371zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf08d;" horiz-adv-x="1152" d="M480 672v448q0 14 -9 23t-23 9t-23 -9t-9 -23v-448q0 -14 9 -23t23 -9t23 9t9 23zM1152 320q0 -26 -19 -45t-45 -19h-429l-51 -483q-2 -12 -10.5 -20.5t-20.5 -8.5h-1q-27 0 -32 27l-76 485h-404q-26 0 -45 19t-19 45q0 123 78.5 221.5t177.5 98.5v512q-52 0 -90 38 t-38 90t38 90t90 38h640q52 0 90 -38t38 -90t-38 -90t-90 -38v-512q99 0 177.5 -98.5t78.5 -221.5z" />
+<glyph unicode="&#xf08e;" horiz-adv-x="1792" d="M1408 608v-320q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h704q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-704q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v320 q0 14 9 23t23 9h64q14 0 23 -9t9 -23zM1792 1472v-512q0 -26 -19 -45t-45 -19t-45 19l-176 176l-652 -652q-10 -10 -23 -10t-23 10l-114 114q-10 10 -10 23t10 23l652 652l-176 176q-19 19 -19 45t19 45t45 19h512q2 [...]
+<glyph unicode="&#xf090;" d="M1184 640q0 -26 -19 -45l-544 -544q-19 -19 -45 -19t-45 19t-19 45v288h-448q-26 0 -45 19t-19 45v384q0 26 19 45t45 19h448v288q0 26 19 45t45 19t45 -19l544 -544q19 -19 19 -45zM1536 992v-704q0 -119 -84.5 -203.5t-203.5 -84.5h-320q-13 0 -22.5 9.5t-9.5 22.5 q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q66 0 113 47t47 113v704q0 66 -47 113t-113 47h-288h-11h-13t-11.5 1t-11.5 3t-8 5.5t-7 9t-2 13.5q0 4 -1 20t-0.5 26.5t3 23.5t10 19.5t20.5 6.5h320q119 0 203.5 -84.5t84.5 -2 [...]
+<glyph unicode="&#xf091;" horiz-adv-x="1664" d="M458 653q-74 162 -74 371h-256v-96q0 -78 94.5 -162t235.5 -113zM1536 928v96h-256q0 -209 -74 -371q141 29 235.5 113t94.5 162zM1664 1056v-128q0 -71 -41.5 -143t-112 -130t-173 -97.5t-215.5 -44.5q-42 -54 -95 -95q-38 -34 -52.5 -72.5t-14.5 -89.5q0 -54 30.5 -91 t97.5 -37q75 0 133.5 -45.5t58.5 -114.5v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 69 58.5 114.5t133.5 45.5q67 0 97.5 37t30.5 91q0 51 -14.5 89.5t-52.5 72.5q-53 41 -95 95q-113 5 -215.5 4 [...]
+<glyph unicode="&#xf092;" d="M394 184q-8 -9 -20 3q-13 11 -4 19q8 9 20 -3q12 -11 4 -19zM352 245q9 -12 0 -19q-8 -6 -17 7t0 18q9 7 17 -6zM291 305q-5 -7 -13 -2q-10 5 -7 12q3 5 13 2q10 -5 7 -12zM322 271q-6 -7 -16 3q-9 11 -2 16q6 6 16 -3q9 -11 2 -16zM451 159q-4 -12 -19 -6q-17 4 -13 15 t19 7q16 -5 13 -16zM514 154q0 -11 -16 -11q-17 -2 -17 11q0 11 16 11q17 2 17 -11zM572 164q2 -10 -14 -14t-18 8t14 15q16 2 18 -9zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-224q-16 0 -24.5 1t-19.5 5t-16 14.5t-5 [...]
+<glyph unicode="&#xf093;" horiz-adv-x="1664" d="M1280 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1536 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 288v-320q0 -40 -28 -68t-68 -28h-1472q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h427q21 -56 70.5 -92 t110.5 -36h256q61 0 110.5 36t70.5 92h427q40 0 68 -28t28 -68zM1339 936q-17 -40 -59 -40h-256v-448q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v448h-256q-42 0 -59 40q-17 39 14 69l448 448q18 19 45 19t45 [...]
+<glyph unicode="&#xf094;" d="M1407 710q0 44 -7 113.5t-18 96.5q-12 30 -17 44t-9 36.5t-4 48.5q0 23 5 68.5t5 67.5q0 37 -10 55q-4 1 -13 1q-19 0 -58 -4.5t-59 -4.5q-60 0 -176 24t-175 24q-43 0 -94.5 -11.5t-85 -23.5t-89.5 -34q-137 -54 -202 -103q-96 -73 -159.5 -189.5t-88 -236t-24.5 -248.5 q0 -40 12.5 -120t12.5 -121q0 -23 -11 -66.5t-11 -65.5t12 -36.5t34 -14.5q24 0 72.5 11t73.5 11q57 0 169.5 -15.5t169.5 -15.5q181 0 284 36q129 45 235.5 152.5t166 245.5t59.5 275zM1535 712q0 -165 -70 -327.5t-196 -288t- [...]
+<glyph unicode="&#xf095;" horiz-adv-x="1408" d="M1408 296q0 -27 -10 -70.5t-21 -68.5q-21 -50 -122 -106q-94 -51 -186 -51q-27 0 -52.5 3.5t-57.5 12.5t-47.5 14.5t-55.5 20.5t-49 18q-98 35 -175 83q-128 79 -264.5 215.5t-215.5 264.5q-48 77 -83 175q-3 9 -18 49t-20.5 55.5t-14.5 47.5t-12.5 57.5t-3.5 52.5 q0 92 51 186q56 101 106 122q25 11 68.5 21t70.5 10q14 0 21 -3q18 -6 53 -76q11 -19 30 -54t35 -63.5t31 -53.5q3 -4 17.5 -25t21.5 -35.5t7 -28.5q0 -20 -28.5 -50t-62 -55t-62 -53t-28.5 -46q0 -9 5 -22.5t8.5  [...]
+<glyph unicode="&#xf096;" horiz-adv-x="1408" d="M1120 1280h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113v832q0 66 -47 113t-113 47zM1408 1120v-832q0 -119 -84.5 -203.5t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832 q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf097;" horiz-adv-x="1280" d="M1152 1280h-1024v-1242l423 406l89 85l89 -85l423 -406v1242zM1164 1408q23 0 44 -9q33 -13 52.5 -41t19.5 -62v-1289q0 -34 -19.5 -62t-52.5 -41q-19 -8 -44 -8q-48 0 -83 32l-441 424l-441 -424q-36 -33 -83 -33q-23 0 -44 9q-33 13 -52.5 41t-19.5 62v1289 q0 34 19.5 62t52.5 41q21 9 44 9h1048z" />
+<glyph unicode="&#xf098;" d="M1280 343q0 11 -2 16q-3 8 -38.5 29.5t-88.5 49.5l-53 29q-5 3 -19 13t-25 15t-21 5q-18 0 -47 -32.5t-57 -65.5t-44 -33q-7 0 -16.5 3.5t-15.5 6.5t-17 9.5t-14 8.5q-99 55 -170.5 126.5t-126.5 170.5q-2 3 -8.5 14t-9.5 17t-6.5 15.5t-3.5 16.5q0 13 20.5 33.5t45 38.5 t45 39.5t20.5 36.5q0 10 -5 21t-15 25t-13 19q-3 6 -15 28.5t-25 45.5t-26.5 47.5t-25 40.5t-16.5 18t-16 2q-48 0 -101 -22q-46 -21 -80 -94.5t-34 -130.5q0 -16 2.5 -34t5 -30.5t9 -33t10 -29.5t12.5 -33t11 -30q60 -164 216. [...]
+<glyph unicode="&#xf099;" horiz-adv-x="1664" d="M1620 1128q-67 -98 -162 -167q1 -14 1 -42q0 -130 -38 -259.5t-115.5 -248.5t-184.5 -210.5t-258 -146t-323 -54.5q-271 0 -496 145q35 -4 78 -4q225 0 401 138q-105 2 -188 64.5t-114 159.5q33 -5 61 -5q43 0 85 11q-112 23 -185.5 111.5t-73.5 205.5v4q68 -38 146 -41 q-66 44 -105 115t-39 154q0 88 44 163q121 -149 294.5 -238.5t371.5 -99.5q-8 38 -8 74q0 134 94.5 228.5t228.5 94.5q140 0 236 -102q109 21 205 78q-37 -115 -142 -178q93 10 186 50z" />
+<glyph unicode="&#xf09a;" horiz-adv-x="768" d="M511 980h257l-30 -284h-227v-824h-341v824h-170v284h170v171q0 182 86 275.5t283 93.5h227v-284h-142q-39 0 -62.5 -6.5t-34 -23.5t-13.5 -34.5t-3 -49.5v-142z" />
+<glyph unicode="&#xf09b;" d="M1536 640q0 -251 -146.5 -451.5t-378.5 -277.5q-27 -5 -39.5 7t-12.5 30v211q0 97 -52 142q57 6 102.5 18t94 39t81 66.5t53 105t20.5 150.5q0 121 -79 206q37 91 -8 204q-28 9 -81 -11t-92 -44l-38 -24q-93 26 -192 26t-192 -26q-16 11 -42.5 27t-83.5 38.5t-86 13.5 q-44 -113 -7 -204q-79 -85 -79 -206q0 -85 20.5 -150t52.5 -105t80.5 -67t94 -39t102.5 -18q-40 -36 -49 -103q-21 -10 -45 -15t-57 -5t-65.5 21.5t-55.5 62.5q-19 32 -48.5 52t-49.5 24l-20 3q-21 0 -29 -4.5t-5 -11.5t9 -14t13 - [...]
+<glyph unicode="&#xf09c;" horiz-adv-x="1664" d="M1664 960v-256q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45v256q0 106 -75 181t-181 75t-181 -75t-75 -181v-192h96q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h672v192q0 185 131.5 316.5t316.5 131.5 t316.5 -131.5t131.5 -316.5z" />
+<glyph unicode="&#xf09d;" horiz-adv-x="1920" d="M1760 1408q66 0 113 -47t47 -113v-1216q0 -66 -47 -113t-113 -47h-1600q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1600zM160 1280q-13 0 -22.5 -9.5t-9.5 -22.5v-224h1664v224q0 13 -9.5 22.5t-22.5 9.5h-1600zM1760 0q13 0 22.5 9.5t9.5 22.5v608h-1664v-608 q0 -13 9.5 -22.5t22.5 -9.5h1600zM256 128v128h256v-128h-256zM640 128v128h384v-128h-384z" />
+<glyph unicode="&#xf09e;" horiz-adv-x="1408" d="M384 192q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM896 69q2 -28 -17 -48q-18 -21 -47 -21h-135q-25 0 -43 16.5t-20 41.5q-22 229 -184.5 391.5t-391.5 184.5q-25 2 -41.5 20t-16.5 43v135q0 29 21 47q17 17 43 17h5q160 -13 306 -80.5 t259 -181.5q114 -113 181.5 -259t80.5 -306zM1408 67q2 -27 -18 -47q-18 -20 -46 -20h-143q-26 0 -44.5 17.5t-19.5 42.5q-12 215 -101 408.5t-231.5 336t-336 231.5t-408.5 102q-25 1 -42.5 19.5t-17.5 43.5 [...]
+<glyph unicode="&#xf0a0;" d="M1040 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1296 320q0 -33 -23.5 -56.5t-56.5 -23.5t-56.5 23.5t-23.5 56.5t23.5 56.5t56.5 23.5t56.5 -23.5t23.5 -56.5zM1408 160v320q0 13 -9.5 22.5t-22.5 9.5 h-1216q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h1216q13 0 22.5 9.5t9.5 22.5zM178 640h1180l-157 482q-4 13 -16 21.5t-26 8.5h-782q-14 0 -26 -8.5t-16 -21.5zM1536 480v-320q0 -66 -47 -113t-113 -47h-1216q- [...]
+<glyph unicode="&#xf0a1;" horiz-adv-x="1792" d="M1664 896q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5v-384q0 -52 -38 -90t-90 -38q-417 347 -812 380q-58 -19 -91 -66t-31 -100.5t40 -92.5q-20 -33 -23 -65.5t6 -58t33.5 -55t48 -50t61.5 -50.5q-29 -58 -111.5 -83t-168.5 -11.5t-132 55.5q-7 23 -29.5 87.5 t-32 94.5t-23 89t-15 101t3.5 98.5t22 110.5h-122q-66 0 -113 47t-47 113v192q0 66 47 113t113 47h480q435 0 896 384q52 0 90 -38t38 -90v-384zM1536 292v954q-394 -302 -768 -343v-270q377 -42 768 -341z" />
+<glyph unicode="&#xf0a2;" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM183 128h1298q-164 181 -246.5 411.5t-82.5 484.5q0 256 -320 256t-320 -256q0 -254 -82.5 -484.5t-246.5 -411.5zM1664 128q0 -52 -38 -90t-90 -38 h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117 [...]
+<glyph unicode="&#xf0a3;" d="M1376 640l138 -135q30 -28 20 -70q-12 -41 -52 -51l-188 -48l53 -186q12 -41 -19 -70q-29 -31 -70 -19l-186 53l-48 -188q-10 -40 -51 -52q-12 -2 -19 -2q-31 0 -51 22l-135 138l-135 -138q-28 -30 -70 -20q-41 11 -51 52l-48 188l-186 -53q-41 -12 -70 19q-31 29 -19 70 l53 186l-188 48q-40 10 -52 51q-10 42 20 70l138 135l-138 135q-30 28 -20 70q12 41 52 51l188 48l-53 186q-12 41 19 70q29 31 70 19l186 -53l48 188q10 41 51 51q41 12 70 -19l135 -139l135 139q29 30 70 19q41 -10 51 -51l48 [...]
+<glyph unicode="&#xf0a4;" horiz-adv-x="1792" d="M256 192q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1664 768q0 51 -39 89.5t-89 38.5h-576q0 20 15 48.5t33 55t33 68t15 84.5q0 67 -44.5 97.5t-115.5 30.5q-24 0 -90 -139q-24 -44 -37 -65q-40 -64 -112 -145q-71 -81 -101 -106 q-69 -57 -140 -57h-32v-640h32q72 0 167 -32t193.5 -64t179.5 -32q189 0 189 167q0 26 -5 56q30 16 47.5 52.5t17.5 73.5t-18 69q53 50 53 119q0 25 -10 55.5t-25 47.5h331q52 0 90 38t38 90zM1792 769q0 -105 -75.5 -181t-1 [...]
+<glyph unicode="&#xf0a5;" horiz-adv-x="1792" d="M1376 128h32v640h-32q-35 0 -67.5 12t-62.5 37t-50 46t-49 54q-2 3 -3.5 4.5t-4 4.5t-4.5 5q-72 81 -112 145q-14 22 -38 68q-1 3 -10.5 22.5t-18.5 36t-20 35.5t-21.5 30.5t-18.5 11.5q-71 0 -115.5 -30.5t-44.5 -97.5q0 -43 15 -84.5t33 -68t33 -55t15 -48.5h-576 q-50 0 -89 -38.5t-39 -89.5q0 -52 38 -90t90 -38h331q-15 -17 -25 -47.5t-10 -55.5q0 -69 53 -119q-18 -32 -18 -69t17.5 -73.5t47.5 -52.5q-4 -24 -4 -56q0 -85 48.5 -126t135.5 -41q84 0 183 32t194 64t167 32z [...]
+<glyph unicode="&#xf0a6;" d="M1280 -64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 700q0 189 -167 189q-26 0 -56 -5q-16 30 -52.5 47.5t-73.5 17.5t-69 -18q-50 53 -119 53q-25 0 -55.5 -10t-47.5 -25v331q0 52 -38 90t-90 38q-51 0 -89.5 -39t-38.5 -89v-576 q-20 0 -48.5 15t-55 33t-68 33t-84.5 15q-67 0 -97.5 -44.5t-30.5 -115.5q0 -24 139 -90q44 -24 65 -37q64 -40 145 -112q81 -71 106 -101q57 -69 57 -140v-32h640v32q0 72 32 167t64 193.5t32 179.5zM1536 705q0 -133 -69 -322q-59 -164 - [...]
+<glyph unicode="&#xf0a7;" d="M1408 576q0 84 -32 183t-64 194t-32 167v32h-640v-32q0 -35 -12 -67.5t-37 -62.5t-46 -50t-54 -49q-9 -8 -14 -12q-81 -72 -145 -112q-22 -14 -68 -38q-3 -1 -22.5 -10.5t-36 -18.5t-35.5 -20t-30.5 -21.5t-11.5 -18.5q0 -71 30.5 -115.5t97.5 -44.5q43 0 84.5 15t68 33 t55 33t48.5 15v-576q0 -50 38.5 -89t89.5 -39q52 0 90 38t38 90v331q46 -35 103 -35q69 0 119 53q32 -18 69 -18t73.5 17.5t52.5 47.5q24 -4 56 -4q85 0 126 48.5t41 135.5zM1280 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -4 [...]
+<glyph unicode="&#xf0a8;" d="M1280 576v128q0 26 -19 45t-45 19h-502l189 189q19 19 19 45t-19 45l-91 91q-18 18 -45 18t-45 -18l-362 -362l-91 -91q-18 -18 -18 -45t18 -45l91 -91l362 -362q18 -18 45 -18t45 18l91 91q18 18 18 45t-18 45l-189 189h502q26 0 45 19t19 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf0a9;" d="M1285 640q0 27 -18 45l-91 91l-362 362q-18 18 -45 18t-45 -18l-91 -91q-18 -18 -18 -45t18 -45l189 -189h-502q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h502l-189 -189q-19 -19 -19 -45t19 -45l91 -91q18 -18 45 -18t45 18l362 362l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf0aa;" d="M1284 641q0 27 -18 45l-362 362l-91 91q-18 18 -45 18t-45 -18l-91 -91l-362 -362q-18 -18 -18 -45t18 -45l91 -91q18 -18 45 -18t45 18l189 189v-502q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v502l189 -189q19 -19 45 -19t45 19l91 91q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf0ab;" d="M1284 639q0 27 -18 45l-91 91q-18 18 -45 18t-45 -18l-189 -189v502q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-502l-189 189q-19 19 -45 19t-45 -19l-91 -91q-18 -18 -18 -45t18 -45l362 -362l91 -91q18 -18 45 -18t45 18l91 91l362 362q18 18 18 45zM1536 640 q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf0ac;" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1042 887q-2 -1 -9.5 -9.5t-13.5 -9.5q2 0 4.5 5t5 11t3.5 7q6 7 22 15q14 6 52 12q34 8 51 -11 q-2 2 9.5 13t14.5 12q3 2 15 4.5t15 7.5l2 22q-12 -1 -17.5 7t-6.5 21q0 -2 -6 -8q0 7 -4.5 8t-11.5 -1t-9 -1q-10 3 -15 7.5t-8 16.5t-4 15q-2 5 -9.5 10.5t-9.5 10.5q-1 2 -2.5 5.5t-3 6.5t-4 5.5t-5.5 2.5t-7 -5t-7.5 -10t-4.5 -5 [...]
+<glyph unicode="&#xf0ad;" horiz-adv-x="1664" d="M384 64q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1028 484l-682 -682q-37 -37 -90 -37q-52 0 -91 37l-106 108q-38 36 -38 90q0 53 38 91l681 681q39 -98 114.5 -173.5t173.5 -114.5zM1662 919q0 -39 -23 -106q-47 -134 -164.5 -217.5 t-258.5 -83.5q-185 0 -316.5 131.5t-131.5 316.5t131.5 316.5t316.5 131.5q58 0 121.5 -16.5t107.5 -46.5q16 -11 16 -28t-16 -28l-293 -169v-224l193 -107q5 3 79 48.5t135.5 81t70.5 35.5q15 0 23.5 -10t8.5 -25z" />
+<glyph unicode="&#xf0ae;" horiz-adv-x="1792" d="M1024 128h640v128h-640v-128zM640 640h1024v128h-1024v-128zM1280 1152h384v128h-384v-128zM1792 320v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 832v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19 t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45zM1792 1344v-256q0 -26 -19 -45t-45 -19h-1664q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1664q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf0b0;" horiz-adv-x="1408" d="M1403 1241q17 -41 -14 -70l-493 -493v-742q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-256 256q-19 19 -19 45v486l-493 493q-31 29 -14 70q17 39 59 39h1280q42 0 59 -39z" />
+<glyph unicode="&#xf0b1;" horiz-adv-x="1792" d="M640 1280h512v128h-512v-128zM1792 640v-480q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v480h672v-160q0 -26 19 -45t45 -19h320q26 0 45 19t19 45v160h672zM1024 640v-128h-256v128h256zM1792 1120v-384h-1792v384q0 66 47 113t113 47h352v160q0 40 28 68 t68 28h576q40 0 68 -28t28 -68v-160h352q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf0b2;" d="M1283 995l-355 -355l355 -355l144 144q29 31 70 14q39 -17 39 -59v-448q0 -26 -19 -45t-45 -19h-448q-42 0 -59 40q-17 39 14 69l144 144l-355 355l-355 -355l144 -144q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l144 -144 l355 355l-355 355l-144 -144q-19 -19 -45 -19q-12 0 -24 5q-40 17 -40 59v448q0 26 19 45t45 19h448q42 0 59 -40q17 -39 -14 -69l-144 -144l355 -355l355 355l-144 144q-31 30 -14 69q17 40 59 40h448q26 0 45 -19t19 -45v-448q0  [...]
+<glyph unicode="&#xf0c0;" horiz-adv-x="1920" d="M593 640q-162 -5 -265 -128h-134q-82 0 -138 40.5t-56 118.5q0 353 124 353q6 0 43.5 -21t97.5 -42.5t119 -21.5q67 0 133 23q-5 -37 -5 -66q0 -139 81 -256zM1664 3q0 -120 -73 -189.5t-194 -69.5h-874q-121 0 -194 69.5t-73 189.5q0 53 3.5 103.5t14 109t26.5 108.5 t43 97.5t62 81t85.5 53.5t111.5 20q10 0 43 -21.5t73 -48t107 -48t135 -21.5t135 21.5t107 48t73 48t43 21.5q61 0 111.5 -20t85.5 -53.5t62 -81t43 -97.5t26.5 -108.5t14 -109t3.5 -103.5zM640 1280q0 -106 -7 [...]
+<glyph unicode="&#xf0c1;" horiz-adv-x="1664" d="M1456 320q0 40 -28 68l-208 208q-28 28 -68 28q-42 0 -72 -32q3 -3 19 -18.5t21.5 -21.5t15 -19t13 -25.5t3.5 -27.5q0 -40 -28 -68t-68 -28q-15 0 -27.5 3.5t-25.5 13t-19 15t-21.5 21.5t-18.5 19q-33 -31 -33 -73q0 -40 28 -68l206 -207q27 -27 68 -27q40 0 68 26 l147 146q28 28 28 67zM753 1025q0 40 -28 68l-206 207q-28 28 -68 28q-39 0 -68 -27l-147 -146q-28 -28 -28 -67q0 -40 28 -68l208 -208q27 -27 68 -27q42 0 72 31q-3 3 -19 18.5t-21.5 21.5t-15 19t-13 25.5t-3. [...]
+<glyph unicode="&#xf0c2;" horiz-adv-x="1920" d="M1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088q-185 0 -316.5 131.5t-131.5 316.5q0 132 71 241.5t187 163.5q-2 28 -2 43q0 212 150 362t362 150q158 0 286.5 -88t187.5 -230q70 62 166 62q106 0 181 -75t75 -181q0 -75 -41 -138q129 -30 213 -134.5t84 -239.5z " />
+<glyph unicode="&#xf0c3;" horiz-adv-x="1664" d="M1527 88q56 -89 21.5 -152.5t-140.5 -63.5h-1152q-106 0 -140.5 63.5t21.5 152.5l503 793v399h-64q-26 0 -45 19t-19 45t19 45t45 19h512q26 0 45 -19t19 -45t-19 -45t-45 -19h-64v-399zM748 813l-272 -429h712l-272 429l-20 31v37v399h-128v-399v-37z" />
+<glyph unicode="&#xf0c4;" horiz-adv-x="1792" d="M960 640q26 0 45 -19t19 -45t-19 -45t-45 -19t-45 19t-19 45t19 45t45 19zM1260 576l507 -398q28 -20 25 -56q-5 -35 -35 -51l-128 -64q-13 -7 -29 -7q-17 0 -31 8l-690 387l-110 -66q-8 -4 -12 -5q14 -49 10 -97q-7 -77 -56 -147.5t-132 -123.5q-132 -84 -277 -84 q-136 0 -222 78q-90 84 -79 207q7 76 56 147t131 124q132 84 278 84q83 0 151 -31q9 13 22 22l122 73l-122 73q-13 9 -22 22q-68 -31 -151 -31q-146 0 -278 84q-82 53 -131 124t-56 147q-5 59 15.5 113t63.5 93q85 [...]
+<glyph unicode="&#xf0c5;" horiz-adv-x="1792" d="M1696 1152q40 0 68 -28t28 -68v-1216q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v288h-544q-40 0 -68 28t-28 68v672q0 40 20 88t48 76l408 408q28 28 76 48t88 20h416q40 0 68 -28t28 -68v-328q68 40 128 40h416zM1152 939l-299 -299h299v299zM512 1323l-299 -299 h299v299zM708 676l316 316v416h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h512v256q0 40 20 88t48 76zM1664 -128v1152h-384v-416q0 -40 -28 -68t-68 -28h-416v-640h896z" />
+<glyph unicode="&#xf0c6;" horiz-adv-x="1408" d="M1404 151q0 -117 -79 -196t-196 -79q-135 0 -235 100l-777 776q-113 115 -113 271q0 159 110 270t269 111q158 0 273 -113l605 -606q10 -10 10 -22q0 -16 -30.5 -46.5t-46.5 -30.5q-13 0 -23 10l-606 607q-79 77 -181 77q-106 0 -179 -75t-73 -181q0 -105 76 -181 l776 -777q63 -63 145 -63q64 0 106 42t42 106q0 82 -63 145l-581 581q-26 24 -60 24q-29 0 -48 -19t-19 -48q0 -32 25 -59l410 -410q10 -10 10 -22q0 -16 -31 -47t-47 -31q-12 0 -22 10l-410 410q-63 61 -63 149q0  [...]
+<glyph unicode="&#xf0c7;" d="M384 0h768v384h-768v-384zM1280 0h128v896q0 14 -10 38.5t-20 34.5l-281 281q-10 10 -34 20t-39 10v-416q0 -40 -28 -68t-68 -28h-576q-40 0 -68 28t-28 68v416h-128v-1280h128v416q0 40 28 68t68 28h832q40 0 68 -28t28 -68v-416zM896 928v320q0 13 -9.5 22.5t-22.5 9.5 h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-320q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5zM1536 896v-928q0 -40 -28 -68t-68 -28h-1344q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h928q40 0 88 -20t76 -48l280 -280q28 [...]
+<glyph unicode="&#xf0c8;" d="M1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf0c9;" d="M1536 192v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 704v-128q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1536 1216v-128q0 -26 -19 -45 t-45 -19h-1408q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf0ca;" horiz-adv-x="1792" d="M384 128q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM384 640q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 224v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5 t22.5 9.5h1216q13 0 22.5 -9.5t9.5 -22.5zM384 1152q0 -80 -56 -136t-136 -56t-136 56t-56 136t56 136t136 56t136 -56t56 -136zM1792 736v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1216q-13 0 -22.5 9.5t-9.5 22. [...]
+<glyph unicode="&#xf0cb;" horiz-adv-x="1792" d="M381 -84q0 -80 -54.5 -126t-135.5 -46q-106 0 -172 66l57 88q49 -45 106 -45q29 0 50.5 14.5t21.5 42.5q0 64 -105 56l-26 56q8 10 32.5 43.5t42.5 54t37 38.5v1q-16 0 -48.5 -1t-48.5 -1v-53h-106v152h333v-88l-95 -115q51 -12 81 -49t30 -88zM383 543v-159h-362 q-6 36 -6 54q0 51 23.5 93t56.5 68t66 47.5t56.5 43.5t23.5 45q0 25 -14.5 38.5t-39.5 13.5q-46 0 -81 -58l-85 59q24 51 71.5 79.5t105.5 28.5q73 0 123 -41.5t50 -112.5q0 -50 -34 -91.5t-75 -64.5t-75.5 -50.5t- [...]
+<glyph unicode="&#xf0cc;" horiz-adv-x="1792" d="M1760 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-1728q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h1728zM483 704q-28 35 -51 80q-48 97 -48 188q0 181 134 309q133 127 393 127q50 0 167 -19q66 -12 177 -48q10 -38 21 -118q14 -123 14 -183q0 -18 -5 -45l-12 -3l-84 6 l-14 2q-50 149 -103 205q-88 91 -210 91q-114 0 -182 -59q-67 -58 -67 -146q0 -73 66 -140t279 -129q69 -20 173 -66q58 -28 95 -52h-743zM990 448h411q7 -39 7 -92q0 -111 -41 -212q-23 -55 -71 -104q-37 -3 [...]
+<glyph unicode="&#xf0cd;" d="M48 1313q-37 2 -45 4l-3 88q13 1 40 1q60 0 112 -4q132 -7 166 -7q86 0 168 3q116 4 146 5q56 0 86 2l-1 -14l2 -64v-9q-60 -9 -124 -9q-60 0 -79 -25q-13 -14 -13 -132q0 -13 0.5 -32.5t0.5 -25.5l1 -229l14 -280q6 -124 51 -202q35 -59 96 -92q88 -47 177 -47 q104 0 191 28q56 18 99 51q48 36 65 64q36 56 53 114q21 73 21 229q0 79 -3.5 128t-11 122.5t-13.5 159.5l-4 59q-5 67 -24 88q-34 35 -77 34l-100 -2l-14 3l2 86h84l205 -10q76 -3 196 10l18 -2q6 -38 6 -51q0 -7 -4 -31q-45 -12 -84 -1 [...]
+<glyph unicode="&#xf0ce;" horiz-adv-x="1664" d="M512 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 160v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23 v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM512 928v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h320q14 0 23 9t9 23zM1024 544v192q0 14 -9 23t-23 9h-320q-14 0 -23 -9t-9 -2 [...]
+<glyph unicode="&#xf0d0;" horiz-adv-x="1664" d="M1190 955l293 293l-107 107l-293 -293zM1637 1248q0 -27 -18 -45l-1286 -1286q-18 -18 -45 -18t-45 18l-198 198q-18 18 -18 45t18 45l1286 1286q18 18 45 18t45 -18l198 -198q18 -18 18 -45zM286 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM636 1276 l196 -60l-196 -60l-60 -196l-60 196l-196 60l196 60l60 196zM1566 798l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98zM926 1438l98 -30l-98 -30l-30 -98l-30 98l-98 30l98 30l30 98z" />
+<glyph unicode="&#xf0d1;" horiz-adv-x="1792" d="M640 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM256 640h384v256h-158q-13 0 -22 -9l-195 -195q-9 -9 -9 -22v-30zM1536 128q0 52 -38 90t-90 38t-90 -38t-38 -90t38 -90t90 -38t90 38t38 90zM1792 1216v-1024q0 -15 -4 -26.5t-13.5 -18.5 t-16.5 -11.5t-23.5 -6t-22.5 -2t-25.5 0t-22.5 0.5q0 -106 -75 -181t-181 -75t-181 75t-75 181h-384q0 -106 -75 -181t-181 -75t-181 75t-75 181h-64q-3 0 -22.5 -0.5t-25.5 0t-22.5 2t-23.5 6t-16.5 11.5t-13.5  [...]
+<glyph unicode="&#xf0d2;" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103q-111 0 -218 32q59 93 78 164q9 34 54 211q20 -39 73 -67.5t114 -28.5q121 0 216 68.5t147 188.5t52 270q0 114 -59.5 214t-172.5 163t-255 63q-105 0 -196 -29t-154.5 -77t-109 -110.5t-67 -129.5t-21.5 -134 q0 -104 40 -183t117 -111q30 -12 38 20q2 7 8 31t8 30q6 23 -11 43q-51 61 -51 151q0 151 104.5 259.5t273.5 108.5q151 0 235.5 -82t84.5 -213q0 -170 -68.5 -289t-175.5 -119q-61 0 -98 43.5t-23 104.5q8 35 26.5 93.5t30 103t11 [...]
+<glyph unicode="&#xf0d3;" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-725q85 122 108 210q9 34 53 209q21 -39 73.5 -67t112.5 -28q181 0 295.5 147.5t114.5 373.5q0 84 -35 162.5t-96.5 139t-152.5 97t-197 36.5q-104 0 -194.5 -28.5t-153 -76.5 t-107.5 -109.5t-66.5 -128t-21.5 -132.5q0 -102 39.5 -180t116.5 -110q13 -5 23.5 0t14.5 19q10 44 15 61q6 23 -11 42q-50 62 -50 150q0 150 103.5 256.5t270.5 106.5q149 0 232.5 -81t83.5 -210q0 -168 -67.5 -286t-173.5 -118q-60 0 - [...]
+<glyph unicode="&#xf0d4;" d="M678 -57q0 -38 -10 -71h-380q-95 0 -171.5 56.5t-103.5 147.5q24 45 69 77.5t100 49.5t107 24t107 7q32 0 49 -2q6 -4 30.5 -21t33 -23t31 -23t32 -25.5t27.5 -25.5t26.5 -29.5t21 -30.5t17.5 -34.5t9.5 -36t4.5 -40.5zM385 294q-234 -7 -385 -85v433q103 -118 273 -118 q32 0 70 5q-21 -61 -21 -86q0 -67 63 -149zM558 805q0 -100 -43.5 -160.5t-140.5 -60.5q-51 0 -97 26t-78 67.5t-56 93.5t-35.5 104t-11.5 99q0 96 51.5 165t144.5 69q66 0 119 -41t84 -104t47 -130t16 -128zM1536 896v-736q0 -1 [...]
+<glyph unicode="&#xf0d5;" horiz-adv-x="1664" d="M876 71q0 21 -4.5 40.5t-9.5 36t-17.5 34.5t-21 30.5t-26.5 29.5t-27.5 25.5t-32 25.5t-31 23t-33 23t-30.5 21q-17 2 -50 2q-54 0 -106 -7t-108 -25t-98 -46t-69 -75t-27 -107q0 -68 35.5 -121.5t93 -84t120.5 -45.5t127 -15q59 0 112.5 12.5t100.5 39t74.5 73.5 t27.5 110zM756 933q0 60 -16.5 127.5t-47 130.5t-84 104t-119.5 41q-93 0 -144 -69t-51 -165q0 -47 11.5 -99t35.5 -104t56 -93.5t78 -67.5t97 -26q97 0 140.5 60.5t43.5 160.5zM625 1408h437l-135 -79h-135q71 -45 [...]
+<glyph unicode="&#xf0d6;" horiz-adv-x="1920" d="M768 384h384v96h-128v448h-114l-148 -137l77 -80q42 37 55 57h2v-288h-128v-96zM1280 640q0 -70 -21 -142t-59.5 -134t-101.5 -101t-138 -39t-138 39t-101.5 101t-59.5 134t-21 142t21 142t59.5 134t101.5 101t138 39t138 -39t101.5 -101t59.5 -134t21 -142zM1792 384 v512q-106 0 -181 75t-75 181h-1152q0 -106 -75 -181t-181 -75v-512q106 0 181 -75t75 -181h1152q0 106 75 181t181 75zM1920 1216v-1152q0 -26 -19 -45t-45 -19h-1792q-26 0 -45 19t-19 45v1152q0 26 19 45t45  [...]
+<glyph unicode="&#xf0d7;" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf0d8;" horiz-adv-x="1024" d="M1024 320q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
+<glyph unicode="&#xf0d9;" horiz-adv-x="640" d="M640 1088v-896q0 -26 -19 -45t-45 -19t-45 19l-448 448q-19 19 -19 45t19 45l448 448q19 19 45 19t45 -19t19 -45z" />
+<glyph unicode="&#xf0da;" horiz-adv-x="640" d="M576 640q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19t-19 45v896q0 26 19 45t45 19t45 -19l448 -448q19 -19 19 -45z" />
+<glyph unicode="&#xf0db;" horiz-adv-x="1664" d="M160 0h608v1152h-640v-1120q0 -13 9.5 -22.5t22.5 -9.5zM1536 32v1120h-640v-1152h608q13 0 22.5 9.5t9.5 22.5zM1664 1248v-1216q0 -66 -47 -113t-113 -47h-1344q-66 0 -113 47t-47 113v1216q0 66 47 113t113 47h1344q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf0dc;" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45zM1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
+<glyph unicode="&#xf0dd;" horiz-adv-x="1024" d="M1024 448q0 -26 -19 -45l-448 -448q-19 -19 -45 -19t-45 19l-448 448q-19 19 -19 45t19 45t45 19h896q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf0de;" horiz-adv-x="1024" d="M1024 832q0 -26 -19 -45t-45 -19h-896q-26 0 -45 19t-19 45t19 45l448 448q19 19 45 19t45 -19l448 -448q19 -19 19 -45z" />
+<glyph unicode="&#xf0e0;" horiz-adv-x="1792" d="M1792 826v-794q0 -66 -47 -113t-113 -47h-1472q-66 0 -113 47t-47 113v794q44 -49 101 -87q362 -246 497 -345q57 -42 92.5 -65.5t94.5 -48t110 -24.5h1h1q51 0 110 24.5t94.5 48t92.5 65.5q170 123 498 345q57 39 100 87zM1792 1120q0 -79 -49 -151t-122 -123 q-376 -261 -468 -325q-10 -7 -42.5 -30.5t-54 -38t-52 -32.5t-57.5 -27t-50 -9h-1h-1q-23 0 -50 9t-57.5 27t-52 32.5t-54 38t-42.5 30.5q-91 64 -262 182.5t-205 142.5q-62 42 -117 115.5t-55 136.5q0 78 41.5 130t11 [...]
+<glyph unicode="&#xf0e1;" d="M349 911v-991h-330v991h330zM370 1217q1 -73 -50.5 -122t-135.5 -49h-2q-82 0 -132 49t-50 122q0 74 51.5 122.5t134.5 48.5t133 -48.5t51 -122.5zM1536 488v-568h-329v530q0 105 -40.5 164.5t-126.5 59.5q-63 0 -105.5 -34.5t-63.5 -85.5q-11 -30 -11 -81v-553h-329 q2 399 2 647t-1 296l-1 48h329v-144h-2q20 32 41 56t56.5 52t87 43.5t114.5 15.5q171 0 275 -113.5t104 -332.5z" />
+<glyph unicode="&#xf0e2;" d="M1536 640q0 -156 -61 -298t-164 -245t-245 -164t-298 -61q-172 0 -327 72.5t-264 204.5q-7 10 -6.5 22.5t8.5 20.5l137 138q10 9 25 9q16 -2 23 -12q73 -95 179 -147t225 -52q104 0 198.5 40.5t163.5 109.5t109.5 163.5t40.5 198.5t-40.5 198.5t-109.5 163.5 t-163.5 109.5t-198.5 40.5q-98 0 -188 -35.5t-160 -101.5l137 -138q31 -30 14 -69q-17 -40 -59 -40h-448q-26 0 -45 19t-19 45v448q0 42 40 59q39 17 69 -14l130 -129q107 101 244.5 156.5t284.5 55.5q156 0 298 -61t245 -164t164 -245t61 - [...]
+<glyph unicode="&#xf0e3;" horiz-adv-x="1792" d="M1771 0q0 -53 -37 -90l-107 -108q-39 -37 -91 -37q-53 0 -90 37l-363 364q-38 36 -38 90q0 53 43 96l-256 256l-126 -126q-14 -14 -34 -14t-34 14q2 -2 12.5 -12t12.5 -13t10 -11.5t10 -13.5t6 -13.5t5.5 -16.5t1.5 -18q0 -38 -28 -68q-3 -3 -16.5 -18t-19 -20.5 t-18.5 -16.5t-22 -15.5t-22 -9t-26 -4.5q-40 0 -68 28l-408 408q-28 28 -28 68q0 13 4.5 26t9 22t15.5 22t16.5 18.5t20.5 19t18 16.5q30 28 68 28q10 0 18 -1.5t16.5 -5.5t13.5 -6t13.5 -10t11.5 -10t13 -12.5t12 - [...]
+<glyph unicode="&#xf0e4;" horiz-adv-x="1792" d="M384 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM576 832q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1004 351l101 382q6 26 -7.5 48.5t-38.5 29.5 t-48 -6.5t-30 -39.5l-101 -382q-60 -5 -107 -43.5t-63 -98.5q-20 -77 20 -146t117 -89t146 20t89 117q16 60 -6 117t-72 91zM1664 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37 [...]
+<glyph unicode="&#xf0e5;" horiz-adv-x="1792" d="M896 1152q-204 0 -381.5 -69.5t-282 -187.5t-104.5 -255q0 -112 71.5 -213.5t201.5 -175.5l87 -50l-27 -96q-24 -91 -70 -172q152 63 275 171l43 38l57 -6q69 -8 130 -8q204 0 381.5 69.5t282 187.5t104.5 255t-104.5 255t-282 187.5t-381.5 69.5zM1792 640 q0 -174 -120 -321.5t-326 -233t-450 -85.5q-70 0 -145 8q-198 -175 -460 -242q-49 -14 -114 -22h-5q-15 0 -27 10.5t-16 27.5v1q-3 4 -0.5 12t2 10t4.5 9.5l6 9t7 8.5t8 9q7 8 31 34.5t34.5 38t31 39.5t32.5 51t27 59t26  [...]
+<glyph unicode="&#xf0e6;" horiz-adv-x="1792" d="M704 1152q-153 0 -286 -52t-211.5 -141t-78.5 -191q0 -82 53 -158t149 -132l97 -56l-35 -84q34 20 62 39l44 31l53 -10q78 -14 153 -14q153 0 286 52t211.5 141t78.5 191t-78.5 191t-211.5 141t-286 52zM704 1280q191 0 353.5 -68.5t256.5 -186.5t94 -257t-94 -257 t-256.5 -186.5t-353.5 -68.5q-86 0 -176 16q-124 -88 -278 -128q-36 -9 -86 -16h-3q-11 0 -20.5 8t-11.5 21q-1 3 -1 6.5t0.5 6.5t2 6l2.5 5t3.5 5.5t4 5t4.5 5t4 4.5q5 6 23 25t26 29.5t22.5 29t25 38.5t20.5 44q [...]
+<glyph unicode="&#xf0e7;" horiz-adv-x="896" d="M885 970q18 -20 7 -44l-540 -1157q-13 -25 -42 -25q-4 0 -14 2q-17 5 -25.5 19t-4.5 30l197 808l-406 -101q-4 -1 -12 -1q-18 0 -31 11q-18 15 -13 39l201 825q4 14 16 23t28 9h328q19 0 32 -12.5t13 -29.5q0 -8 -5 -18l-171 -463l396 98q8 2 12 2q19 0 34 -15z" />
+<glyph unicode="&#xf0e8;" horiz-adv-x="1792" d="M1792 288v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192h-512v-192h96q40 0 68 -28t28 -68v-320 q0 -40 -28 -68t-68 -28h-320q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h96v192q0 52 38 90t90 38h512v192h-96q-40 0 -68 28t-28 68v320q0 40 28 68t68 28h320q40 0 68 -28t28 -68v-320q0 -40 -28 -68t-68 -28h-96v-192h [...]
+<glyph unicode="&#xf0e9;" horiz-adv-x="1664" d="M896 708v-580q0 -104 -76 -180t-180 -76t-180 76t-76 180q0 26 19 45t45 19t45 -19t19 -45q0 -50 39 -89t89 -39t89 39t39 89v580q33 11 64 11t64 -11zM1664 681q0 -13 -9.5 -22.5t-22.5 -9.5q-11 0 -23 10q-49 46 -93 69t-102 23q-68 0 -128 -37t-103 -97 q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -28 -17q-18 0 -29 17q-4 6 -14.5 24t-17.5 28q-43 60 -102.5 97t-127.5 37t-127.5 -37t-102.5 -97q-7 -10 -17.5 -28t-14.5 -24q-11 -17 -29 -17q-17 0 -28 17q-4 6 -14.5 24t-17.5 2 [...]
+<glyph unicode="&#xf0ea;" horiz-adv-x="1792" d="M768 -128h896v640h-416q-40 0 -68 28t-28 68v416h-384v-1152zM1024 1312v64q0 13 -9.5 22.5t-22.5 9.5h-704q-13 0 -22.5 -9.5t-9.5 -22.5v-64q0 -13 9.5 -22.5t22.5 -9.5h704q13 0 22.5 9.5t9.5 22.5zM1280 640h299l-299 299v-299zM1792 512v-672q0 -40 -28 -68t-68 -28 h-960q-40 0 -68 28t-28 68v160h-544q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h1088q40 0 68 -28t28 -68v-328q21 -13 36 -28l408 -408q28 -28 48 -76t20 -88z" />
+<glyph unicode="&#xf0eb;" horiz-adv-x="1024" d="M736 960q0 -13 -9.5 -22.5t-22.5 -9.5t-22.5 9.5t-9.5 22.5q0 46 -54 71t-106 25q-13 0 -22.5 9.5t-9.5 22.5t9.5 22.5t22.5 9.5q50 0 99.5 -16t87 -54t37.5 -90zM896 960q0 72 -34.5 134t-90 101.5t-123 62t-136.5 22.5t-136.5 -22.5t-123 -62t-90 -101.5t-34.5 -134 q0 -101 68 -180q10 -11 30.5 -33t30.5 -33q128 -153 141 -298h228q13 145 141 298q10 11 30.5 33t30.5 33q68 79 68 180zM1024 960q0 -155 -103 -268q-45 -49 -74.5 -87t-59.5 -95.5t-34 -107.5q47 -28 47 -82q [...]
+<glyph unicode="&#xf0ec;" horiz-adv-x="1792" d="M1792 352v-192q0 -13 -9.5 -22.5t-22.5 -9.5h-1376v-192q0 -13 -9.5 -22.5t-22.5 -9.5q-12 0 -24 10l-319 320q-9 9 -9 22q0 14 9 23l320 320q9 9 23 9q13 0 22.5 -9.5t9.5 -22.5v-192h1376q13 0 22.5 -9.5t9.5 -22.5zM1792 896q0 -14 -9 -23l-320 -320q-9 -9 -23 -9 q-13 0 -22.5 9.5t-9.5 22.5v192h-1376q-13 0 -22.5 9.5t-9.5 22.5v192q0 13 9.5 22.5t22.5 9.5h1376v192q0 14 9 23t23 9q12 0 24 -10l319 -319q9 -9 9 -23z" />
+<glyph unicode="&#xf0ed;" horiz-adv-x="1920" d="M1280 608q0 14 -9 23t-23 9h-224v352q0 13 -9.5 22.5t-22.5 9.5h-192q-13 0 -22.5 -9.5t-9.5 -22.5v-352h-224q-13 0 -22.5 -9.5t-9.5 -22.5q0 -14 9 -23l352 -352q9 -9 23 -9t23 9l351 351q10 12 10 24zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" />
+<glyph unicode="&#xf0ee;" horiz-adv-x="1920" d="M1280 672q0 14 -9 23l-352 352q-9 9 -23 9t-23 -9l-351 -351q-10 -12 -10 -24q0 -14 9 -23t23 -9h224v-352q0 -13 9.5 -22.5t22.5 -9.5h192q13 0 22.5 9.5t9.5 22.5v352h224q13 0 22.5 9.5t9.5 22.5zM1920 384q0 -159 -112.5 -271.5t-271.5 -112.5h-1088 q-185 0 -316.5 131.5t-131.5 316.5q0 130 70 240t188 165q-2 30 -2 43q0 212 150 362t362 150q156 0 285.5 -87t188.5 -231q71 62 166 62q106 0 181 -75t75 -181q0 -76 -41 -138q130 -31 213.5 -135.5t83.5 -238.5z" />
+<glyph unicode="&#xf0f0;" horiz-adv-x="1408" d="M384 192q0 -26 -19 -45t-45 -19t-45 19t-19 45t19 45t45 19t45 -19t19 -45zM1408 131q0 -121 -73 -190t-194 -69h-874q-121 0 -194 69t-73 190q0 68 5.5 131t24 138t47.5 132.5t81 103t120 60.5q-22 -52 -22 -120v-203q-58 -20 -93 -70t-35 -111q0 -80 56 -136t136 -56 t136 56t56 136q0 61 -35.5 111t-92.5 70v203q0 62 25 93q132 -104 295 -104t295 104q25 -31 25 -93v-64q-106 0 -181 -75t-75 -181v-89q-32 -29 -32 -71q0 -40 28 -68t68 -28t68 28t28 68q0 42 -32 71v89q0 52 [...]
+<glyph unicode="&#xf0f1;" horiz-adv-x="1408" d="M1280 832q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 832q0 -62 -35.5 -111t-92.5 -70v-395q0 -159 -131.5 -271.5t-316.5 -112.5t-316.5 112.5t-131.5 271.5v132q-164 20 -274 128t-110 252v512q0 26 19 45t45 19q6 0 16 -2q17 30 47 48 t65 18q53 0 90.5 -37.5t37.5 -90.5t-37.5 -90.5t-90.5 -37.5q-33 0 -64 18v-402q0 -106 94 -181t226 -75t226 75t94 181v402q-31 -18 -64 -18q-53 0 -90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5q35 0 65 -18t47 - [...]
+<glyph unicode="&#xf0f2;" horiz-adv-x="1792" d="M640 1152h512v128h-512v-128zM288 1152v-1280h-64q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h64zM1408 1152v-1280h-1024v1280h128v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h128zM1792 928v-832q0 -92 -66 -158t-158 -66h-64v1280h64q92 0 158 -66 t66 -158z" />
+<glyph unicode="&#xf0f3;" horiz-adv-x="1664" d="M848 -160q0 16 -16 16q-59 0 -101.5 42.5t-42.5 101.5q0 16 -16 16t-16 -16q0 -73 51.5 -124.5t124.5 -51.5q16 0 16 16zM1664 128q0 -52 -38 -90t-90 -38h-448q0 -106 -75 -181t-181 -75t-181 75t-75 181h-448q-52 0 -90 38t-38 90q190 161 287 397.5t97 498.5 q0 165 96 262t264 117q-8 18 -8 37q0 40 28 68t68 28t68 -28t28 -68q0 -19 -8 -37q168 -20 264 -117t96 -262q0 -262 97 -498.5t287 -397.5z" />
+<glyph unicode="&#xf0f4;" horiz-adv-x="1920" d="M1664 896q0 80 -56 136t-136 56h-64v-384h64q80 0 136 56t56 136zM0 128h1792q0 -106 -75 -181t-181 -75h-1280q-106 0 -181 75t-75 181zM1856 896q0 -159 -112.5 -271.5t-271.5 -112.5h-64v-32q0 -92 -66 -158t-158 -66h-704q-92 0 -158 66t-66 158v736q0 26 19 45 t45 19h1152q159 0 271.5 -112.5t112.5 -271.5z" />
+<glyph unicode="&#xf0f5;" horiz-adv-x="1408" d="M640 1472v-640q0 -61 -35.5 -111t-92.5 -70v-779q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v779q-57 20 -92.5 70t-35.5 111v640q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45v-416q0 -26 19 -45 t45 -19t45 19t19 45v416q0 26 19 45t45 19t45 -19t19 -45zM1408 1472v-1600q0 -52 -38 -90t-90 -38h-128q-52 0 -90 38t-38 90v512h-224q-13 0 -22.5 9.5t-9.5 22.5v800q0 132 94 226t226 94h256q26 0 45 -19t1 [...]
+<glyph unicode="&#xf0f6;" horiz-adv-x="1280" d="M1024 352v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM1024 608v-64q0 -14 -9 -23t-23 -9h-704q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h704q14 0 23 -9t9 -23zM128 0h1024v768h-416q-40 0 -68 28t-28 68v416h-512v-1280z M768 896h376q-10 29 -22 41l-313 313q-12 12 -41 22v-376zM1280 864v-896q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h640q40 0 88 -20t76 -48l312 -312q28 -28 48 -76t20 -88z" />
+<glyph unicode="&#xf0f7;" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22 [...]
+<glyph unicode="&#xf0f8;" horiz-adv-x="1408" d="M384 224v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5z M640 480v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22.5v64q0 13 9.5 22.5t22.5 9.5h64q13 0 22.5 -9.5t9.5 -22.5zM384 736v-64q0 -13 -9.5 -22.5t-22.5 -9.5h-64q-13 0 -22.5 9.5t-9.5 22 [...]
+<glyph unicode="&#xf0f9;" horiz-adv-x="1920" d="M640 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM256 640h384v256h-158q-14 -2 -22 -9l-195 -195q-7 -12 -9 -22v-30zM1536 128q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5 t90.5 37.5t37.5 90.5zM1664 800v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23  [...]
+<glyph unicode="&#xf0fa;" horiz-adv-x="1792" d="M1280 416v192q0 14 -9 23t-23 9h-224v224q0 14 -9 23t-23 9h-192q-14 0 -23 -9t-9 -23v-224h-224q-14 0 -23 -9t-9 -23v-192q0 -14 9 -23t23 -9h224v-224q0 -14 9 -23t23 -9h192q14 0 23 9t9 23v224h224q14 0 23 9t9 23zM640 1152h512v128h-512v-128zM256 1152v-1280h-32 q-92 0 -158 66t-66 158v832q0 92 66 158t158 66h32zM1440 1152v-1280h-1088v1280h160v160q0 40 28 68t68 28h576q40 0 68 -28t28 -68v-160h160zM1792 928v-832q0 -92 -66 -158t-158 -66h-32v1280h32q92 0 15 [...]
+<glyph unicode="&#xf0fb;" horiz-adv-x="1920" d="M1920 576q-1 -32 -288 -96l-352 -32l-224 -64h-64l-293 -352h69q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-96h-160h-64v32h64v416h-160l-192 -224h-96l-32 32v192h32v32h128v8l-192 24v128l192 24v8h-128v32h-32v192l32 32h96l192 -224h160v416h-64v32h64h160h96 q26 0 45 -4.5t19 -11.5t-19 -11.5t-45 -4.5h-69l293 -352h64l224 -64l352 -32q261 -58 287 -93z" />
+<glyph unicode="&#xf0fc;" horiz-adv-x="1664" d="M640 640v384h-256v-256q0 -53 37.5 -90.5t90.5 -37.5h128zM1664 192v-192h-1152v192l128 192h-128q-159 0 -271.5 112.5t-112.5 271.5v320l-64 64l32 128h480l32 128h960l32 -192l-64 -32v-800z" />
+<glyph unicode="&#xf0fd;" d="M1280 192v896q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-512v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-896q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h512v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf0fe;" d="M1280 576v128q0 26 -19 45t-45 19h-320v320q0 26 -19 45t-45 19h-128q-26 0 -45 -19t-19 -45v-320h-320q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h320v-320q0 -26 19 -45t45 -19h128q26 0 45 19t19 45v320h320q26 0 45 19t19 45zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf100;" horiz-adv-x="1024" d="M627 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23zM1011 160q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23z" />
+<glyph unicode="&#xf101;" horiz-adv-x="1024" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM979 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23 l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
+<glyph unicode="&#xf102;" horiz-adv-x="1152" d="M1075 224q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23zM1075 608q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393 q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
+<glyph unicode="&#xf103;" horiz-adv-x="1152" d="M1075 672q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23zM1075 1056q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23 t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
+<glyph unicode="&#xf104;" horiz-adv-x="640" d="M627 992q0 -13 -10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
+<glyph unicode="&#xf105;" horiz-adv-x="640" d="M595 576q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
+<glyph unicode="&#xf106;" horiz-adv-x="1152" d="M1075 352q0 -13 -10 -23l-50 -50q-10 -10 -23 -10t-23 10l-393 393l-393 -393q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l466 -466q10 -10 10 -23z" />
+<glyph unicode="&#xf107;" horiz-adv-x="1152" d="M1075 800q0 -13 -10 -23l-466 -466q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l393 -393l393 393q10 10 23 10t23 -10l50 -50q10 -10 10 -23z" />
+<glyph unicode="&#xf108;" horiz-adv-x="1920" d="M1792 544v832q0 13 -9.5 22.5t-22.5 9.5h-1600q-13 0 -22.5 -9.5t-9.5 -22.5v-832q0 -13 9.5 -22.5t22.5 -9.5h1600q13 0 22.5 9.5t9.5 22.5zM1920 1376v-1088q0 -66 -47 -113t-113 -47h-544q0 -37 16 -77.5t32 -71t16 -43.5q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19 t-19 45q0 14 16 44t32 70t16 78h-544q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h1600q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf109;" horiz-adv-x="1920" d="M416 256q-66 0 -113 47t-47 113v704q0 66 47 113t113 47h1088q66 0 113 -47t47 -113v-704q0 -66 -47 -113t-113 -47h-1088zM384 1120v-704q0 -13 9.5 -22.5t22.5 -9.5h1088q13 0 22.5 9.5t9.5 22.5v704q0 13 -9.5 22.5t-22.5 9.5h-1088q-13 0 -22.5 -9.5t-9.5 -22.5z M1760 192h160v-96q0 -40 -47 -68t-113 -28h-1600q-66 0 -113 28t-47 68v96h160h1600zM1040 96q16 0 16 16t-16 16h-160q-16 0 -16 -16t16 -16h160z" />
+<glyph unicode="&#xf10a;" horiz-adv-x="1152" d="M640 128q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1024 288v960q0 13 -9.5 22.5t-22.5 9.5h-832q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h832q13 0 22.5 9.5t9.5 22.5zM1152 1248v-1088q0 -66 -47 -113t-113 -47h-832 q-66 0 -113 47t-47 113v1088q0 66 47 113t113 47h832q66 0 113 -47t47 -113z" />
+<glyph unicode="&#xf10b;" horiz-adv-x="768" d="M464 128q0 33 -23.5 56.5t-56.5 23.5t-56.5 -23.5t-23.5 -56.5t23.5 -56.5t56.5 -23.5t56.5 23.5t23.5 56.5zM672 288v704q0 13 -9.5 22.5t-22.5 9.5h-512q-13 0 -22.5 -9.5t-9.5 -22.5v-704q0 -13 9.5 -22.5t22.5 -9.5h512q13 0 22.5 9.5t9.5 22.5zM480 1136 q0 16 -16 16h-160q-16 0 -16 -16t16 -16h160q16 0 16 16zM768 1152v-1024q0 -52 -38 -90t-90 -38h-512q-52 0 -90 38t-38 90v1024q0 52 38 90t90 38h512q52 0 90 -38t38 -90z" />
+<glyph unicode="&#xf10c;" d="M768 1184q-148 0 -273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273t-73 273t-198 198t-273 73zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103 t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf10d;" horiz-adv-x="1664" d="M768 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75 -181v-32q0 -40 28 -68t68 -28h224q80 0 136 -56t56 -136z M1664 576v-384q0 -80 -56 -136t-136 -56h-384q-80 0 -136 56t-56 136v704q0 104 40.5 198.5t109.5 163.5t163.5 109.5t198.5 40.5h64q26 0 45 -19t19 -45v-128q0 -26 -19 -45t-45 -19h-64q-106 0 -181 -75t-75  [...]
+<glyph unicode="&#xf10e;" horiz-adv-x="1664" d="M768 1216v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 56 136t136 56h384q80 0 136 -56t56 -136zM1664 1216 v-704q0 -104 -40.5 -198.5t-109.5 -163.5t-163.5 -109.5t-198.5 -40.5h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64q106 0 181 75t75 181v32q0 40 -28 68t-68 28h-224q-80 0 -136 56t-56 136v384q0 80 [...]
+<glyph unicode="&#xf110;" horiz-adv-x="1568" d="M496 192q0 -60 -42.5 -102t-101.5 -42q-60 0 -102 42t-42 102t42 102t102 42q59 0 101.5 -42t42.5 -102zM928 0q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM320 640q0 -66 -47 -113t-113 -47t-113 47t-47 113 t47 113t113 47t113 -47t47 -113zM1360 192q0 -46 -33 -79t-79 -33t-79 33t-33 79t33 79t79 33t79 -33t33 -79zM528 1088q0 -73 -51.5 -124.5t-124.5 -51.5t-124.5 51.5t-51.5 124.5t51.5 124.5t124.5 51.5t124.5 [...]
+<glyph unicode="&#xf111;" d="M1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf112;" horiz-adv-x="1792" d="M1792 416q0 -166 -127 -451q-3 -7 -10.5 -24t-13.5 -30t-13 -22q-12 -17 -28 -17q-15 0 -23.5 10t-8.5 25q0 9 2.5 26.5t2.5 23.5q5 68 5 123q0 101 -17.5 181t-48.5 138.5t-80 101t-105.5 69.5t-133 42.5t-154 21.5t-175.5 6h-224v-256q0 -26 -19 -45t-45 -19t-45 19 l-512 512q-19 19 -19 45t19 45l512 512q19 19 45 19t45 -19t19 -45v-256h224q713 0 875 -403q53 -134 53 -333z" />
+<glyph unicode="&#xf113;" horiz-adv-x="1664" d="M640 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1280 320q0 -40 -12.5 -82t-43 -76t-72.5 -34t-72.5 34t-43 76t-12.5 82t12.5 82t43 76t72.5 34t72.5 -34t43 -76t12.5 -82zM1440 320 q0 120 -69 204t-187 84q-41 0 -195 -21q-71 -11 -157 -11t-157 11q-152 21 -195 21q-118 0 -187 -84t-69 -204q0 -88 32 -153.5t81 -103t122 -60t140 -29.5t149 -7h168q82 0 149 7t140 29.5t122 60t81 103t32 153.5zM16 [...]
+<glyph unicode="&#xf114;" horiz-adv-x="1664" d="M1536 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68v-960q0 -40 28 -68t68 -28h1216q40 0 68 28t28 68zM1664 928v-704q0 -92 -66 -158t-158 -66h-1216q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320 q92 0 158 -66t66 -158v-32h672q92 0 158 -66t66 -158z" />
+<glyph unicode="&#xf115;" horiz-adv-x="1920" d="M1781 605q0 35 -53 35h-1088q-40 0 -85.5 -21.5t-71.5 -52.5l-294 -363q-18 -24 -18 -40q0 -35 53 -35h1088q40 0 86 22t71 53l294 363q18 22 18 39zM640 768h768v160q0 40 -28 68t-68 28h-576q-40 0 -68 28t-28 68v64q0 40 -28 68t-68 28h-320q-40 0 -68 -28t-28 -68 v-853l256 315q44 53 116 87.5t140 34.5zM1909 605q0 -62 -46 -120l-295 -363q-43 -53 -116 -87.5t-140 -34.5h-1088q-92 0 -158 66t-66 158v960q0 92 66 158t158 66h320q92 0 158 -66t66 -158v-32h544q92 0 158 [...]
+<glyph unicode="&#xf116;" horiz-adv-x="1152" d="M896 608v-64q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-64q-14 0 -23 9t-9 23v224h-224q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h224v224q0 14 9 23t23 9h64q14 0 23 -9t9 -23v-224h224q14 0 23 -9t9 -23zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28 t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68zM1152 928v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704q93 0 158.5 -65.5t65.5 -158.5z" />
+<glyph unicode="&#xf117;" horiz-adv-x="1152" d="M928 1152q93 0 158.5 -65.5t65.5 -158.5v-704q0 -92 -65.5 -158t-158.5 -66h-704q-93 0 -158.5 66t-65.5 158v704q0 93 65.5 158.5t158.5 65.5h704zM1024 224v704q0 40 -28 68t-68 28h-704q-40 0 -68 -28t-28 -68v-704q0 -40 28 -68t68 -28h704q40 0 68 28t28 68z M864 640q14 0 23 -9t9 -23v-64q0 -14 -9 -23t-23 -9h-576q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h576z" />
+<glyph unicode="&#xf118;" d="M1134 461q-37 -121 -138 -195t-228 -74t-228 74t-138 195q-8 25 4 48.5t38 31.5q25 8 48.5 -4t31.5 -38q25 -80 92.5 -129.5t151.5 -49.5t151.5 49.5t92.5 129.5q8 26 32 38t49 4t37 -31.5t4 -48.5zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5 t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-2 [...]
+<glyph unicode="&#xf119;" d="M1134 307q8 -25 -4 -48.5t-37 -31.5t-49 4t-32 38q-25 80 -92.5 129.5t-151.5 49.5t-151.5 -49.5t-92.5 -129.5q-8 -26 -31.5 -38t-48.5 -4q-26 8 -38 31.5t-4 48.5q37 121 138 195t228 74t228 -74t138 -195zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248 [...]
+<glyph unicode="&#xf11a;" d="M1152 448q0 -26 -19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h640q26 0 45 -19t19 -45zM640 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1152 896q0 -53 -37.5 -90.5t-90.5 -37.5t-90.5 37.5 t-37.5 90.5t37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136. [...]
+<glyph unicode="&#xf11b;" horiz-adv-x="1920" d="M832 448v128q0 14 -9 23t-23 9h-192v192q0 14 -9 23t-23 9h-128q-14 0 -23 -9t-9 -23v-192h-192q-14 0 -23 -9t-9 -23v-128q0 -14 9 -23t23 -9h192v-192q0 -14 9 -23t23 -9h128q14 0 23 9t9 23v192h192q14 0 23 9t9 23zM1408 384q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5 t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1664 640q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM1920 512q0 -212 -150 -362t-362 -1 [...]
+<glyph unicode="&#xf11c;" horiz-adv-x="1920" d="M384 368v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM512 624v-96q0 -16 -16 -16h-224q-16 0 -16 16v96q0 16 16 16h224q16 0 16 -16zM384 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM1408 368v-96q0 -16 -16 -16 h-864q-16 0 -16 16v96q0 16 16 16h864q16 0 16 -16zM768 624v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16zM640 880v-96q0 -16 -16 -16h-96q-16 0 -16 16v96q0 16 16 16h96q16 0 16 -16z [...]
+<glyph unicode="&#xf11d;" horiz-adv-x="1792" d="M1664 491v616q-169 -91 -306 -91q-82 0 -145 32q-100 49 -184 76.5t-178 27.5q-173 0 -403 -127v-599q245 113 433 113q55 0 103.5 -7.5t98 -26t77 -31t82.5 -39.5l28 -14q44 -22 101 -22q120 0 293 92zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q0 -14 -9 -23t-23 -9 h-64q-14 0 -23 9t-9 23v1266q-29 17 -46.5 46t-17.5 64q0 53 37.5 90.5t90.5 37.5t90.5 -37.5t37.5 -90.5zM1792 1216v-763q0 -39 -35 -57q-10 -5 -17 -9q-218 -116 -369 -116q-88 0 -158 35l-28 14q-64 33 -9 [...]
+<glyph unicode="&#xf11e;" horiz-adv-x="1792" d="M832 536v192q-181 -16 -384 -117v-185q205 96 384 110zM832 954v197q-172 -8 -384 -126v-189q215 111 384 118zM1664 491v184q-235 -116 -384 -71v224q-20 6 -39 15q-5 3 -33 17t-34.5 17t-31.5 15t-34.5 15.5t-32.5 13t-36 12.5t-35 8.5t-39.5 7.5t-39.5 4t-44 2 q-23 0 -49 -3v-222h19q102 0 192.5 -29t197.5 -82q19 -9 39 -15v-188q42 -17 91 -17q120 0 293 92zM1664 918v189q-169 -91 -306 -91q-45 0 -78 8v-196q148 -42 384 90zM320 1280q0 -35 -17.5 -64t-46.5 -46v-1266q [...]
+<glyph unicode="&#xf120;" horiz-adv-x="1664" d="M585 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10t23 -10l466 -466q10 -10 10 -23t-10 -23zM1664 96v-64q0 -14 -9 -23t-23 -9h-960q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h960q14 0 23 -9 t9 -23z" />
+<glyph unicode="&#xf121;" horiz-adv-x="1920" d="M617 137l-50 -50q-10 -10 -23 -10t-23 10l-466 466q-10 10 -10 23t10 23l466 466q10 10 23 10t23 -10l50 -50q10 -10 10 -23t-10 -23l-393 -393l393 -393q10 -10 10 -23t-10 -23zM1208 1204l-373 -1291q-4 -13 -15.5 -19.5t-23.5 -2.5l-62 17q-13 4 -19.5 15.5t-2.5 24.5 l373 1291q4 13 15.5 19.5t23.5 2.5l62 -17q13 -4 19.5 -15.5t2.5 -24.5zM1865 553l-466 -466q-10 -10 -23 -10t-23 10l-50 50q-10 10 -10 23t10 23l393 393l-393 393q-10 10 -10 23t10 23l50 50q10 10 23 10 [...]
+<glyph unicode="&#xf122;" horiz-adv-x="1792" d="M640 454v-70q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-69l-397 -398q-19 -19 -19 -45t19 -45zM1792 416q0 -58 -17 -133.5t-38.5 -138t-48 -125t-40.5 -90.5l-20 -40q-8 -17 -28 -17q-6 0 -9 1 q-25 8 -23 34q43 400 -106 565q-64 71 -170.5 110.5t-267.5 52.5v-251q0 -42 -39 -59q-13 -5 -25 -5q-27 0 -45 19l-512 512q-19 19 -19 45t19 45l512 512q29 31 70 14q39 -17 39 -59v-262q411 -28 599 -221q169 - [...]
+<glyph unicode="&#xf123;" horiz-adv-x="1664" d="M1186 579l257 250l-356 52l-66 10l-30 60l-159 322v-963l59 -31l318 -168l-60 355l-12 66zM1638 841l-363 -354l86 -500q5 -33 -6 -51.5t-34 -18.5q-17 0 -40 12l-449 236l-449 -236q-23 -12 -40 -12q-23 0 -34 18.5t-6 51.5l86 500l-364 354q-32 32 -23 59.5t54 34.5 l502 73l225 455q20 41 49 41q28 0 49 -41l225 -455l502 -73q45 -7 54 -34.5t-24 -59.5z" />
+<glyph unicode="&#xf124;" horiz-adv-x="1408" d="M1401 1187l-640 -1280q-17 -35 -57 -35q-5 0 -15 2q-22 5 -35.5 22.5t-13.5 39.5v576h-576q-22 0 -39.5 13.5t-22.5 35.5t4 42t29 30l1280 640q13 7 29 7q27 0 45 -19q15 -14 18.5 -34.5t-6.5 -39.5z" />
+<glyph unicode="&#xf125;" horiz-adv-x="1664" d="M557 256h595v595zM512 301l595 595h-595v-595zM1664 224v-192q0 -14 -9 -23t-23 -9h-224v-224q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v224h-864q-14 0 -23 9t-9 23v864h-224q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h224v224q0 14 9 23t23 9h192q14 0 23 -9t9 -23 v-224h851l246 247q10 9 23 9t23 -9q9 -10 9 -23t-9 -23l-247 -246v-851h224q14 0 23 -9t9 -23z" />
+<glyph unicode="&#xf126;" horiz-adv-x="1024" d="M288 64q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM288 1216q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM928 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1024 1088q0 -52 -26 -96.5t-70 -69.5 q-2 -287 -226 -414q-68 -38 -203 -81q-128 -40 -169.5 -71t-41.5 -100v-26q44 -25 70 -69.5t26 -96.5q0 -80 -56 -136t-136 -56t-136 56t-56 136q0 52 26 96.5t70 69.5v820q-44 25 -70 69.5t-26 96.5q0 80 56 13 [...]
+<glyph unicode="&#xf127;" horiz-adv-x="1664" d="M439 265l-256 -256q-10 -9 -23 -9q-12 0 -23 9q-9 10 -9 23t9 23l256 256q10 9 23 9t23 -9q9 -10 9 -23t-9 -23zM608 224v-320q0 -14 -9 -23t-23 -9t-23 9t-9 23v320q0 14 9 23t23 9t23 -9t9 -23zM384 448q0 -14 -9 -23t-23 -9h-320q-14 0 -23 9t-9 23t9 23t23 9h320 q14 0 23 -9t9 -23zM1648 320q0 -120 -85 -203l-147 -146q-83 -83 -203 -83q-121 0 -204 85l-334 335q-21 21 -42 56l239 18l273 -274q27 -27 68 -27.5t68 26.5l147 146q28 28 28 67q0 40 -28 68l-274 275l18 239 [...]
+<glyph unicode="&#xf128;" horiz-adv-x="1024" d="M704 280v-240q0 -16 -12 -28t-28 -12h-240q-16 0 -28 12t-12 28v240q0 16 12 28t28 12h240q16 0 28 -12t12 -28zM1020 880q0 -54 -15.5 -101t-35 -76.5t-55 -59.5t-57.5 -43.5t-61 -35.5q-41 -23 -68.5 -65t-27.5 -67q0 -17 -12 -32.5t-28 -15.5h-240q-15 0 -25.5 18.5 t-10.5 37.5v45q0 83 65 156.5t143 108.5q59 27 84 56t25 76q0 42 -46.5 74t-107.5 32q-65 0 -108 -29q-35 -25 -107 -115q-13 -16 -31 -16q-12 0 -25 8l-164 125q-13 10 -15.5 25t5.5 28q160 266 464 266q80 0 [...]
+<glyph unicode="&#xf129;" horiz-adv-x="640" d="M640 192v-128q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h64v384h-64q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h384q26 0 45 -19t19 -45v-576h64q26 0 45 -19t19 -45zM512 1344v-192q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v192 q0 26 19 45t45 19h256q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf12a;" horiz-adv-x="640" d="M512 288v-224q0 -26 -19 -45t-45 -19h-256q-26 0 -45 19t-19 45v224q0 26 19 45t45 19h256q26 0 45 -19t19 -45zM542 1344l-28 -768q-1 -26 -20.5 -45t-45.5 -19h-256q-26 0 -45.5 19t-20.5 45l-28 768q-1 26 17.5 45t44.5 19h320q26 0 44.5 -19t17.5 -45z" />
+<glyph unicode="&#xf12b;" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1534 846v-206h-514l-3 27 q-4 28 -4 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q83 65 188 65q110 0 178 -59.5t68 -158.5q0 -56 -24.5 -103t-62 -76.5t-81.5 -58.5t- [...]
+<glyph unicode="&#xf12c;" d="M897 167v-167h-248l-159 252l-24 42q-8 9 -11 21h-3l-9 -21q-10 -20 -25 -44l-155 -250h-258v167h128l197 291l-185 272h-137v168h276l139 -228q2 -4 23 -42q8 -9 11 -21h3q3 9 11 21l25 42l140 228h257v-168h-125l-184 -267l204 -296h109zM1536 -50v-206h-514l-4 27 q-3 45 -3 46q0 64 26 117t65 86.5t84 65t84 54.5t65 54t26 64q0 38 -29.5 62.5t-70.5 24.5q-51 0 -97 -39q-14 -11 -36 -38l-105 92q26 37 63 66q80 65 188 65q110 0 178 -59.5t68 -158.5q0 -66 -34.5 -118.5t-84 -86t-99.5 -62.5t- [...]
+<glyph unicode="&#xf12d;" horiz-adv-x="1920" d="M896 128l336 384h-768l-336 -384h768zM1909 1205q15 -34 9.5 -71.5t-30.5 -65.5l-896 -1024q-38 -44 -96 -44h-768q-38 0 -69.5 20.5t-47.5 54.5q-15 34 -9.5 71.5t30.5 65.5l896 1024q38 44 96 44h768q38 0 69.5 -20.5t47.5 -54.5z" />
+<glyph unicode="&#xf12e;" horiz-adv-x="1664" d="M1664 438q0 -81 -44.5 -135t-123.5 -54q-41 0 -77.5 17.5t-59 38t-56.5 38t-71 17.5q-110 0 -110 -124q0 -39 16 -115t15 -115v-5q-22 0 -33 -1q-34 -3 -97.5 -11.5t-115.5 -13.5t-98 -5q-61 0 -103 26.5t-42 83.5q0 37 17.5 71t38 56.5t38 59t17.5 77.5q0 79 -54 123.5 t-135 44.5q-84 0 -143 -45.5t-59 -127.5q0 -43 15 -83t33.5 -64.5t33.5 -53t15 -50.5q0 -45 -46 -89q-37 -35 -117 -35q-95 0 -245 24q-9 2 -27.5 4t-27.5 4l-13 2q-1 0 -3 1q-2 0 -2 1v1024q2 -1 17.5 -3.5t [...]
+<glyph unicode="&#xf130;" horiz-adv-x="1152" d="M1152 832v-128q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-217 24 -364.5 187.5t-147.5 384.5v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -185 131.5 -316.5t316.5 -131.5 t316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45zM896 1216v-512q0 -132 -94 -226t-226 -94t-226 94t-94 226v512q0 132 94 226t226 94t226 -94t94 -226z" />
+<glyph unicode="&#xf131;" horiz-adv-x="1408" d="M271 591l-101 -101q-42 103 -42 214v128q0 26 19 45t45 19t45 -19t19 -45v-128q0 -53 15 -113zM1385 1193l-361 -361v-128q0 -132 -94 -226t-226 -94q-55 0 -109 19l-96 -96q97 -51 205 -51q185 0 316.5 131.5t131.5 316.5v128q0 26 19 45t45 19t45 -19t19 -45v-128 q0 -221 -147.5 -384.5t-364.5 -187.5v-132h256q26 0 45 -19t19 -45t-19 -45t-45 -19h-640q-26 0 -45 19t-19 45t19 45t45 19h256v132q-125 13 -235 81l-254 -254q-10 -10 -23 -10t-23 10l-82 82q-10 10 -10 23t10 [...]
+<glyph unicode="&#xf132;" horiz-adv-x="1280" d="M1088 576v640h-448v-1137q119 63 213 137q235 184 235 360zM1280 1344v-768q0 -86 -33.5 -170.5t-83 -150t-118 -127.5t-126.5 -103t-121 -77.5t-89.5 -49.5t-42.5 -20q-12 -6 -26 -6t-26 6q-16 7 -42.5 20t-89.5 49.5t-121 77.5t-126.5 103t-118 127.5t-83 150 t-33.5 170.5v768q0 26 19 45t45 19h1152q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf133;" horiz-adv-x="1664" d="M128 -128h1408v1024h-1408v-1024zM512 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1280 1088v288q0 14 -9 23t-23 9h-64q-14 0 -23 -9t-9 -23v-288q0 -14 9 -23t23 -9h64q14 0 23 9t9 23zM1664 1152v-1280 q0 -52 -38 -90t-90 -38h-1408q-52 0 -90 38t-38 90v1280q0 52 38 90t90 38h128v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h384v96q0 66 47 113t113 47h64q66 0 113 -47t47 -113v-96h128q52 0 90 -38t38 -90z" />
+<glyph unicode="&#xf134;" horiz-adv-x="1408" d="M512 1344q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1408 1376v-320q0 -16 -12 -25q-8 -7 -20 -7q-4 0 -7 1l-448 96q-11 2 -18 11t-7 20h-256v-102q111 -23 183.5 -111t72.5 -203v-800q0 -26 -19 -45t-45 -19h-512q-26 0 -45 19t-19 45v800 q0 106 62.5 190.5t161.5 114.5v111h-32q-59 0 -115 -23.5t-91.5 -53t-66 -66.5t-40.5 -53.5t-14 -24.5q-17 -35 -57 -35q-16 0 -29 7q-23 12 -31.5 37t3.5 49q5 10 14.5 26t37.5 53.5t60.5 70t85 67t108.5 52.5q-2 [...]
+<glyph unicode="&#xf135;" horiz-adv-x="1664" d="M1440 1088q0 40 -28 68t-68 28t-68 -28t-28 -68t28 -68t68 -28t68 28t28 68zM1664 1376q0 -249 -75.5 -430.5t-253.5 -360.5q-81 -80 -195 -176l-20 -379q-2 -16 -16 -26l-384 -224q-7 -4 -16 -4q-12 0 -23 9l-64 64q-13 14 -8 32l85 276l-281 281l-276 -85q-3 -1 -9 -1 q-14 0 -23 9l-64 64q-17 19 -5 39l224 384q10 14 26 16l379 20q96 114 176 195q188 187 358 258t431 71q14 0 24 -9.5t10 -22.5z" />
+<glyph unicode="&#xf136;" horiz-adv-x="1792" d="M1708 881l-188 -881h-304l181 849q4 21 1 43q-4 20 -16 35q-10 14 -28 24q-18 9 -40 9h-197l-205 -960h-303l204 960h-304l-205 -960h-304l272 1280h1139q157 0 245 -118q86 -116 52 -281z" />
+<glyph unicode="&#xf137;" d="M909 141l102 102q19 19 19 45t-19 45l-307 307l307 307q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf138;" d="M717 141l454 454q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l307 -307l-307 -307q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf139;" d="M1165 397l102 102q19 19 19 45t-19 45l-454 454q-19 19 -45 19t-45 -19l-454 -454q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l307 307l307 -307q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf13a;" d="M813 237l454 454q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-307 -307l-307 307q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l454 -454q19 -19 45 -19t45 19zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5 t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf13b;" horiz-adv-x="1408" d="M1130 939l16 175h-884l47 -534h612l-22 -228l-197 -53l-196 53l-13 140h-175l22 -278l362 -100h4v1l359 99l50 544h-644l-15 181h674zM0 1408h1408l-128 -1438l-578 -162l-574 162z" />
+<glyph unicode="&#xf13c;" horiz-adv-x="1792" d="M275 1408h1505l-266 -1333l-804 -267l-698 267l71 356h297l-29 -147l422 -161l486 161l68 339h-1208l58 297h1209l38 191h-1208z" />
+<glyph unicode="&#xf13d;" horiz-adv-x="1792" d="M960 1280q0 26 -19 45t-45 19t-45 -19t-19 -45t19 -45t45 -19t45 19t19 45zM1792 352v-352q0 -22 -20 -30q-8 -2 -12 -2q-13 0 -23 9l-93 93q-119 -143 -318.5 -226.5t-429.5 -83.5t-429.5 83.5t-318.5 226.5l-93 -93q-9 -9 -23 -9q-4 0 -12 2q-20 8 -20 30v352 q0 14 9 23t23 9h352q22 0 30 -20q8 -19 -7 -35l-100 -100q67 -91 189.5 -153.5t271.5 -82.5v647h-192q-26 0 -45 19t-19 45v128q0 26 19 45t45 19h192v163q-58 34 -93 92.5t-35 128.5q0 106 75 181t181 75t181 -75t75 [...]
+<glyph unicode="&#xf13e;" horiz-adv-x="1152" d="M1056 768q40 0 68 -28t28 -68v-576q0 -40 -28 -68t-68 -28h-960q-40 0 -68 28t-28 68v576q0 40 28 68t68 28h32v320q0 185 131.5 316.5t316.5 131.5t316.5 -131.5t131.5 -316.5q0 -26 -19 -45t-45 -19h-64q-26 0 -45 19t-19 45q0 106 -75 181t-181 75t-181 -75t-75 -181 v-320h736z" />
+<glyph unicode="&#xf140;" d="M1024 640q0 -106 -75 -181t-181 -75t-181 75t-75 181t75 181t181 75t181 -75t75 -181zM1152 640q0 159 -112.5 271.5t-271.5 112.5t-271.5 -112.5t-112.5 -271.5t112.5 -271.5t271.5 -112.5t271.5 112.5t112.5 271.5zM1280 640q0 -212 -150 -362t-362 -150t-362 150 t-150 362t150 362t362 150t362 -150t150 -362zM1408 640q0 130 -51 248.5t-136.5 204t-204 136.5t-248.5 51t-248.5 -51t-204 -136.5t-136.5 -204t-51 -248.5t51 -248.5t136.5 -204t204 -136.5t248.5 -51t248.5 51t204 136.5t136.5 2 [...]
+<glyph unicode="&#xf141;" horiz-adv-x="1408" d="M384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM896 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM1408 800v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf142;" horiz-adv-x="384" d="M384 288v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 800v-192q0 -40 -28 -68t-68 -28h-192q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68zM384 1312v-192q0 -40 -28 -68t-68 -28h-192 q-40 0 -68 28t-28 68v192q0 40 28 68t68 28h192q40 0 68 -28t28 -68z" />
+<glyph unicode="&#xf143;" d="M512 256q0 53 -37.5 90.5t-90.5 37.5t-90.5 -37.5t-37.5 -90.5t37.5 -90.5t90.5 -37.5t90.5 37.5t37.5 90.5zM863 162q-13 232 -177 396t-396 177q-14 1 -24 -9t-10 -23v-128q0 -13 8.5 -22t21.5 -10q154 -11 264 -121t121 -264q1 -13 10 -21.5t22 -8.5h128q13 0 23 10 t9 24zM1247 161q-5 154 -56 297.5t-139.5 260t-205 205t-260 139.5t-297.5 56q-14 1 -23 -9q-10 -10 -10 -23v-128q0 -13 9 -22t22 -10q204 -7 378 -111.5t278.5 -278.5t111.5 -378q1 -13 10 -22t22 -9h128q13 0 23 10q11 9 9 23z [...]
+<glyph unicode="&#xf144;" d="M768 1408q209 0 385.5 -103t279.5 -279.5t103 -385.5t-103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103zM1152 585q32 18 32 55t-32 55l-544 320q-31 19 -64 1q-32 -19 -32 -56v-640q0 -37 32 -56 q16 -8 32 -8q17 0 32 9z" />
+<glyph unicode="&#xf145;" horiz-adv-x="1792" d="M1024 1084l316 -316l-572 -572l-316 316zM813 105l618 618q19 19 19 45t-19 45l-362 362q-18 18 -45 18t-45 -18l-618 -618q-19 -19 -19 -45t19 -45l362 -362q18 -18 45 -18t45 18zM1702 742l-907 -908q-37 -37 -90.5 -37t-90.5 37l-126 126q56 56 56 136t-56 136 t-136 56t-136 -56l-125 126q-37 37 -37 90.5t37 90.5l907 906q37 37 90.5 37t90.5 -37l125 -125q-56 -56 -56 -136t56 -136t136 -56t136 56l126 -125q37 -37 37 -90.5t-37 -90.5z" />
+<glyph unicode="&#xf146;" d="M1280 576v128q0 26 -19 45t-45 19h-896q-26 0 -45 -19t-19 -45v-128q0 -26 19 -45t45 -19h896q26 0 45 19t19 45zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5 t84.5 -203.5z" />
+<glyph unicode="&#xf147;" horiz-adv-x="1408" d="M1152 736v-64q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h832q14 0 23 -9t9 -23zM1280 288v832q0 66 -47 113t-113 47h-832q-66 0 -113 -47t-47 -113v-832q0 -66 47 -113t113 -47h832q66 0 113 47t47 113zM1408 1120v-832q0 -119 -84.5 -203.5 t-203.5 -84.5h-832q-119 0 -203.5 84.5t-84.5 203.5v832q0 119 84.5 203.5t203.5 84.5h832q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf148;" horiz-adv-x="1024" d="M1018 933q-18 -37 -58 -37h-192v-864q0 -14 -9 -23t-23 -9h-704q-21 0 -29 18q-8 20 4 35l160 192q9 11 25 11h320v640h-192q-40 0 -58 37q-17 37 9 68l320 384q18 22 49 22t49 -22l320 -384q27 -32 9 -68z" />
+<glyph unicode="&#xf149;" horiz-adv-x="1024" d="M32 1280h704q13 0 22.5 -9.5t9.5 -23.5v-863h192q40 0 58 -37t-9 -69l-320 -384q-18 -22 -49 -22t-49 22l-320 384q-26 31 -9 69q18 37 58 37h192v640h-320q-14 0 -25 11l-160 192q-13 14 -4 34q9 19 29 19z" />
+<glyph unicode="&#xf14a;" d="M685 237l614 614q19 19 19 45t-19 45l-102 102q-19 19 -45 19t-45 -19l-467 -467l-211 211q-19 19 -45 19t-45 -19l-102 -102q-19 -19 -19 -45t19 -45l358 -358q19 -19 45 -19t45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5 t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf14b;" d="M404 428l152 -152l-52 -52h-56v96h-96v56zM818 818q14 -13 -3 -30l-291 -291q-17 -17 -30 -3q-14 13 3 30l291 291q17 17 30 3zM544 128l544 544l-288 288l-544 -544v-288h288zM1152 736l92 92q28 28 28 68t-28 68l-152 152q-28 28 -68 28t-68 -28l-92 -92zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf14c;" d="M1280 608v480q0 26 -19 45t-45 19h-480q-42 0 -59 -39q-17 -41 14 -70l144 -144l-534 -534q-19 -19 -19 -45t19 -45l102 -102q19 -19 45 -19t45 19l534 534l144 -144q18 -19 45 -19q12 0 25 5q39 17 39 59zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960 q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf14d;" d="M1005 435l352 352q19 19 19 45t-19 45l-352 352q-30 31 -69 14q-40 -17 -40 -59v-160q-119 0 -216 -19.5t-162.5 -51t-114 -79t-76.5 -95.5t-44.5 -109t-21.5 -111.5t-5 -110.5q0 -181 167 -404q10 -12 25 -12q7 0 13 3q22 9 19 33q-44 354 62 473q46 52 130 75.5 t224 23.5v-160q0 -42 40 -59q12 -5 24 -5q26 0 45 19zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf14e;" d="M640 448l256 128l-256 128v-256zM1024 1039v-542l-512 -256v542zM1312 640q0 148 -73 273t-198 198t-273 73t-273 -73t-198 -198t-73 -273t73 -273t198 -198t273 -73t273 73t198 198t73 273zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf150;" d="M1145 861q18 -35 -5 -66l-320 -448q-19 -27 -52 -27t-52 27l-320 448q-23 31 -5 66q17 35 57 35h640q40 0 57 -35zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf151;" d="M1145 419q-17 -35 -57 -35h-640q-40 0 -57 35q-18 35 5 66l320 448q19 27 52 27t52 -27l320 -448q23 -31 5 -66zM1280 160v960q0 13 -9.5 22.5t-22.5 9.5h-960q-13 0 -22.5 -9.5t-9.5 -22.5v-960q0 -13 9.5 -22.5t22.5 -9.5h960q13 0 22.5 9.5t9.5 22.5zM1536 1120v-960 q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf152;" d="M1088 640q0 -33 -27 -52l-448 -320q-31 -23 -66 -5q-35 17 -35 57v640q0 40 35 57q35 18 66 -5l448 -320q27 -19 27 -52zM1280 160v960q0 14 -9 23t-23 9h-960q-14 0 -23 -9t-9 -23v-960q0 -14 9 -23t23 -9h960q14 0 23 9t9 23zM1536 1120v-960q0 -119 -84.5 -203.5 t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf153;" horiz-adv-x="1024" d="M976 229l35 -159q3 -12 -3 -22.5t-17 -14.5l-5 -1q-4 -2 -10.5 -3.5t-16 -4.5t-21.5 -5.5t-25.5 -5t-30 -5t-33.5 -4.5t-36.5 -3t-38.5 -1q-234 0 -409 130.5t-238 351.5h-95q-13 0 -22.5 9.5t-9.5 22.5v113q0 13 9.5 22.5t22.5 9.5h66q-2 57 1 105h-67q-14 0 -23 9 t-9 23v114q0 14 9 23t23 9h98q67 210 243.5 338t400.5 128q102 0 194 -23q11 -3 20 -15q6 -11 3 -24l-43 -159q-3 -13 -14 -19.5t-24 -2.5l-4 1q-4 1 -11.5 2.5l-17.5 3.5t-22.5 3.5t-26 3t-29 2.5t-29.5 1q-126  [...]
+<glyph unicode="&#xf154;" horiz-adv-x="1024" d="M1020 399v-367q0 -14 -9 -23t-23 -9h-956q-14 0 -23 9t-9 23v150q0 13 9.5 22.5t22.5 9.5h97v383h-95q-14 0 -23 9.5t-9 22.5v131q0 14 9 23t23 9h95v223q0 171 123.5 282t314.5 111q185 0 335 -125q9 -8 10 -20.5t-7 -22.5l-103 -127q-9 -11 -22 -12q-13 -2 -23 7 q-5 5 -26 19t-69 32t-93 18q-85 0 -137 -47t-52 -123v-215h305q13 0 22.5 -9t9.5 -23v-131q0 -13 -9.5 -22.5t-22.5 -9.5h-305v-379h414v181q0 13 9 22.5t23 9.5h162q14 0 23 -9.5t9 -22.5z" />
+<glyph unicode="&#xf155;" horiz-adv-x="1024" d="M978 351q0 -153 -99.5 -263.5t-258.5 -136.5v-175q0 -14 -9 -23t-23 -9h-135q-13 0 -22.5 9.5t-9.5 22.5v175q-66 9 -127.5 31t-101.5 44.5t-74 48t-46.5 37.5t-17.5 18q-17 21 -2 41l103 135q7 10 23 12q15 2 24 -9l2 -2q113 -99 243 -125q37 -8 74 -8q81 0 142.5 43 t61.5 122q0 28 -15 53t-33.5 42t-58.5 37.5t-66 32t-80 32.5q-39 16 -61.5 25t-61.5 26.5t-62.5 31t-56.5 35.5t-53.5 42.5t-43.5 49t-35.5 58t-21 66.5t-8.5 78q0 138 98 242t255 134v180q0 13 9.5 22.5t22.5  [...]
+<glyph unicode="&#xf156;" horiz-adv-x="898" d="M898 1066v-102q0 -14 -9 -23t-23 -9h-168q-23 -144 -129 -234t-276 -110q167 -178 459 -536q14 -16 4 -34q-8 -18 -29 -18h-195q-16 0 -25 12q-306 367 -498 571q-9 9 -9 22v127q0 13 9.5 22.5t22.5 9.5h112q132 0 212.5 43t102.5 125h-427q-14 0 -23 9t-9 23v102 q0 14 9 23t23 9h413q-57 113 -268 113h-145q-13 0 -22.5 9.5t-9.5 22.5v133q0 14 9 23t23 9h832q14 0 23 -9t9 -23v-102q0 -14 -9 -23t-23 -9h-233q47 -61 64 -144h171q14 0 23 -9t9 -23z" />
+<glyph unicode="&#xf157;" horiz-adv-x="1027" d="M603 0h-172q-13 0 -22.5 9t-9.5 23v330h-288q-13 0 -22.5 9t-9.5 23v103q0 13 9.5 22.5t22.5 9.5h288v85h-288q-13 0 -22.5 9t-9.5 23v104q0 13 9.5 22.5t22.5 9.5h214l-321 578q-8 16 0 32q10 16 28 16h194q19 0 29 -18l215 -425q19 -38 56 -125q10 24 30.5 68t27.5 61 l191 420q8 19 29 19h191q17 0 27 -16q9 -14 1 -31l-313 -579h215q13 0 22.5 -9.5t9.5 -22.5v-104q0 -14 -9.5 -23t-22.5 -9h-290v-85h290q13 0 22.5 -9.5t9.5 -22.5v-103q0 -14 -9.5 -23t-22.5 -9h-290v-330q [...]
+<glyph unicode="&#xf158;" horiz-adv-x="1664" d="M1664 352v-32q0 -132 -94 -226t-226 -94h-128q-132 0 -226 94t-94 226v480h-224q-2 -102 -14.5 -190.5t-30.5 -156t-48.5 -126.5t-57 -99.5t-67.5 -77.5t-69.5 -58.5t-74 -44t-69 -32t-65.5 -25.5q-4 -2 -32 -13q-8 -2 -12 -2q-22 0 -30 20l-71 178q-5 13 0 25t17 17 q7 3 20 7.5t18 6.5q31 12 46.5 18.5t44.5 20t45.5 26t42 32.5t40.5 42.5t34.5 53.5t30.5 68.5t22.5 83.5t17 103t6.5 123h-256q-14 0 -23 9t-9 23v160q0 14 9 23t23 9h1216q14 0 23 -9t9 -23v-160q0 -14 -9 -23t [...]
+<glyph unicode="&#xf159;" horiz-adv-x="1792" d="M514 341l81 299h-159l75 -300q1 -1 1 -3t1 -3q0 1 0.5 3.5t0.5 3.5zM630 768l35 128h-292l32 -128h225zM822 768h139l-35 128h-70zM1271 340l78 300h-162l81 -299q0 -1 0.5 -3.5t1.5 -3.5q0 1 0.5 3t0.5 3zM1382 768l33 128h-297l34 -128h230zM1792 736v-64q0 -14 -9 -23 t-23 -9h-213l-164 -616q-7 -24 -31 -24h-159q-24 0 -31 24l-166 616h-209l-167 -616q-7 -24 -31 -24h-159q-11 0 -19.5 7t-10.5 17l-160 616h-208q-14 0 -23 9t-9 23v64q0 14 9 23t23 9h175l-33 128h-142q-1 [...]
+<glyph unicode="&#xf15a;" horiz-adv-x="1280" d="M1167 896q18 -182 -131 -258q117 -28 175 -103t45 -214q-7 -71 -32.5 -125t-64.5 -89t-97 -58.5t-121.5 -34.5t-145.5 -15v-255h-154v251q-80 0 -122 1v-252h-154v255q-18 0 -54 0.5t-55 0.5h-200l31 183h111q50 0 58 51v402h16q-6 1 -16 1v287q-13 68 -89 68h-111v164 l212 -1q64 0 97 1v252h154v-247q82 2 122 2v245h154v-252q79 -7 140 -22.5t113 -45t82.5 -78t36.5 -114.5zM952 351q0 36 -15 64t-37 46t-57.5 30.5t-65.5 18.5t-74 9t-69 3t-64.5 -1t-47.5 -1v-338q8 0 37 -0 [...]
+<glyph unicode="&#xf15b;" horiz-adv-x="1280" d="M1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
+<glyph unicode="&#xf15c;" horiz-adv-x="1280" d="M1024 160v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1024 416v64q0 14 -9 23t-23 9h-704q-14 0 -23 -9t-9 -23v-64q0 -14 9 -23t23 -9h704q14 0 23 9t9 23zM1280 768v-800q0 -40 -28 -68t-68 -28h-1088q-40 0 -68 28 t-28 68v1344q0 40 28 68t68 28h544v-544q0 -40 28 -68t68 -28h544zM1277 896h-509v509q82 -15 132 -65l312 -312q50 -50 65 -132z" />
+<glyph unicode="&#xf15d;" horiz-adv-x="1664" d="M1191 1128h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1572 -23 v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567v-89l-369 -530q-6 -8 -21 -26l-11 -11v-2l14 2q9 2 30 2h248v119h121zM1661 874v-106h-288v106h75l- [...]
+<glyph unicode="&#xf15e;" horiz-adv-x="1664" d="M1191 104h177l-72 218l-12 47q-2 16 -2 20h-4l-3 -20q0 -1 -3.5 -18t-7.5 -29zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1661 -150 v-106h-288v106h75l-47 144h-243l-47 -144h75v-106h-287v106h70l230 662h162l230 -662h70zM1572 1001v-233h-584v90l369 529q12 18 21 27l11 9v3q-2 0 -6.5 -0.5t-7.5 -0.5q-12 -3 -30 -3h-232v-115h-120v229h567 [...]
+<glyph unicode="&#xf160;" horiz-adv-x="1792" d="M736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23zM1792 -32v-192q0 -14 -9 -23t-23 -9h-832q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h832 q14 0 23 -9t9 -23zM1600 480v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14 0 23 -9t9 -23zM1408 992v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14  [...]
+<glyph unicode="&#xf161;" horiz-adv-x="1792" d="M1216 -32v-192q0 -14 -9 -23t-23 -9h-256q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h256q14 0 23 -9t9 -23zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192 q14 0 23 -9t9 -23zM1408 480v-192q0 -14 -9 -23t-23 -9h-448q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h448q14 0 23 -9t9 -23zM1600 992v-192q0 -14 -9 -23t-23 -9h-640q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h640q14  [...]
+<glyph unicode="&#xf162;" d="M1346 223q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9t9 -23 zM1486 165q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -42 15l39 113q15 -7 31 -11q37 -13 75 -13q84 0 134.5 58.5t66.5 145.5h-2q-21 -23 -61.5 -37t-84.5 -14q-106 0 -173 71.5t-67 [...]
+<glyph unicode="&#xf163;" d="M1346 1247q0 63 -44 116t-103 53q-52 0 -83 -37t-31 -94t36.5 -95t104.5 -38q50 0 85 27t35 68zM736 96q0 -12 -10 -24l-319 -319q-10 -9 -23 -9q-12 0 -23 9l-320 320q-15 16 -7 35q8 20 30 20h192v1376q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1376h192q14 0 23 -9 t9 -23zM1456 -142v-114h-469v114h167v432q0 7 0.5 19t0.5 17v16h-2l-7 -12q-8 -13 -26 -31l-62 -58l-82 86l192 185h123v-654h165zM1486 1189q0 -62 -13 -121.5t-41 -114t-68 -95.5t-98.5 -65.5t-127.5 -24.5q-62 0 -108 16q-24 8 -4 [...]
+<glyph unicode="&#xf164;" horiz-adv-x="1664" d="M256 192q0 26 -19 45t-45 19q-27 0 -45.5 -19t-18.5 -45q0 -27 18.5 -45.5t45.5 -18.5q26 0 45 18.5t19 45.5zM416 704v-640q0 -26 -19 -45t-45 -19h-288q-26 0 -45 19t-19 45v640q0 26 19 45t45 19h288q26 0 45 -19t19 -45zM1600 704q0 -86 -55 -149q15 -44 15 -76 q3 -76 -43 -137q17 -56 0 -117q-15 -57 -54 -94q9 -112 -49 -181q-64 -76 -197 -78h-36h-76h-17q-66 0 -144 15.5t-121.5 29t-120.5 39.5q-123 43 -158 44q-26 1 -45 19.5t-19 44.5v641q0 25 18 43.5t43 20.5q24  [...]
+<glyph unicode="&#xf165;" horiz-adv-x="1664" d="M256 960q0 -26 -19 -45t-45 -19q-27 0 -45.5 19t-18.5 45q0 27 18.5 45.5t45.5 18.5q26 0 45 -18.5t19 -45.5zM416 448v640q0 26 -19 45t-45 19h-288q-26 0 -45 -19t-19 -45v-640q0 -26 19 -45t45 -19h288q26 0 45 19t19 45zM1545 597q55 -61 55 -149q-1 -78 -57.5 -135 t-134.5 -57h-277q4 -14 8 -24t11 -22t10 -18q18 -37 27 -57t19 -58.5t10 -76.5q0 -24 -0.5 -39t-5 -45t-12 -50t-24 -45t-40 -40.5t-60 -26t-82.5 -10.5q-26 0 -45 19q-20 20 -34 50t-19.5 52t-12.5 61q-9 42 [...]
+<glyph unicode="&#xf166;" d="M919 233v157q0 50 -29 50q-17 0 -33 -16v-224q16 -16 33 -16q29 0 29 49zM1103 355h66v34q0 51 -33 51t-33 -51v-34zM532 621v-70h-80v-423h-74v423h-78v70h232zM733 495v-367h-67v40q-39 -45 -76 -45q-33 0 -42 28q-6 16 -6 54v290h66v-270q0 -24 1 -26q1 -15 15 -15 q20 0 42 31v280h67zM985 384v-146q0 -52 -7 -73q-12 -42 -53 -42q-35 0 -68 41v-36h-67v493h67v-161q32 40 68 40q41 0 53 -42q7 -21 7 -74zM1236 255v-9q0 -29 -2 -43q-3 -22 -15 -40q-27 -40 -80 -40q-52 0 -81 38q-21 27 -21 86 [...]
+<glyph unicode="&#xf167;" d="M971 292v-211q0 -67 -39 -67q-23 0 -45 22v301q22 22 45 22q39 0 39 -67zM1309 291v-46h-90v46q0 68 45 68t45 -68zM343 509h107v94h-312v-94h105v-569h100v569zM631 -60h89v494h-89v-378q-30 -42 -57 -42q-18 0 -21 21q-1 3 -1 35v364h-89v-391q0 -49 8 -73 q12 -37 58 -37q48 0 102 61v-54zM1060 88v197q0 73 -9 99q-17 56 -71 56q-50 0 -93 -54v217h-89v-663h89v48q45 -55 93 -55q54 0 71 55q9 27 9 100zM1398 98v13h-91q0 -51 -2 -61q-7 -36 -40 -36q-46 0 -46 69v87h179v103q0 79 -27 116q-39  [...]
+<glyph unicode="&#xf168;" horiz-adv-x="1408" d="M597 869q-10 -18 -257 -456q-27 -46 -65 -46h-239q-21 0 -31 17t0 36l253 448q1 0 0 1l-161 279q-12 22 -1 37q9 15 32 15h239q40 0 66 -45zM1403 1511q11 -16 0 -37l-528 -934v-1l336 -615q11 -20 1 -37q-10 -15 -32 -15h-239q-42 0 -66 45l-339 622q18 32 531 942 q25 45 64 45h241q22 0 31 -15z" />
+<glyph unicode="&#xf169;" d="M685 771q0 1 -126 222q-21 34 -52 34h-184q-18 0 -26 -11q-7 -12 1 -29l125 -216v-1l-196 -346q-9 -14 0 -28q8 -13 24 -13h185q31 0 50 36zM1309 1268q-7 12 -24 12h-187q-30 0 -49 -35l-411 -729q1 -2 262 -481q20 -35 52 -35h184q18 0 25 12q8 13 -1 28l-260 476v1 l409 723q8 16 0 28zM1536 1120v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf16a;" horiz-adv-x="1792" d="M1280 640q0 37 -30 54l-512 320q-31 20 -65 2q-33 -18 -33 -56v-640q0 -38 33 -56q16 -8 31 -8q20 0 34 10l512 320q30 17 30 54zM1792 640q0 -96 -1 -150t-8.5 -136.5t-22.5 -147.5q-16 -73 -69 -123t-124 -58q-222 -25 -671 -25t-671 25q-71 8 -124.5 58t-69.5 123 q-14 65 -21.5 147.5t-8.5 136.5t-1 150t1 150t8.5 136.5t22.5 147.5q16 73 69 123t124 58q222 25 671 25t671 -25q71 -8 124.5 -58t69.5 -123q14 -65 21.5 -147.5t8.5 -136.5t1 -150z" />
+<glyph unicode="&#xf16b;" horiz-adv-x="1792" d="M402 829l494 -305l-342 -285l-490 319zM1388 274v-108l-490 -293v-1l-1 1l-1 -1v1l-489 293v108l147 -96l342 284v2l1 -1l1 1v-2l343 -284zM554 1418l342 -285l-494 -304l-338 270zM1390 829l338 -271l-489 -319l-343 285zM1239 1418l489 -319l-338 -270l-494 304z" />
+<glyph unicode="&#xf16c;" horiz-adv-x="1408" d="M928 135v-151l-707 -1v151zM1169 481v-701l-1 -35v-1h-1132l-35 1h-1v736h121v-618h928v618h120zM241 393l704 -65l-13 -150l-705 65zM309 709l683 -183l-39 -146l-683 183zM472 1058l609 -360l-77 -130l-609 360zM832 1389l398 -585l-124 -85l-399 584zM1285 1536 l121 -697l-149 -26l-121 697z" />
+<glyph unicode="&#xf16d;" d="M1362 110v648h-135q20 -63 20 -131q0 -126 -64 -232.5t-174 -168.5t-240 -62q-197 0 -337 135.5t-140 327.5q0 68 20 131h-141v-648q0 -26 17.5 -43.5t43.5 -17.5h1069q25 0 43 17.5t18 43.5zM1078 643q0 124 -90.5 211.5t-218.5 87.5q-127 0 -217.5 -87.5t-90.5 -211.5 t90.5 -211.5t217.5 -87.5q128 0 218.5 87.5t90.5 211.5zM1362 1003v165q0 28 -20 48.5t-49 20.5h-174q-29 0 -49 -20.5t-20 -48.5v-165q0 -29 20 -49t49 -20h174q29 0 49 20t20 49zM1536 1211v-1142q0 -81 -58 -139t-139 -58h-11 [...]
+<glyph unicode="&#xf16e;" d="M1248 1408q119 0 203.5 -84.5t84.5 -203.5v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960zM698 640q0 88 -62 150t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150zM1262 640q0 88 -62 150 t-150 62t-150 -62t-62 -150t62 -150t150 -62t150 62t62 150z" />
+<glyph unicode="&#xf170;" d="M768 914l201 -306h-402zM1133 384h94l-459 691l-459 -691h94l104 160h522zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf171;" horiz-adv-x="1408" d="M815 677q8 -63 -50.5 -101t-111.5 -6q-39 17 -53.5 58t-0.5 82t52 58q36 18 72.5 12t64 -35.5t27.5 -67.5zM926 698q-14 107 -113 164t-197 13q-63 -28 -100.5 -88.5t-34.5 -129.5q4 -91 77.5 -155t165.5 -56q91 8 152 84t50 168zM1165 1240q-20 27 -56 44.5t-58 22 t-71 12.5q-291 47 -566 -2q-43 -7 -66 -12t-55 -22t-50 -43q30 -28 76 -45.5t73.5 -22t87.5 -11.5q228 -29 448 -1q63 8 89.5 12t72.5 21.5t75 46.5zM1222 205q-8 -26 -15.5 -76.5t-14 -84t-28.5 -70t-58 -56.5q- [...]
+<glyph unicode="&#xf172;" d="M848 666q0 43 -41 66t-77 1q-43 -20 -42.5 -72.5t43.5 -70.5q39 -23 81 4t36 72zM928 682q8 -66 -36 -121t-110 -61t-119 40t-56 113q-2 49 25.5 93t72.5 64q70 31 141.5 -10t81.5 -118zM1100 1073q-20 -21 -53.5 -34t-53 -16t-63.5 -8q-155 -20 -324 0q-44 6 -63 9.5 t-52.5 16t-54.5 32.5q13 19 36 31t40 15.5t47 8.5q198 35 408 1q33 -5 51 -8.5t43 -16t39 -31.5zM1142 327q0 7 5.5 26.5t3 32t-17.5 16.5q-161 -106 -365 -106t-366 106l-12 -6l-5 -12q26 -154 41 -210q47 -81 204 -108q249 -46 4 [...]
+<glyph unicode="&#xf173;" horiz-adv-x="1024" d="M390 1408h219v-388h364v-241h-364v-394q0 -136 14 -172q13 -37 52 -60q50 -31 117 -31q117 0 232 76v-242q-102 -48 -178 -65q-77 -19 -173 -19q-105 0 -186 27q-78 25 -138 75q-58 51 -79 105q-22 54 -22 161v539h-170v217q91 30 155 84q64 55 103 132q39 78 54 196z " />
+<glyph unicode="&#xf174;" d="M1123 127v181q-88 -56 -174 -56q-51 0 -88 23q-29 17 -39 45q-11 30 -11 129v295h274v181h-274v291h-164q-11 -90 -40 -147t-78 -99q-48 -40 -116 -63v-163h127v-404q0 -78 17 -121q17 -42 59 -78q43 -37 104 -57q62 -20 140 -20q67 0 129 14q57 13 134 49zM1536 1120 v-960q0 -119 -84.5 -203.5t-203.5 -84.5h-960q-119 0 -203.5 84.5t-84.5 203.5v960q0 119 84.5 203.5t203.5 84.5h960q119 0 203.5 -84.5t84.5 -203.5z" />
+<glyph unicode="&#xf175;" horiz-adv-x="768" d="M765 237q8 -19 -5 -35l-350 -384q-10 -10 -23 -10q-14 0 -24 10l-355 384q-13 16 -5 35q9 19 29 19h224v1248q0 14 9 23t23 9h192q14 0 23 -9t9 -23v-1248h224q21 0 29 -19z" />
+<glyph unicode="&#xf176;" horiz-adv-x="768" d="M765 1043q-9 -19 -29 -19h-224v-1248q0 -14 -9 -23t-23 -9h-192q-14 0 -23 9t-9 23v1248h-224q-21 0 -29 19t5 35l350 384q10 10 23 10q14 0 24 -10l355 -384q13 -16 5 -35z" />
+<glyph unicode="&#xf177;" horiz-adv-x="1792" d="M1792 736v-192q0 -14 -9 -23t-23 -9h-1248v-224q0 -21 -19 -29t-35 5l-384 350q-10 10 -10 23q0 14 10 24l384 354q16 14 35 6q19 -9 19 -29v-224h1248q14 0 23 -9t9 -23z" />
+<glyph unicode="&#xf178;" horiz-adv-x="1792" d="M1728 643q0 -14 -10 -24l-384 -354q-16 -14 -35 -6q-19 9 -19 29v224h-1248q-14 0 -23 9t-9 23v192q0 14 9 23t23 9h1248v224q0 21 19 29t35 -5l384 -350q10 -10 10 -23z" />
+<glyph unicode="&#xf179;" horiz-adv-x="1408" d="M1393 321q-39 -125 -123 -250q-129 -196 -257 -196q-49 0 -140 32q-86 32 -151 32q-61 0 -142 -33q-81 -34 -132 -34q-152 0 -301 259q-147 261 -147 503q0 228 113 374q112 144 284 144q72 0 177 -30q104 -30 138 -30q45 0 143 34q102 34 173 34q119 0 213 -65 q52 -36 104 -100q-79 -67 -114 -118q-65 -94 -65 -207q0 -124 69 -223t158 -126zM1017 1494q0 -61 -29 -136q-30 -75 -93 -138q-54 -54 -108 -72q-37 -11 -104 -17q3 149 78 257q74 107 250 148q1 -3 2.5 -11t2.5 -11 [...]
+<glyph unicode="&#xf17a;" horiz-adv-x="1664" d="M682 530v-651l-682 94v557h682zM682 1273v-659h-682v565zM1664 530v-786l-907 125v661h907zM1664 1408v-794h-907v669z" />
+<glyph unicode="&#xf17b;" horiz-adv-x="1408" d="M493 1053q16 0 27.5 11.5t11.5 27.5t-11.5 27.5t-27.5 11.5t-27 -11.5t-11 -27.5t11 -27.5t27 -11.5zM915 1053q16 0 27 11.5t11 27.5t-11 27.5t-27 11.5t-27.5 -11.5t-11.5 -27.5t11.5 -27.5t27.5 -11.5zM103 869q42 0 72 -30t30 -72v-430q0 -43 -29.5 -73t-72.5 -30 t-73 30t-30 73v430q0 42 30 72t73 30zM1163 850v-666q0 -46 -32 -78t-77 -32h-75v-227q0 -43 -30 -73t-73 -30t-73 30t-30 73v227h-138v-227q0 -43 -30 -73t-73 -30q-42 0 -72 30t-30 73l-1 227h-74q-46 0 -78  [...]
+<glyph unicode="&#xf17c;" d="M663 1125q-11 -1 -15.5 -10.5t-8.5 -9.5q-5 -1 -5 5q0 12 19 15h10zM750 1111q-4 -1 -11.5 6.5t-17.5 4.5q24 11 32 -2q3 -6 -3 -9zM399 684q-4 1 -6 -3t-4.5 -12.5t-5.5 -13.5t-10 -13q-7 -10 -1 -12q4 -1 12.5 7t12.5 18q1 3 2 7t2 6t1.5 4.5t0.5 4v3t-1 2.5t-3 2z M1254 325q0 18 -55 42q4 15 7.5 27.5t5 26t3 21.5t0.5 22.5t-1 19.5t-3.5 22t-4 20.5t-5 25t-5.5 26.5q-10 48 -47 103t-72 75q24 -20 57 -83q87 -162 54 -278q-11 -40 -50 -42q-31 -4 -38.5 18.5t-8 83.5t-11.5 107q-9 39 -19.5 69 [...]
+<glyph unicode="&#xf17d;" d="M1024 36q-42 241 -140 498h-2l-2 -1q-16 -6 -43 -16.5t-101 -49t-137 -82t-131 -114.5t-103 -148l-15 11q184 -150 418 -150q132 0 256 52zM839 643q-21 49 -53 111q-311 -93 -673 -93q-1 -7 -1 -21q0 -124 44 -236.5t124 -201.5q50 89 123.5 166.5t142.5 124.5t130.5 81 t99.5 48l37 13q4 1 13 3.5t13 4.5zM732 855q-120 213 -244 378q-138 -65 -234 -186t-128 -272q302 0 606 80zM1416 536q-210 60 -409 29q87 -239 128 -469q111 75 185 189.5t96 250.5zM611 1277q-1 0 -2 -1q1 1 2 1zM1201 1132q [...]
+<glyph unicode="&#xf17e;" d="M1173 473q0 50 -19.5 91.5t-48.5 68.5t-73 49t-82.5 34t-87.5 23l-104 24q-30 7 -44 10.5t-35 11.5t-30 16t-16.5 21t-7.5 30q0 77 144 77q43 0 77 -12t54 -28.5t38 -33.5t40 -29t48 -12q47 0 75.5 32t28.5 77q0 55 -56 99.5t-142 67.5t-182 23q-68 0 -132 -15.5 t-119.5 -47t-89 -87t-33.5 -128.5q0 -61 19 -106.5t56 -75.5t80 -48.5t103 -32.5l146 -36q90 -22 112 -36q32 -20 32 -60q0 -39 -40 -64.5t-105 -25.5q-51 0 -91.5 16t-65 38.5t-45.5 45t-46 38.5t-54 16q-50 0 -75.5 -30t-25.5 -75q0 - [...]
+<glyph unicode="&#xf180;" horiz-adv-x="1664" d="M1483 512l-587 -587q-52 -53 -127.5 -53t-128.5 53l-587 587q-53 53 -53 128t53 128l587 587q53 53 128 53t128 -53l265 -265l-398 -399l-188 188q-42 42 -99 42q-59 0 -100 -41l-120 -121q-42 -40 -42 -99q0 -58 42 -100l406 -408q30 -28 67 -37l6 -4h28q60 0 99 41 l619 619l2 -3q53 -53 53 -128t-53 -128zM1406 1138l120 -120q14 -15 14 -36t-14 -36l-730 -730q-17 -15 -37 -15v0q-4 0 -6 1q-18 2 -30 14l-407 408q-14 15 -14 36t14 35l121 120q13 15 35 15t36 -15l252 -252l [...]
+<glyph unicode="&#xf181;" d="M704 192v1024q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-1024q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1376 576v640q0 14 -9 23t-23 9h-480q-14 0 -23 -9t-9 -23v-640q0 -14 9 -23t23 -9h480q14 0 23 9t9 23zM1536 1344v-1408q0 -26 -19 -45t-45 -19h-1408 q-26 0 -45 19t-19 45v1408q0 26 19 45t45 19h1408q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf182;" horiz-adv-x="1280" d="M1280 480q0 -40 -28 -68t-68 -28q-51 0 -80 43l-227 341h-45v-132l247 -411q9 -15 9 -33q0 -26 -19 -45t-45 -19h-192v-272q0 -46 -33 -79t-79 -33h-160q-46 0 -79 33t-33 79v272h-192q-26 0 -45 19t-19 45q0 18 9 33l247 411v132h-45l-227 -341q-29 -43 -80 -43 q-40 0 -68 28t-28 68q0 29 16 53l256 384q73 107 176 107h384q103 0 176 -107l256 -384q16 -24 16 -53zM864 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65. [...]
+<glyph unicode="&#xf183;" horiz-adv-x="1024" d="M1024 832v-416q0 -40 -28 -68t-68 -28t-68 28t-28 68v352h-64v-912q0 -46 -33 -79t-79 -33t-79 33t-33 79v464h-64v-464q0 -46 -33 -79t-79 -33t-79 33t-33 79v912h-64v-352q0 -40 -28 -68t-68 -28t-68 28t-28 68v416q0 80 56 136t136 56h640q80 0 136 -56t56 -136z M736 1280q0 -93 -65.5 -158.5t-158.5 -65.5t-158.5 65.5t-65.5 158.5t65.5 158.5t158.5 65.5t158.5 -65.5t65.5 -158.5z" />
+<glyph unicode="&#xf184;" d="M773 234l350 473q16 22 24.5 59t-6 85t-61.5 79q-40 26 -83 25.5t-73.5 -17.5t-54.5 -45q-36 -40 -96 -40q-59 0 -95 40q-24 28 -54.5 45t-73.5 17.5t-84 -25.5q-46 -31 -60.5 -79t-6 -85t24.5 -59zM1536 640q0 -209 -103 -385.5t-279.5 -279.5t-385.5 -103t-385.5 103 t-279.5 279.5t-103 385.5t103 385.5t279.5 279.5t385.5 103t385.5 -103t279.5 -279.5t103 -385.5z" />
+<glyph unicode="&#xf185;" horiz-adv-x="1792" d="M1472 640q0 117 -45.5 223.5t-123 184t-184 123t-223.5 45.5t-223.5 -45.5t-184 -123t-123 -184t-45.5 -223.5t45.5 -223.5t123 -184t184 -123t223.5 -45.5t223.5 45.5t184 123t123 184t45.5 223.5zM1748 363q-4 -15 -20 -20l-292 -96v-306q0 -16 -13 -26q-15 -10 -29 -4 l-292 94l-180 -248q-10 -13 -26 -13t-26 13l-180 248l-292 -94q-14 -6 -29 4q-13 10 -13 26v306l-292 96q-16 5 -20 20q-5 17 4 29l180 248l-180 248q-9 13 -4 29q4 15 20 20l292 96v306q0 16 13 26q15 10 2 [...]
+<glyph unicode="&#xf186;" d="M1262 233q-54 -9 -110 -9q-182 0 -337 90t-245 245t-90 337q0 192 104 357q-201 -60 -328.5 -229t-127.5 -384q0 -130 51 -248.5t136.5 -204t204 -136.5t248.5 -51q144 0 273.5 61.5t220.5 171.5zM1465 318q-94 -203 -283.5 -324.5t-413.5 -121.5q-156 0 -298 61 t-245 164t-164 245t-61 298q0 153 57.5 292.5t156 241.5t235.5 164.5t290 68.5q44 2 61 -39q18 -41 -15 -72q-86 -78 -131.5 -181.5t-45.5 -218.5q0 -148 73 -273t198 -198t273 -73q118 0 228 51q41 18 72 -13q14 -14 17.5 -34t-4.5 -38z" />
+<glyph unicode="&#xf187;" horiz-adv-x="1792" d="M1088 704q0 26 -19 45t-45 19h-256q-26 0 -45 -19t-19 -45t19 -45t45 -19h256q26 0 45 19t19 45zM1664 896v-960q0 -26 -19 -45t-45 -19h-1408q-26 0 -45 19t-19 45v960q0 26 19 45t45 19h1408q26 0 45 -19t19 -45zM1728 1344v-256q0 -26 -19 -45t-45 -19h-1536 q-26 0 -45 19t-19 45v256q0 26 19 45t45 19h1536q26 0 45 -19t19 -45z" />
+<glyph unicode="&#xf188;" horiz-adv-x="1664" d="M1632 576q0 -26 -19 -45t-45 -19h-224q0 -171 -67 -290l208 -209q19 -19 19 -45t-19 -45q-18 -19 -45 -19t-45 19l-198 197q-5 -5 -15 -13t-42 -28.5t-65 -36.5t-82 -29t-97 -13v896h-128v-896q-51 0 -101.5 13.5t-87 33t-66 39t-43.5 32.5l-15 14l-183 -207 q-20 -21 -48 -21q-24 0 -43 16q-19 18 -20.5 44.5t15.5 46.5l202 227q-58 114 -58 274h-224q-26 0 -45 19t-19 45t19 45t45 19h224v294l-173 173q-19 19 -19 45t19 45t45 19t45 -19l173 -173h844l173 173q19 19 45 19t45 [...]
+<glyph unicode="&#xf189;" horiz-adv-x="1920" d="M1917 1016q23 -64 -150 -294q-24 -32 -65 -85q-78 -100 -90 -131q-17 -41 14 -81q17 -21 81 -82h1l1 -1l1 -1l2 -2q141 -131 191 -221q3 -5 6.5 -12.5t7 -26.5t-0.5 -34t-25 -27.5t-59 -12.5l-256 -4q-24 -5 -56 5t-52 22l-20 12q-30 21 -70 64t-68.5 77.5t-61 58 t-56.5 15.5q-3 -1 -8 -3.5t-17 -14.5t-21.5 -29.5t-17 -52t-6.5 -77.5q0 -15 -3.5 -27.5t-7.5 -18.5l-4 -5q-18 -19 -53 -22h-115q-71 -4 -146 16.5t-131.5 53t-103 66t-70.5 57.5l-25 24q-10 10 -27.5 30t-71.5 91 [...]
+<glyph unicode="&#xf18a;" horiz-adv-x="1792" d="M675 252q21 34 11 69t-45 50q-34 14 -73 1t-60 -46q-22 -34 -13 -68.5t43 -50.5t74.5 -2.5t62.5 47.5zM769 373q8 13 3.5 26.5t-17.5 18.5q-14 5 -28.5 -0.5t-21.5 -18.5q-17 -31 13 -45q14 -5 29 0.5t22 18.5zM943 266q-45 -102 -158 -150t-224 -12 q-107 34 -147.5 126.5t6.5 187.5q47 93 151.5 139t210.5 19q111 -29 158.5 -119.5t2.5 -190.5zM1255 426q-9 96 -89 170t-208.5 109t-274.5 21q-223 -23 -369.5 -141.5t-132.5 -264.5q9 -96 89 -170t208.5 -109t274.5 -21q223 23 [...]
+<glyph unicode="&#xf18b;" horiz-adv-x="1920" d="M805 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM453 1176v-344q0 -179 -89.5 -326t-234.5 -217q-129 152 -129 351q0 200 129.5 352t323.5 184zM958 991q-128 -152 -128 -351q0 -201 128 -351q-145 70 -234.5 218t-89.5 328 v341q196 -33 324 -185zM1638 163q-122 -67 -261 -67q-141 0 -261 67q98 61 167 149t94 191q25 -103 94 -191t167 -149zM1286 1176v-344q0 -179 -91 -326t-237 -217v0q133 154 133 351q0 195 -133 351q129 151 [...]
+<glyph unicode="&#xf18c;" horiz-adv-x="1792" />
+<glyph unicode="&#xf18d;" horiz-adv-x="1792" />
+<glyph unicode="&#xf18e;" horiz-adv-x="1792" />
+<glyph unicode="&#xf500;" horiz-adv-x="1792" />
+</font>
+</defs></svg> 
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/fonts/fontawesome-webfont.ttf b/content/docs/malhar-3.8/fonts/fontawesome-webfont.ttf
new file mode 100755
index 0000000..d365924
Binary files /dev/null and b/content/docs/malhar-3.8/fonts/fontawesome-webfont.ttf differ
diff --git a/content/docs/malhar-3.8/fonts/fontawesome-webfont.woff b/content/docs/malhar-3.8/fonts/fontawesome-webfont.woff
new file mode 100755
index 0000000..b9bd17e
Binary files /dev/null and b/content/docs/malhar-3.8/fonts/fontawesome-webfont.woff differ
diff --git a/content/docs/malhar-3.8/footer.html b/content/docs/malhar-3.8/footer.html
new file mode 100644
index 0000000..1103193
--- /dev/null
+++ b/content/docs/malhar-3.8/footer.html
@@ -0,0 +1,23 @@
+<footer>
+  {% if next_page or previous_page %}
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      {% if next_page %}
+        <a href="{{ next_page.url }}" class="btn btn-neutral float-right" title="{{ next_page.title }}">Next <span class="icon icon-circle-arrow-right"></span></a>
+      {% endif %}
+      {% if previous_page %}
+        <a href="{{ previous_page.url }}" class="btn btn-neutral" title="{{ previous_page.title }}"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      {% endif %}
+    </div>
+  {% endif %}
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    {% if copyright %}
+      <p>{{ copyright }}</p>
+    {% endif %}
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
diff --git a/content/docs/malhar-3.8/images/malhar-operators.png b/content/docs/malhar-3.8/images/malhar-operators.png
new file mode 100644
index 0000000..ac09622
Binary files /dev/null and b/content/docs/malhar-3.8/images/malhar-operators.png differ
diff --git a/content/docs/malhar-3.8/img/favicon.ico b/content/docs/malhar-3.8/img/favicon.ico
new file mode 100644
index 0000000..e85006a
Binary files /dev/null and b/content/docs/malhar-3.8/img/favicon.ico differ
diff --git a/content/docs/malhar-3.8/index.html b/content/docs/malhar-3.8/index.html
new file mode 100644
index 0000000..43548a8
--- /dev/null
+++ b/content/docs/malhar-3.8/index.html
@@ -0,0 +1,389 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="./css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="./css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="./css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = null;
+    var mkdocs_page_input_path = "index.md";
+    var mkdocs_page_url = "/";
+  </script>
+  
+  <script src="./js/jquery-2.1.1.min.js"></script>
+  <script src="./js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="./js/highlight.pack.js"></script>
+  <script src="./js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="./search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 current">
+        <a class="current" href=".">Apache Apex Malhar</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#apache-apex-malhar">Apache Apex Malhar</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#capabilities-common-across-malhar-operators">Capabilities common across Malhar operators</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#operator-library-overview">Operator Library Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#inputoutput-connectors">Input/output connectors</a></li>
+                
+                    <li><a class="toctree-l4" href="#parsers">Parsers</a></li>
+                
+                    <li><a class="toctree-l4" href="#stream-manipulation">Stream manipulation</a></li>
+                
+                    <li><a class="toctree-l4" href="#compute">Compute</a></li>
+                
+                    <li><a class="toctree-l4" href="#languages-support">Languages Support</a></li>
+                
+            
+            </ul>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href=".">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href=".">Docs</a> &raquo;</li>
+    
+      
+    
+    <li>Apache Apex Malhar</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="apache-apex-malhar">Apache Apex Malhar</h1>
+<p>Apache Apex Malhar is an open source operator and codec library that can be used with the <a href="http://apex.apache.org/">Apache Apex</a> platform to build real-time streaming applications.  Enabling users to extract value quickly, Malhar operators help get data in, analyze it in real-time, and get data out of Hadoop.  In addition to the operators, the library contains a number of example applications, demonstrating operator features and capabilities.</p>
+<p><img alt="MalharDiagram" src="./images/malhar-operators.png" /></p>
+<h1 id="capabilities-common-across-malhar-operators">Capabilities common across Malhar operators</h1>
+<p>For most streaming platforms, connectors are afterthoughts and often end up being simple ‘bolt-ons’ to the platform. As a result they often cause performance issues or data loss when put through failure scenarios and scalability requirements. Malhar operators do not face these issues as they were designed to be integral parts of Apex. Hence, they have following core streaming runtime capabilities</p>
+<ol>
+<li><strong>Fault tolerance</strong> – Malhar operators where applicable have fault tolerance built in. They use the checkpoint capability provided by the framework to ensure that there is no data loss under ANY failure scenario.</li>
+<li><strong>Processing guarantees</strong> – Malhar operators where applicable provide out of the box support for ALL three processing guarantees – exactly once, at-least once, and at-most once WITHOUT requiring the user to write any additional code.  Some operators, like MQTT operator, deal with source systems that can not track processed data and hence need the operators to keep track of the data.  Malhar has support for a generic operator that uses alternate storage like HDFS to facil [...]
+<li><strong>Dynamic updates</strong> – Based on changing business conditions you often have to tweak several parameters used by the operators in your streaming application without incurring any application downtime. You can also change properties of a Malhar operator at runtime without having to bring down the application.</li>
+<li><strong>Ease of extensibility</strong> – Malhar operators are based on templates that are easy to extend.</li>
+<li><strong>Partitioning support</strong> – In streaming applications the input data stream often needs to be partitioned based on the contents of the stream. Also for operators that ingest data from external systems partitioning needs to be done based on the capabilities of the external system.  For example with Kafka, the operator can automatically scale up or down based on the changes in the number of Kafka partitions.</li>
+</ol>
+<h1 id="operator-library-overview">Operator Library Overview</h1>
+<h2 id="inputoutput-connectors">Input/output connectors</h2>
+<p>Below is a summary of the various sub categories of input and output operators. Input operators also have a corresponding output operator</p>
+<ul>
+<li><strong>File Systems</strong> – Most streaming analytics use cases require the data to be stored in HDFS or perhaps S3 if the application is running in AWS.  Users often need to re-run their streaming analytical applications against historical data or consume data from upstream processes that are perhaps writing to some NFS share.  Apex supports input &amp; output operators for HDFS, S3, NFS &amp; Local Files.  There are also File Splitter and Block Reader operators, which can accele [...]
+<li><strong>Relational Databases</strong> – Most stream processing use cases require some reference data lookups to enrich, tag or filter streaming data. There is also a need to save results of the streaming analytical computation to a database so an operational dashboard can see them. Apex supports a JDBC operator so you can read/write data from any JDBC compliant RDBMS like Oracle, MySQL, Sqlite, etc.</li>
+<li><strong>NoSQL Databases</strong> – NoSQL key-value pair databases like Cassandra &amp; HBase are a common part of streaming analytics application architectures to lookup reference data or store results.  Malhar has operators for HBase, Cassandra, Accumulo, Aerospike, MongoDB, and CouchDB.</li>
+<li><strong>Messaging Systems</strong> – Kafka, JMS, and similar systems are the workhorses of messaging infrastructure in most enterprises.  Malhar has a robust, industry-tested set of operators to read and write Kafka, JMS, ZeroMQ, and RabbitMQ messages.</li>
+<li><strong>Notification Systems</strong> – Malhar includes an operator for sending notifications via SMTP.</li>
+<li><strong>In-memory Databases &amp; Caching platforms</strong> - Some streaming use cases need instantaneous access to shared state across the application. Caching platforms and in-memory databases serve this purpose really well. To support these use cases, Malhar has operators for memcached and Redis.</li>
+<li><strong>Social Media</strong> - Malhar includes an operator to connect to the popular Twitter stream fire hose.</li>
+<li><strong>Protocols</strong> - Malhar provides connectors that can communicate in HTTP, RSS, Socket, WebSocket, FTP, and MQTT.</li>
+</ul>
+<h2 id="parsers">Parsers</h2>
+<p>There are many industry vertical specific data formats that a streaming application developer might need to parse. Often there are existing parsers available for these that can be directly plugged into an Apache Apex application. For example in the Telco space, a Java based CDR parser can be directly plugged into Apache Apex operator. To further simplify development experience, Malhar also provides some operators for parsing common formats like XML (DOM &amp; SAX), JSON (flat map conv [...]
+<h2 id="stream-manipulation">Stream manipulation</h2>
+<p>Streaming data inevitably needs processing to clean, filter, tag, summarize, etc. The goal of Malhar is to enable the application developer to focus on WHAT needs to be done to the stream to get it in the right format and not worry about the HOW.  Malhar has several operators to perform the common stream manipulation actions like – GroupBy, Join, Distinct/Unique, Limit, OrderBy, Split, Sample, Inner join, Outer join, Select, Update etc.</p>
+<h2 id="compute">Compute</h2>
+<p>One of the most important promises of a streaming analytics platform like Apache Apex is the ability to do analytics in real-time. However delivering on the promise becomes really difficult when the platform does not provide out of the box operators to support variety of common compute functions as the user then has to worry about making these scalable, fault tolerant, stateful, etc.  Malhar takes this responsibility away from the application developer by providing a variety of out of [...]
+<p>Below is just a snapshot of the compute operators available in Malhar</p>
+<ul>
+<li>Statistics and math - Various mathematical and statistical computations over application defined time windows.</li>
+<li>Filtering and pattern matching</li>
+<li>Sorting, maps, frequency, TopN, BottomN</li>
+<li>Random data generators</li>
+</ul>
+<h2 id="languages-support">Languages Support</h2>
+<p>Migrating to a new platform often requires re-use of the existing code that would be difficult or time-consuming to re-write.  With this in mind, Malhar supports invocation of code written in other languages by wrapping them in one of the library operators, and allows execution of software written in:</p>
+<ul>
+<li>JavaScript</li>
+<li>Python</li>
+<li>R</li>
+<li>Ruby</li>
+</ul>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="apis/calcite/" class="btn btn-neutral float-right" title="SQL">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+      
+        <span style="margin-left: 15px"><a href="apis/calcite/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
+
+<!--
+MkDocs version : 0.15.3
+Build Date UTC : 2017-11-05 00:19:18.391611
+-->
diff --git a/content/docs/malhar-3.8/js/highlight.pack.js b/content/docs/malhar-3.8/js/highlight.pack.js
new file mode 100644
index 0000000..a5818df
--- /dev/null
+++ b/content/docs/malhar-3.8/js/highlight.pack.js
@@ -0,0 +1,2 @@
+!function(e){"undefined"!=typeof exports?e(exports):(window.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return window.hljs}))}(function(e){function n(e){return e.replace(/&/gm,"&amp;").replace(/</gm,"&lt;").replace(/>/gm,"&gt;")}function t(e){return e.nodeName.toLowerCase()}function r(e,n){var t=e&&e.exec(n);return t&&0==t.index}function a(e){var n=(e.className+" "+(e.parentNode?e.parentNode.className:"")).split(/\s+/);return n=n.map(function(e){return e.replac [...]
+c:[{cN:"comment",b:/\(\*/,e:/\*\)/},e.ASM,e.QSM,e.CNM,{cN:"list",b:/\{/,e:/\}/,i:/:/}]}});hljs.registerLanguage("fsharp",function(e){var t={b:"<",e:">",c:[e.inherit(e.TM,{b:/'[a-zA-Z0-9_]+/})]};return{aliases:["fs"],k:"yield! return! let! do!abstract and as assert base begin class default delegate do done downcast downto elif else end exception extern false finally for fun function global if in inherit inline interface internal lazy let match member module mutable namespace new null of o [...]
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/js/jquery-2.1.1.min.js b/content/docs/malhar-3.8/js/jquery-2.1.1.min.js
new file mode 100644
index 0000000..e5ace11
--- /dev/null
+++ b/content/docs/malhar-3.8/js/jquery-2.1.1.min.js
@@ -0,0 +1,4 @@
+/*! jQuery v2.1.1 | (c) 2005, 2014 jQuery Foundation, Inc. | jquery.org/license */
+!function(a,b){"object"==typeof module&&"object"==typeof module.exports?module.exports=a.document?b(a,!0):function(a){if(!a.document)throw new Error("jQuery requires a window with a document");return b(a)}:b(a)}("undefined"!=typeof window?window:this,function(a,b){var c=[],d=c.slice,e=c.concat,f=c.push,g=c.indexOf,h={},i=h.toString,j=h.hasOwnProperty,k={},l=a.document,m="2.1.1",n=function(a,b){return new n.fn.init(a,b)},o=/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,p=/^-ms-/,q=/-([\da-z])/gi,r= [...]
+},_data:function(a,b,c){return L.access(a,b,c)},_removeData:function(a,b){L.remove(a,b)}}),n.fn.extend({data:function(a,b){var c,d,e,f=this[0],g=f&&f.attributes;if(void 0===a){if(this.length&&(e=M.get(f),1===f.nodeType&&!L.get(f,"hasDataAttrs"))){c=g.length;while(c--)g[c]&&(d=g[c].name,0===d.indexOf("data-")&&(d=n.camelCase(d.slice(5)),P(f,d,e[d])));L.set(f,"hasDataAttrs",!0)}return e}return"object"==typeof a?this.each(function(){M.set(this,a)}):J(this,function(b){var c,d=n.camelCase(a); [...]
+},removeAttr:function(a,b){var c,d,e=0,f=b&&b.match(E);if(f&&1===a.nodeType)while(c=f[e++])d=n.propFix[c]||c,n.expr.match.bool.test(c)&&(a[d]=!1),a.removeAttribute(c)},attrHooks:{type:{set:function(a,b){if(!k.radioValue&&"radio"===b&&n.nodeName(a,"input")){var c=a.value;return a.setAttribute("type",b),c&&(a.value=c),b}}}}}),Zb={set:function(a,b,c){return b===!1?n.removeAttr(a,c):a.setAttribute(c,c),c}},n.each(n.expr.match.bool.source.match(/\w+/g),function(a,b){var c=$b[b]||n.find.attr;$ [...]
diff --git a/content/docs/malhar-3.8/js/modernizr-2.8.3.min.js b/content/docs/malhar-3.8/js/modernizr-2.8.3.min.js
new file mode 100644
index 0000000..40dd2a9
--- /dev/null
+++ b/content/docs/malhar-3.8/js/modernizr-2.8.3.min.js
@@ -0,0 +1 @@
+window.Modernizr=function(e,t,n){function r(e){b.cssText=e}function o(e,t){return r(S.join(e+";")+(t||""))}function a(e,t){return typeof e===t}function i(e,t){return!!~(""+e).indexOf(t)}function c(e,t){for(var r in e){var o=e[r];if(!i(o,"-")&&b[o]!==n)return"pfx"==t?o:!0}return!1}function s(e,t,r){for(var o in e){var i=t[e[o]];if(i!==n)return r===!1?e[o]:a(i,"function")?i.bind(r||t):i}return!1}function u(e,t,n){var r=e.charAt(0).toUpperCase()+e.slice(1),o=(e+" "+k.join(r+" ")+r).split("  [...]
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/js/theme.js b/content/docs/malhar-3.8/js/theme.js
new file mode 100644
index 0000000..6396162
--- /dev/null
+++ b/content/docs/malhar-3.8/js/theme.js
@@ -0,0 +1,55 @@
+$( document ).ready(function() {
+
+    // Shift nav in mobile when clicking the menu.
+    $(document).on('click', "[data-toggle='wy-nav-top']", function() {
+      $("[data-toggle='wy-nav-shift']").toggleClass("shift");
+      $("[data-toggle='rst-versions']").toggleClass("shift");
+    });
+
+    // Close menu when you click a link.
+    $(document).on('click', ".wy-menu-vertical .current ul li a", function() {
+      $("[data-toggle='wy-nav-shift']").removeClass("shift");
+      $("[data-toggle='rst-versions']").toggleClass("shift");
+    });
+
+    $(document).on('click', "[data-toggle='rst-current-version']", function() {
+      $("[data-toggle='rst-versions']").toggleClass("shift-up");
+    });
+
+    // Make tables responsive
+    $("table.docutils:not(.field-list)").wrap("<div class='wy-table-responsive'></div>");
+
+    hljs.initHighlightingOnLoad();
+
+    $('table').addClass('docutils');
+});
+
+window.SphinxRtdTheme = (function (jquery) {
+    var stickyNav = (function () {
+        var navBar,
+            win,
+            stickyNavCssClass = 'stickynav',
+            applyStickNav = function () {
+                if (navBar.height() <= win.height()) {
+                    navBar.addClass(stickyNavCssClass);
+                } else {
+                    navBar.removeClass(stickyNavCssClass);
+                }
+            },
+            enable = function () {
+                applyStickNav();
+                win.on('resize', applyStickNav);
+            },
+            init = function () {
+                navBar = jquery('nav.wy-nav-side:first');
+                win    = jquery(window);
+            };
+        jquery(init);
+        return {
+            enable : enable
+        };
+    }());
+    return {
+        StickyNav : stickyNav
+    };
+}($));
diff --git a/content/docs/malhar-3.8/mkdocs/js/lunr.min.js b/content/docs/malhar-3.8/mkdocs/js/lunr.min.js
new file mode 100644
index 0000000..b0198df
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/lunr.min.js
@@ -0,0 +1,7 @@
+/**
+ * lunr - http://lunrjs.com - A bit like Solr, but much smaller and not as bright - 0.7.0
+ * Copyright (C) 2016 Oliver Nightingale
+ * MIT Licensed
+ * @license
+ */
+!function(){var t=function(e){var n=new t.Index;return n.pipeline.add(t.trimmer,t.stopWordFilter,t.stemmer),e&&e.call(n,n),n};t.version="0.7.0",t.utils={},t.utils.warn=function(t){return function(e){t.console&&console.warn&&console.warn(e)}}(this),t.utils.asString=function(t){return void 0===t||null===t?"":t.toString()},t.EventEmitter=function(){this.events={}},t.EventEmitter.prototype.addListener=function(){var t=Array.prototype.slice.call(arguments),e=t.pop(),n=t;if("function"!=typeof  [...]
diff --git a/content/docs/malhar-3.8/mkdocs/js/mustache.min.js b/content/docs/malhar-3.8/mkdocs/js/mustache.min.js
new file mode 100644
index 0000000..7fc6da8
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/mustache.min.js
@@ -0,0 +1 @@
+(function(global,factory){if(typeof exports==="object"&&exports){factory(exports)}else if(typeof define==="function"&&define.amd){define(["exports"],factory)}else{factory(global.Mustache={})}})(this,function(mustache){var Object_toString=Object.prototype.toString;var isArray=Array.isArray||function(object){return Object_toString.call(object)==="[object Array]"};function isFunction(object){return typeof object==="function"}function escapeRegExp(string){return string.replace(/[\-\[\]{}()*+ [...]
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/mkdocs/js/require.js b/content/docs/malhar-3.8/mkdocs/js/require.js
new file mode 100644
index 0000000..8638a31
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/require.js
@@ -0,0 +1,36 @@
+/*
+ RequireJS 2.1.16 Copyright (c) 2010-2015, The Dojo Foundation All Rights Reserved.
+ Available via the MIT or new BSD license.
+ see: http://github.com/jrburke/requirejs for details
+*/
+var requirejs,require,define;
+(function(ba){function G(b){return"[object Function]"===K.call(b)}function H(b){return"[object Array]"===K.call(b)}function v(b,c){if(b){var d;for(d=0;d<b.length&&(!b[d]||!c(b[d],d,b));d+=1);}}function T(b,c){if(b){var d;for(d=b.length-1;-1<d&&(!b[d]||!c(b[d],d,b));d-=1);}}function t(b,c){return fa.call(b,c)}function m(b,c){return t(b,c)&&b[c]}function B(b,c){for(var d in b)if(t(b,d)&&c(b[d],d))break}function U(b,c,d,e){c&&B(c,function(c,g){if(d||!t(b,g))e&&"object"===typeof c&&c&&!H(c)& [...]
+RegExp)?(b[g]||(b[g]={}),U(b[g],c,d,e)):b[g]=c});return b}function u(b,c){return function(){return c.apply(b,arguments)}}function ca(b){throw b;}function da(b){if(!b)return b;var c=ba;v(b.split("."),function(b){c=c[b]});return c}function C(b,c,d,e){c=Error(c+"\nhttp://requirejs.org/docs/errors.html#"+b);c.requireType=b;c.requireModules=e;d&&(c.originalError=d);return c}function ga(b){function c(a,k,b){var f,l,c,d,e,g,i,p,k=k&&k.split("/"),h=j.map,n=h&&h["*"];if(a){a=a.split("/");l=a.leng [...]
+Q.test(a[l])&&(a[l]=a[l].replace(Q,""));"."===a[0].charAt(0)&&k&&(l=k.slice(0,k.length-1),a=l.concat(a));l=a;for(c=0;c<l.length;c++)if(d=l[c],"."===d)l.splice(c,1),c-=1;else if(".."===d&&!(0===c||1==c&&".."===l[2]||".."===l[c-1])&&0<c)l.splice(c-1,2),c-=2;a=a.join("/")}if(b&&h&&(k||n)){l=a.split("/");c=l.length;a:for(;0<c;c-=1){e=l.slice(0,c).join("/");if(k)for(d=k.length;0<d;d-=1)if(b=m(h,k.slice(0,d).join("/")))if(b=m(b,e)){f=b;g=c;break a}!i&&(n&&m(n,e))&&(i=m(n,e),p=c)}!f&&i&&(f=i,g= [...]
+g,f),a=l.join("/"))}return(f=m(j.pkgs,a))?f:a}function d(a){z&&v(document.getElementsByTagName("script"),function(k){if(k.getAttribute("data-requiremodule")===a&&k.getAttribute("data-requirecontext")===i.contextName)return k.parentNode.removeChild(k),!0})}function e(a){var k=m(j.paths,a);if(k&&H(k)&&1<k.length)return k.shift(),i.require.undef(a),i.makeRequire(null,{skipMap:!0})([a]),!0}function n(a){var k,c=a?a.indexOf("!"):-1;-1<c&&(k=a.substring(0,c),a=a.substring(c+1,a.length));return [...]
+k,b,f){var l,d,e=null,g=k?k.name:null,j=a,p=!0,h="";a||(p=!1,a="_@r"+(K+=1));a=n(a);e=a[0];a=a[1];e&&(e=c(e,g,f),d=m(r,e));a&&(e?h=d&&d.normalize?d.normalize(a,function(a){return c(a,g,f)}):-1===a.indexOf("!")?c(a,g,f):a:(h=c(a,g,f),a=n(h),e=a[0],h=a[1],b=!0,l=i.nameToUrl(h)));b=e&&!d&&!b?"_unnormalized"+(O+=1):"";return{prefix:e,name:h,parentMap:k,unnormalized:!!b,url:l,originalName:j,isDefine:p,id:(e?e+"!"+h:h)+b}}function s(a){var k=a.id,b=m(h,k);b||(b=h[k]=new i.Module(a));return b}f [...]
+k,b){var f=a.id,c=m(h,f);if(t(r,f)&&(!c||c.defineEmitComplete))"defined"===k&&b(r[f]);else if(c=s(a),c.error&&"error"===k)b(c.error);else c.on(k,b)}function w(a,b){var c=a.requireModules,f=!1;if(b)b(a);else if(v(c,function(b){if(b=m(h,b))b.error=a,b.events.error&&(f=!0,b.emit("error",a))}),!f)g.onError(a)}function x(){R.length&&(ha.apply(A,[A.length,0].concat(R)),R=[])}function y(a){delete h[a];delete V[a]}function F(a,b,c){var f=a.map.id;a.error?a.emit("error",a.error):(b[f]=!0,v(a.depM [...]
+d){var e=f.id,g=m(h,e);g&&(!a.depMatched[d]&&!c[e])&&(m(b,e)?(a.defineDep(d,r[e]),a.check()):F(g,b,c))}),c[f]=!0)}function D(){var a,b,c=(a=1E3*j.waitSeconds)&&i.startTime+a<(new Date).getTime(),f=[],l=[],g=!1,h=!0;if(!W){W=!0;B(V,function(a){var i=a.map,j=i.id;if(a.enabled&&(i.isDefine||l.push(a),!a.error))if(!a.inited&&c)e(j)?g=b=!0:(f.push(j),d(j));else if(!a.inited&&(a.fetched&&i.isDefine)&&(g=!0,!i.prefix))return h=!1});if(c&&f.length)return a=C("timeout","Load timeout for modules:  [...]
+f),a.contextName=i.contextName,w(a);h&&v(l,function(a){F(a,{},{})});if((!c||b)&&g)if((z||ea)&&!X)X=setTimeout(function(){X=0;D()},50);W=!1}}function E(a){t(r,a[0])||s(p(a[0],null,!0)).init(a[1],a[2])}function I(a){var a=a.currentTarget||a.srcElement,b=i.onScriptLoad;a.detachEvent&&!Y?a.detachEvent("onreadystatechange",b):a.removeEventListener("load",b,!1);b=i.onScriptError;(!a.detachEvent||Y)&&a.removeEventListener("error",b,!1);return{node:a,id:a&&a.getAttribute("data-requiremodule")}}f [...]
+for(x();A.length;){a=A.shift();if(null===a[0])return w(C("mismatch","Mismatched anonymous define() module: "+a[a.length-1]));E(a)}}var W,Z,i,L,X,j={waitSeconds:7,baseUrl:"./",paths:{},bundles:{},pkgs:{},shim:{},config:{}},h={},V={},$={},A=[],r={},S={},aa={},K=1,O=1;L={require:function(a){return a.require?a.require:a.require=i.makeRequire(a.map)},exports:function(a){a.usingExports=!0;if(a.map.isDefine)return a.exports?r[a.map.id]=a.exports:a.exports=r[a.map.id]={}},module:function(a){retu [...]
+a.module:a.module={id:a.map.id,uri:a.map.url,config:function(){return m(j.config,a.map.id)||{}},exports:a.exports||(a.exports={})}}};Z=function(a){this.events=m($,a.id)||{};this.map=a;this.shim=m(j.shim,a.id);this.depExports=[];this.depMaps=[];this.depMatched=[];this.pluginMaps={};this.depCount=0};Z.prototype={init:function(a,b,c,f){f=f||{};if(!this.inited){this.factory=b;if(c)this.on("error",c);else this.events.error&&(c=u(this,function(a){this.emit("error",a)}));this.depMaps=a&&a.slice [...]
+c;this.inited=!0;this.ignore=f.ignore;f.enabled||this.enabled?this.enable():this.check()}},defineDep:function(a,b){this.depMatched[a]||(this.depMatched[a]=!0,this.depCount-=1,this.depExports[a]=b)},fetch:function(){if(!this.fetched){this.fetched=!0;i.startTime=(new Date).getTime();var a=this.map;if(this.shim)i.makeRequire(this.map,{enableBuildCallback:!0})(this.shim.deps||[],u(this,function(){return a.prefix?this.callPlugin():this.load()}));else return a.prefix?this.callPlugin():this.loa [...]
+this.map.url;S[a]||(S[a]=!0,i.load(this.map.id,a))},check:function(){if(this.enabled&&!this.enabling){var a,b,c=this.map.id;b=this.depExports;var f=this.exports,l=this.factory;if(this.inited)if(this.error)this.emit("error",this.error);else{if(!this.defining){this.defining=!0;if(1>this.depCount&&!this.defined){if(G(l)){if(this.events.error&&this.map.isDefine||g.onError!==ca)try{f=i.execCb(c,l,b,f)}catch(d){a=d}else f=i.execCb(c,l,b,f);this.map.isDefine&&void 0===f&&((b=this.module)?f=b.ex [...]
+(f=this.exports));if(a)return a.requireMap=this.map,a.requireModules=this.map.isDefine?[this.map.id]:null,a.requireType=this.map.isDefine?"define":"require",w(this.error=a)}else f=l;this.exports=f;if(this.map.isDefine&&!this.ignore&&(r[c]=f,g.onResourceLoad))g.onResourceLoad(i,this.map,this.depMaps);y(c);this.defined=!0}this.defining=!1;this.defined&&!this.defineEmitted&&(this.defineEmitted=!0,this.emit("defined",this.exports),this.defineEmitComplete=!0)}}else this.fetch()}},callPlugin:f [...]
+this.map,b=a.id,d=p(a.prefix);this.depMaps.push(d);q(d,"defined",u(this,function(f){var l,d;d=m(aa,this.map.id);var e=this.map.name,P=this.map.parentMap?this.map.parentMap.name:null,n=i.makeRequire(a.parentMap,{enableBuildCallback:!0});if(this.map.unnormalized){if(f.normalize&&(e=f.normalize(e,function(a){return c(a,P,!0)})||""),f=p(a.prefix+"!"+e,this.map.parentMap),q(f,"defined",u(this,function(a){this.init([],function(){return a},null,{enabled:!0,ignore:!0})})),d=m(h,f.id)){this.depMa [...]
+if(this.events.error)d.on("error",u(this,function(a){this.emit("error",a)}));d.enable()}}else d?(this.map.url=i.nameToUrl(d),this.load()):(l=u(this,function(a){this.init([],function(){return a},null,{enabled:!0})}),l.error=u(this,function(a){this.inited=!0;this.error=a;a.requireModules=[b];B(h,function(a){0===a.map.id.indexOf(b+"_unnormalized")&&y(a.map.id)});w(a)}),l.fromText=u(this,function(f,c){var d=a.name,e=p(d),P=M;c&&(f=c);P&&(M=!1);s(e);t(j.config,b)&&(j.config[d]=j.config[b]);tr [...]
+"fromText eval for "+b+" failed: "+h,h,[b]))}P&&(M=!0);this.depMaps.push(e);i.completeLoad(d);n([d],l)}),f.load(a.name,n,l,j))}));i.enable(d,this);this.pluginMaps[d.id]=d},enable:function(){V[this.map.id]=this;this.enabling=this.enabled=!0;v(this.depMaps,u(this,function(a,b){var c,f;if("string"===typeof a){a=p(a,this.map.isDefine?this.map:this.map.parentMap,!1,!this.skipMap);this.depMaps[b]=a;if(c=m(L,a.id)){this.depExports[b]=c(this);return}this.depCount+=1;q(a,"defined",u(this,function [...]
+a);this.check()}));this.errback?q(a,"error",u(this,this.errback)):this.events.error&&q(a,"error",u(this,function(a){this.emit("error",a)}))}c=a.id;f=h[c];!t(L,c)&&(f&&!f.enabled)&&i.enable(a,this)}));B(this.pluginMaps,u(this,function(a){var b=m(h,a.id);b&&!b.enabled&&i.enable(a,this)}));this.enabling=!1;this.check()},on:function(a,b){var c=this.events[a];c||(c=this.events[a]=[]);c.push(b)},emit:function(a,b){v(this.events[a],function(a){a(b)});"error"===a&&delete this.events[a]}};i={conf [...]
+registry:h,defined:r,urlFetched:S,defQueue:A,Module:Z,makeModuleMap:p,nextTick:g.nextTick,onError:w,configure:function(a){a.baseUrl&&"/"!==a.baseUrl.charAt(a.baseUrl.length-1)&&(a.baseUrl+="/");var b=j.shim,c={paths:!0,bundles:!0,config:!0,map:!0};B(a,function(a,b){c[b]?(j[b]||(j[b]={}),U(j[b],a,!0,!0)):j[b]=a});a.bundles&&B(a.bundles,function(a,b){v(a,function(a){a!==b&&(aa[a]=b)})});a.shim&&(B(a.shim,function(a,c){H(a)&&(a={deps:a});if((a.exports||a.init)&&!a.exportsFn)a.exportsFn=i.ma [...]
+b[c]=a}),j.shim=b);a.packages&&v(a.packages,function(a){var b,a="string"===typeof a?{name:a}:a;b=a.name;a.location&&(j.paths[b]=a.location);j.pkgs[b]=a.name+"/"+(a.main||"main").replace(ia,"").replace(Q,"")});B(h,function(a,b){!a.inited&&!a.map.unnormalized&&(a.map=p(b))});if(a.deps||a.callback)i.require(a.deps||[],a.callback)},makeShimExports:function(a){return function(){var b;a.init&&(b=a.init.apply(ba,arguments));return b||a.exports&&da(a.exports)}},makeRequire:function(a,e){function [...]
+q;e.enableBuildCallback&&(d&&G(d))&&(d.__requireJsBuild=!0);if("string"===typeof c){if(G(d))return w(C("requireargs","Invalid require call"),m);if(a&&t(L,c))return L[c](h[a.id]);if(g.get)return g.get(i,c,a,j);n=p(c,a,!1,!0);n=n.id;return!t(r,n)?w(C("notloaded",'Module name "'+n+'" has not been loaded yet for context: '+b+(a?"":". Use require([])"))):r[n]}J();i.nextTick(function(){J();q=s(p(null,a));q.skipMap=e.skipMap;q.init(c,d,m,{enabled:!0});D()});return j}e=e||{};U(j,{isBrowser:z,toU [...]
+e=b.lastIndexOf("."),k=b.split("/")[0];if(-1!==e&&(!("."===k||".."===k)||1<e))d=b.substring(e,b.length),b=b.substring(0,e);return i.nameToUrl(c(b,a&&a.id,!0),d,!0)},defined:function(b){return t(r,p(b,a,!1,!0).id)},specified:function(b){b=p(b,a,!1,!0).id;return t(r,b)||t(h,b)}});a||(j.undef=function(b){x();var c=p(b,a,!0),e=m(h,b);d(b);delete r[b];delete S[c.url];delete $[b];T(A,function(a,c){a[0]===b&&A.splice(c,1)});e&&(e.events.defined&&($[b]=e.events),y(b))});return j},enable:function [...]
+s(a).enable()},completeLoad:function(a){var b,c,d=m(j.shim,a)||{},g=d.exports;for(x();A.length;){c=A.shift();if(null===c[0]){c[0]=a;if(b)break;b=!0}else c[0]===a&&(b=!0);E(c)}c=m(h,a);if(!b&&!t(r,a)&&c&&!c.inited){if(j.enforceDefine&&(!g||!da(g)))return e(a)?void 0:w(C("nodefine","No define call for "+a,null,[a]));E([a,d.deps||[],d.exportsFn])}D()},nameToUrl:function(a,b,c){var d,e,h;(d=m(j.pkgs,a))&&(a=d);if(d=m(aa,a))return i.nameToUrl(d,b,c);if(g.jsExtRegExp.test(a))d=a+(b||"");else{d [...]
+a=a.split("/");for(e=a.length;0<e;e-=1)if(h=a.slice(0,e).join("/"),h=m(d,h)){H(h)&&(h=h[0]);a.splice(0,e,h);break}d=a.join("/");d+=b||(/^data\:|\?/.test(d)||c?"":".js");d=("/"===d.charAt(0)||d.match(/^[\w\+\.\-]+:/)?"":j.baseUrl)+d}return j.urlArgs?d+((-1===d.indexOf("?")?"?":"&")+j.urlArgs):d},load:function(a,b){g.load(i,a,b)},execCb:function(a,b,c,d){return b.apply(d,c)},onScriptLoad:function(a){if("load"===a.type||ja.test((a.currentTarget||a.srcElement).readyState))N=null,a=I(a),i.com [...]
+onScriptError:function(a){var b=I(a);if(!e(b.id))return w(C("scripterror","Script error for: "+b.id,a,[b.id]))}};i.require=i.makeRequire();return i}var g,x,y,D,I,E,N,J,s,O,ka=/(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg,la=/[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,Q=/\.js$/,ia=/^\.\//;x=Object.prototype;var K=x.toString,fa=x.hasOwnProperty,ha=Array.prototype.splice,z=!!("undefined"!==typeof window&&"undefined"!==typeof navigator&&window.document),ea=!z&&"undefined"!==typeof importSc [...]
+z&&"PLAYSTATION 3"===navigator.platform?/^complete$/:/^(complete|loaded)$/,Y="undefined"!==typeof opera&&"[object Opera]"===opera.toString(),F={},q={},R=[],M=!1;if("undefined"===typeof define){if("undefined"!==typeof requirejs){if(G(requirejs))return;q=requirejs;requirejs=void 0}"undefined"!==typeof require&&!G(require)&&(q=require,require=void 0);g=requirejs=function(b,c,d,e){var n,p="_";!H(b)&&"string"!==typeof b&&(n=b,H(c)?(b=c,c=d,d=e):b=[]);n&&n.context&&(p=n.context);(e=m(F,p))||(e [...]
+n&&e.configure(n);return e.require(b,c,d)};g.config=function(b){return g(b)};g.nextTick="undefined"!==typeof setTimeout?function(b){setTimeout(b,4)}:function(b){b()};require||(require=g);g.version="2.1.16";g.jsExtRegExp=/^\/|:|\?|\.js$/;g.isBrowser=z;x=g.s={contexts:F,newContext:ga};g({});v(["toUrl","undef","defined","specified"],function(b){g[b]=function(){var c=F._;return c.require[b].apply(c,arguments)}});if(z&&(y=x.head=document.getElementsByTagName("head")[0],D=document.getElementsB [...]
+x.head=D.parentNode;g.onError=ca;g.createNode=function(b){var c=b.xhtml?document.createElementNS("http://www.w3.org/1999/xhtml","html:script"):document.createElement("script");c.type=b.scriptType||"text/javascript";c.charset="utf-8";c.async=!0;return c};g.load=function(b,c,d){var e=b&&b.config||{};if(z)return e=g.createNode(e,c,d),e.setAttribute("data-requirecontext",b.contextName),e.setAttribute("data-requiremodule",c),e.attachEvent&&!(e.attachEvent.toString&&0>e.attachEvent.toString(). [...]
+!Y?(M=!0,e.attachEvent("onreadystatechange",b.onScriptLoad)):(e.addEventListener("load",b.onScriptLoad,!1),e.addEventListener("error",b.onScriptError,!1)),e.src=d,J=e,D?y.insertBefore(e,D):y.appendChild(e),J=null,e;if(ea)try{importScripts(d),b.completeLoad(c)}catch(m){b.onError(C("importscripts","importScripts failed for "+c+" at "+d,m,[c]))}};z&&!q.skipDataMain&&T(document.getElementsByTagName("script"),function(b){y||(y=b.parentNode);if(I=b.getAttribute("data-main"))return s=I,q.baseUr [...]
+s=E.pop(),O=E.length?E.join("/")+"/":"./",q.baseUrl=O),s=s.replace(Q,""),g.jsExtRegExp.test(s)&&(s=I),q.deps=q.deps?q.deps.concat(s):[s],!0});define=function(b,c,d){var e,g;"string"!==typeof b&&(d=c,c=b,b=null);H(c)||(d=c,c=null);!c&&G(d)&&(c=[],d.length&&(d.toString().replace(ka,"").replace(la,function(b,d){c.push(d)}),c=(1===d.length?["require"]:["require","exports","module"]).concat(c)));if(M){if(!(e=J))N&&"interactive"===N.readyState||T(document.getElementsByTagName("script"),functio [...]
+b.readyState)return N=b}),e=N;e&&(b||(b=e.getAttribute("data-requiremodule")),g=F[e.getAttribute("data-requirecontext")])}(g?g.defQueue:R).push([b,c,d])};define.amd={jQuery:!0};g.exec=function(b){return eval(b)};g(q)}})(this);
diff --git a/content/docs/malhar-3.8/mkdocs/js/search-results-template.mustache b/content/docs/malhar-3.8/mkdocs/js/search-results-template.mustache
new file mode 100644
index 0000000..a8b3862
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/search-results-template.mustache
@@ -0,0 +1,4 @@
+<article>
+  <h3><a href="{{location}}">{{title}}</a></h3>
+  <p>{{summary}}</p>
+</article>
diff --git a/content/docs/malhar-3.8/mkdocs/js/search.js b/content/docs/malhar-3.8/mkdocs/js/search.js
new file mode 100644
index 0000000..88d563a
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/search.js
@@ -0,0 +1,88 @@
+require([
+    base_url + '/mkdocs/js/mustache.min.js',
+    base_url + '/mkdocs/js/lunr.min.js',
+    'text!search-results-template.mustache',
+    'text!../search_index.json',
+], function (Mustache, lunr, results_template, data) {
+   "use strict";
+
+    function getSearchTerm()
+    {
+        var sPageURL = window.location.search.substring(1);
+        var sURLVariables = sPageURL.split('&');
+        for (var i = 0; i < sURLVariables.length; i++)
+        {
+            var sParameterName = sURLVariables[i].split('=');
+            if (sParameterName[0] == 'q')
+            {
+                return decodeURIComponent(sParameterName[1].replace(/\+/g, '%20'));
+            }
+        }
+    }
+
+    var index = lunr(function () {
+        this.field('title', {boost: 10});
+        this.field('text');
+        this.ref('location');
+    });
+
+    data = JSON.parse(data);
+    var documents = {};
+
+    for (var i=0; i < data.docs.length; i++){
+        var doc = data.docs[i];
+        doc.location = base_url + doc.location;
+        index.add(doc);
+        documents[doc.location] = doc;
+    }
+
+    var search = function(){
+
+        var query = document.getElementById('mkdocs-search-query').value;
+        var search_results = document.getElementById("mkdocs-search-results");
+        while (search_results.firstChild) {
+            search_results.removeChild(search_results.firstChild);
+        }
+
+        if(query === ''){
+            return;
+        }
+
+        var results = index.search(query);
+
+        if (results.length > 0){
+            for (var i=0; i < results.length; i++){
+                var result = results[i];
+                doc = documents[result.ref];
+                doc.base_url = base_url;
+                doc.summary = doc.text.substring(0, 200);
+                var html = Mustache.to_html(results_template, doc);
+                search_results.insertAdjacentHTML('beforeend', html);
+            }
+        } else {
+            search_results.insertAdjacentHTML('beforeend', "<p>No results found</p>");
+        }
+
+        if(jQuery){
+            /*
+             * We currently only automatically hide bootstrap models. This
+             * requires jQuery to work.
+             */
+            jQuery('#mkdocs_search_modal a').click(function(){
+                jQuery('#mkdocs_search_modal').modal('hide');
+            })
+        }
+
+    };
+
+    var search_input = document.getElementById('mkdocs-search-query');
+
+    var term = getSearchTerm();
+    if (term){
+        search_input.value = term;
+        search();
+    }
+
+    search_input.addEventListener("keyup", search);
+
+});
diff --git a/content/docs/malhar-3.8/mkdocs/js/text.js b/content/docs/malhar-3.8/mkdocs/js/text.js
new file mode 100644
index 0000000..17921b6
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/js/text.js
@@ -0,0 +1,390 @@
+/**
+ * @license RequireJS text 2.0.12 Copyright (c) 2010-2014, The Dojo Foundation All Rights Reserved.
+ * Available via the MIT or new BSD license.
+ * see: http://github.com/requirejs/text for details
+ */
+/*jslint regexp: true */
+/*global require, XMLHttpRequest, ActiveXObject,
+  define, window, process, Packages,
+  java, location, Components, FileUtils */
+
+define(['module'], function (module) {
+    'use strict';
+
+    var text, fs, Cc, Ci, xpcIsWindows,
+        progIds = ['Msxml2.XMLHTTP', 'Microsoft.XMLHTTP', 'Msxml2.XMLHTTP.4.0'],
+        xmlRegExp = /^\s*<\?xml(\s)+version=[\'\"](\d)*.(\d)*[\'\"](\s)*\?>/im,
+        bodyRegExp = /<body[^>]*>\s*([\s\S]+)\s*<\/body>/im,
+        hasLocation = typeof location !== 'undefined' && location.href,
+        defaultProtocol = hasLocation && location.protocol && location.protocol.replace(/\:/, ''),
+        defaultHostName = hasLocation && location.hostname,
+        defaultPort = hasLocation && (location.port || undefined),
+        buildMap = {},
+        masterConfig = (module.config && module.config()) || {};
+
+    text = {
+        version: '2.0.12',
+
+        strip: function (content) {
+            //Strips <?xml ...?> declarations so that external SVG and XML
+            //documents can be added to a document without worry. Also, if the string
+            //is an HTML document, only the part inside the body tag is returned.
+            if (content) {
+                content = content.replace(xmlRegExp, "");
+                var matches = content.match(bodyRegExp);
+                if (matches) {
+                    content = matches[1];
+                }
+            } else {
+                content = "";
+            }
+            return content;
+        },
+
+        jsEscape: function (content) {
+            return content.replace(/(['\\])/g, '\\$1')
+                .replace(/[\f]/g, "\\f")
+                .replace(/[\b]/g, "\\b")
+                .replace(/[\n]/g, "\\n")
+                .replace(/[\t]/g, "\\t")
+                .replace(/[\r]/g, "\\r")
+                .replace(/[\u2028]/g, "\\u2028")
+                .replace(/[\u2029]/g, "\\u2029");
+        },
+
+        createXhr: masterConfig.createXhr || function () {
+            //Would love to dump the ActiveX crap in here. Need IE 6 to die first.
+            var xhr, i, progId;
+            if (typeof XMLHttpRequest !== "undefined") {
+                return new XMLHttpRequest();
+            } else if (typeof ActiveXObject !== "undefined") {
+                for (i = 0; i < 3; i += 1) {
+                    progId = progIds[i];
+                    try {
+                        xhr = new ActiveXObject(progId);
+                    } catch (e) {}
+
+                    if (xhr) {
+                        progIds = [progId];  // so faster next time
+                        break;
+                    }
+                }
+            }
+
+            return xhr;
+        },
+
+        /**
+         * Parses a resource name into its component parts. Resource names
+         * look like: module/name.ext!strip, where the !strip part is
+         * optional.
+         * @param {String} name the resource name
+         * @returns {Object} with properties "moduleName", "ext" and "strip"
+         * where strip is a boolean.
+         */
+        parseName: function (name) {
+            var modName, ext, temp,
+                strip = false,
+                index = name.indexOf("."),
+                isRelative = name.indexOf('./') === 0 ||
+                             name.indexOf('../') === 0;
+
+            if (index !== -1 && (!isRelative || index > 1)) {
+                modName = name.substring(0, index);
+                ext = name.substring(index + 1, name.length);
+            } else {
+                modName = name;
+            }
+
+            temp = ext || modName;
+            index = temp.indexOf("!");
+            if (index !== -1) {
+                //Pull off the strip arg.
+                strip = temp.substring(index + 1) === "strip";
+                temp = temp.substring(0, index);
+                if (ext) {
+                    ext = temp;
+                } else {
+                    modName = temp;
+                }
+            }
+
+            return {
+                moduleName: modName,
+                ext: ext,
+                strip: strip
+            };
+        },
+
+        xdRegExp: /^((\w+)\:)?\/\/([^\/\\]+)/,
+
+        /**
+         * Is an URL on another domain. Only works for browser use, returns
+         * false in non-browser environments. Only used to know if an
+         * optimized .js version of a text resource should be loaded
+         * instead.
+         * @param {String} url
+         * @returns Boolean
+         */
+        useXhr: function (url, protocol, hostname, port) {
+            var uProtocol, uHostName, uPort,
+                match = text.xdRegExp.exec(url);
+            if (!match) {
+                return true;
+            }
+            uProtocol = match[2];
+            uHostName = match[3];
+
+            uHostName = uHostName.split(':');
+            uPort = uHostName[1];
+            uHostName = uHostName[0];
+
+            return (!uProtocol || uProtocol === protocol) &&
+                   (!uHostName || uHostName.toLowerCase() === hostname.toLowerCase()) &&
+                   ((!uPort && !uHostName) || uPort === port);
+        },
+
+        finishLoad: function (name, strip, content, onLoad) {
+            content = strip ? text.strip(content) : content;
+            if (masterConfig.isBuild) {
+                buildMap[name] = content;
+            }
+            onLoad(content);
+        },
+
+        load: function (name, req, onLoad, config) {
+            //Name has format: some.module.filext!strip
+            //The strip part is optional.
+            //if strip is present, then that means only get the string contents
+            //inside a body tag in an HTML string. For XML/SVG content it means
+            //removing the <?xml ...?> declarations so the content can be inserted
+            //into the current doc without problems.
+
+            // Do not bother with the work if a build and text will
+            // not be inlined.
+            if (config && config.isBuild && !config.inlineText) {
+                onLoad();
+                return;
+            }
+
+            masterConfig.isBuild = config && config.isBuild;
+
+            var parsed = text.parseName(name),
+                nonStripName = parsed.moduleName +
+                    (parsed.ext ? '.' + parsed.ext : ''),
+                url = req.toUrl(nonStripName),
+                useXhr = (masterConfig.useXhr) ||
+                         text.useXhr;
+
+            // Do not load if it is an empty: url
+            if (url.indexOf('empty:') === 0) {
+                onLoad();
+                return;
+            }
+
+            //Load the text. Use XHR if possible and in a browser.
+            if (!hasLocation || useXhr(url, defaultProtocol, defaultHostName, defaultPort)) {
+                text.get(url, function (content) {
+                    text.finishLoad(name, parsed.strip, content, onLoad);
+                }, function (err) {
+                    if (onLoad.error) {
+                        onLoad.error(err);
+                    }
+                });
+            } else {
+                //Need to fetch the resource across domains. Assume
+                //the resource has been optimized into a JS module. Fetch
+                //by the module name + extension, but do not include the
+                //!strip part to avoid file system issues.
+                req([nonStripName], function (content) {
+                    text.finishLoad(parsed.moduleName + '.' + parsed.ext,
+                                    parsed.strip, content, onLoad);
+                });
+            }
+        },
+
+        write: function (pluginName, moduleName, write, config) {
+            if (buildMap.hasOwnProperty(moduleName)) {
+                var content = text.jsEscape(buildMap[moduleName]);
+                write.asModule(pluginName + "!" + moduleName,
+                               "define(function () { return '" +
+                                   content +
+                               "';});\n");
+            }
+        },
+
+        writeFile: function (pluginName, moduleName, req, write, config) {
+            var parsed = text.parseName(moduleName),
+                extPart = parsed.ext ? '.' + parsed.ext : '',
+                nonStripName = parsed.moduleName + extPart,
+                //Use a '.js' file name so that it indicates it is a
+                //script that can be loaded across domains.
+                fileName = req.toUrl(parsed.moduleName + extPart) + '.js';
+
+            //Leverage own load() method to load plugin value, but only
+            //write out values that do not have the strip argument,
+            //to avoid any potential issues with ! in file names.
+            text.load(nonStripName, req, function (value) {
+                //Use own write() method to construct full module value.
+                //But need to create shell that translates writeFile's
+                //write() to the right interface.
+                var textWrite = function (contents) {
+                    return write(fileName, contents);
+                };
+                textWrite.asModule = function (moduleName, contents) {
+                    return write.asModule(moduleName, fileName, contents);
+                };
+
+                text.write(pluginName, nonStripName, textWrite, config);
+            }, config);
+        }
+    };
+
+    if (masterConfig.env === 'node' || (!masterConfig.env &&
+            typeof process !== "undefined" &&
+            process.versions &&
+            !!process.versions.node &&
+            !process.versions['node-webkit'])) {
+        //Using special require.nodeRequire, something added by r.js.
+        fs = require.nodeRequire('fs');
+
+        text.get = function (url, callback, errback) {
+            try {
+                var file = fs.readFileSync(url, 'utf8');
+                //Remove BOM (Byte Mark Order) from utf8 files if it is there.
+                if (file.indexOf('\uFEFF') === 0) {
+                    file = file.substring(1);
+                }
+                callback(file);
+            } catch (e) {
+                if (errback) {
+                    errback(e);
+                }
+            }
+        };
+    } else if (masterConfig.env === 'xhr' || (!masterConfig.env &&
+            text.createXhr())) {
+        text.get = function (url, callback, errback, headers) {
+            var xhr = text.createXhr(), header;
+            xhr.open('GET', url, true);
+
+            //Allow plugins direct access to xhr headers
+            if (headers) {
+                for (header in headers) {
+                    if (headers.hasOwnProperty(header)) {
+                        xhr.setRequestHeader(header.toLowerCase(), headers[header]);
+                    }
+                }
+            }
+
+            //Allow overrides specified in config
+            if (masterConfig.onXhr) {
+                masterConfig.onXhr(xhr, url);
+            }
+
+            xhr.onreadystatechange = function (evt) {
+                var status, err;
+                //Do not explicitly handle errors, those should be
+                //visible via console output in the browser.
+                if (xhr.readyState === 4) {
+                    status = xhr.status || 0;
+                    if (status > 399 && status < 600) {
+                        //An http 4xx or 5xx error. Signal an error.
+                        err = new Error(url + ' HTTP status: ' + status);
+                        err.xhr = xhr;
+                        if (errback) {
+                            errback(err);
+                        }
+                    } else {
+                        callback(xhr.responseText);
+                    }
+
+                    if (masterConfig.onXhrComplete) {
+                        masterConfig.onXhrComplete(xhr, url);
+                    }
+                }
+            };
+            xhr.send(null);
+        };
+    } else if (masterConfig.env === 'rhino' || (!masterConfig.env &&
+            typeof Packages !== 'undefined' && typeof java !== 'undefined')) {
+        //Why Java, why is this so awkward?
+        text.get = function (url, callback) {
+            var stringBuffer, line,
+                encoding = "utf-8",
+                file = new java.io.File(url),
+                lineSeparator = java.lang.System.getProperty("line.separator"),
+                input = new java.io.BufferedReader(new java.io.InputStreamReader(new java.io.FileInputStream(file), encoding)),
+                content = '';
+            try {
+                stringBuffer = new java.lang.StringBuffer();
+                line = input.readLine();
+
+                // Byte Order Mark (BOM) - The Unicode Standard, version 3.0, page 324
+                // http://www.unicode.org/faq/utf_bom.html
+
+                // Note that when we use utf-8, the BOM should appear as "EF BB BF", but it doesn't due to this bug in the JDK:
+                // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4508058
+                if (line && line.length() && line.charAt(0) === 0xfeff) {
+                    // Eat the BOM, since we've already found the encoding on this file,
+                    // and we plan to concatenating this buffer with others; the BOM should
+                    // only appear at the top of a file.
+                    line = line.substring(1);
+                }
+
+                if (line !== null) {
+                    stringBuffer.append(line);
+                }
+
+                while ((line = input.readLine()) !== null) {
+                    stringBuffer.append(lineSeparator);
+                    stringBuffer.append(line);
+                }
+                //Make sure we return a JavaScript string and not a Java string.
+                content = String(stringBuffer.toString()); //String
+            } finally {
+                input.close();
+            }
+            callback(content);
+        };
+    } else if (masterConfig.env === 'xpconnect' || (!masterConfig.env &&
+            typeof Components !== 'undefined' && Components.classes &&
+            Components.interfaces)) {
+        //Avert your gaze!
+        Cc = Components.classes;
+        Ci = Components.interfaces;
+        Components.utils['import']('resource://gre/modules/FileUtils.jsm');
+        xpcIsWindows = ('@mozilla.org/windows-registry-key;1' in Cc);
+
+        text.get = function (url, callback) {
+            var inStream, convertStream, fileObj,
+                readData = {};
+
+            if (xpcIsWindows) {
+                url = url.replace(/\//g, '\\');
+            }
+
+            fileObj = new FileUtils.File(url);
+
+            //XPCOM, you so crazy
+            try {
+                inStream = Cc['@mozilla.org/network/file-input-stream;1']
+                           .createInstance(Ci.nsIFileInputStream);
+                inStream.init(fileObj, 1, 0, false);
+
+                convertStream = Cc['@mozilla.org/intl/converter-input-stream;1']
+                                .createInstance(Ci.nsIConverterInputStream);
+                convertStream.init(inStream, "utf-8", inStream.available(),
+                Ci.nsIConverterInputStream.DEFAULT_REPLACEMENT_CHARACTER);
+
+                convertStream.readString(inStream.available(), readData);
+                convertStream.close();
+                inStream.close();
+                callback(readData.value);
+            } catch (e) {
+                throw new Error((fileObj && fileObj.path || '') + ': ' + e);
+            }
+        };
+    }
+    return text;
+});
diff --git a/content/docs/malhar-3.8/mkdocs/search_index.json b/content/docs/malhar-3.8/mkdocs/search_index.json
new file mode 100644
index 0000000..9bd9e0d
--- /dev/null
+++ b/content/docs/malhar-3.8/mkdocs/search_index.json
@@ -0,0 +1,2044 @@
+{
+    "docs": [
+        {
+            "location": "/",
+            "text": "Apache Apex Malhar\n\n\nApache Apex Malhar is an open source operator and codec library that can be used with the \nApache Apex\n platform to build real-time streaming applications.  Enabling users to extract value quickly, Malhar operators help get data in, analyze it in real-time, and get data out of Hadoop.  In addition to the operators, the library contains a number of example applications, demonstrating operator features and capabilities.\n\n\n\n\nCapabilities c [...]
+            "title": "Apache Apex Malhar"
+        },
+        {
+            "location": "/#apache-apex-malhar",
+            "text": "Apache Apex Malhar is an open source operator and codec library that can be used with the  Apache Apex  platform to build real-time streaming applications.  Enabling users to extract value quickly, Malhar operators help get data in, analyze it in real-time, and get data out of Hadoop.  In addition to the operators, the library contains a number of example applications, demonstrating operator features and capabilities.",
+            "title": "Apache Apex Malhar"
+        },
+        {
+            "location": "/#capabilities-common-across-malhar-operators",
+            "text": "For most streaming platforms, connectors are afterthoughts and often end up being simple \u2018bolt-ons\u2019 to the platform. As a result they often cause performance issues or data loss when put through failure scenarios and scalability requirements. Malhar operators do not face these issues as they were designed to be integral parts of Apex. Hence, they have following core streaming runtime capabilities   Fault tolerance  \u2013 Malhar operators where applicable h [...]
+            "title": "Capabilities common across Malhar operators"
+        },
+        {
+            "location": "/#operator-library-overview",
+            "text": "",
+            "title": "Operator Library Overview"
+        },
+        {
+            "location": "/#inputoutput-connectors",
+            "text": "Below is a summary of the various sub categories of input and output operators. Input operators also have a corresponding output operator   File Systems  \u2013 Most streaming analytics use cases require the data to be stored in HDFS or perhaps S3 if the application is running in AWS.  Users often need to re-run their streaming analytical applications against historical data or consume data from upstream processes that are perhaps writing to some NFS share.  Apex sup [...]
+            "title": "Input/output connectors"
+        },
+        {
+            "location": "/#parsers",
+            "text": "There are many industry vertical specific data formats that a streaming application developer might need to parse. Often there are existing parsers available for these that can be directly plugged into an Apache Apex application. For example in the Telco space, a Java based CDR parser can be directly plugged into Apache Apex operator. To further simplify development experience, Malhar also provides some operators for parsing common formats like XML (DOM & SAX), JSON  [...]
+            "title": "Parsers"
+        },
+        {
+            "location": "/#stream-manipulation",
+            "text": "Streaming data inevitably needs processing to clean, filter, tag, summarize, etc. The goal of Malhar is to enable the application developer to focus on WHAT needs to be done to the stream to get it in the right format and not worry about the HOW.  Malhar has several operators to perform the common stream manipulation actions like \u2013 GroupBy, Join, Distinct/Unique, Limit, OrderBy, Split, Sample, Inner join, Outer join, Select, Update etc.",
+            "title": "Stream manipulation"
+        },
+        {
+            "location": "/#compute",
+            "text": "One of the most important promises of a streaming analytics platform like Apache Apex is the ability to do analytics in real-time. However delivering on the promise becomes really difficult when the platform does not provide out of the box operators to support variety of common compute functions as the user then has to worry about making these scalable, fault tolerant, stateful, etc.  Malhar takes this responsibility away from the application developer by providing a [...]
+            "title": "Compute"
+        },
+        {
+            "location": "/#languages-support",
+            "text": "Migrating to a new platform often requires re-use of the existing code that would be difficult or time-consuming to re-write.  With this in mind, Malhar supports invocation of code written in other languages by wrapping them in one of the library operators, and allows execution of software written in:   JavaScript  Python  R  Ruby",
+            "title": "Languages Support"
+        },
+        {
+            "location": "/apis/calcite/",
+            "text": "Apache Apex is a unified stream and batch processing engine that enables application developers to process data at very high throughput with low latency. Although the different types of data have different processing needs, SQL remains a popular and a generic way for processing data. To ensure that existing ETL developers and developers who are well versed with Database applications adopt stream processing application development with ease, integration of SQL with Ap [...]
+            "title": "SQL"
+        },
+        {
+            "location": "/apis/calcite/#apex-calcite-integration",
+            "text": "Apache Calcite is a highly customizable engine for parsing and planning queries on relational data from various data sources; it provides storage independent optimization of queries and ways to integrate them into other frameworks which would like to take advantage and expose SQL capability to their users. For details, please read at  Apache Calcite Website .   Particularly in SQL on Apex, Calcite processes a query and then creates relational algebra to create proces [...]
+            "title": "Apex-Calcite Integration"
+        },
+        {
+            "location": "/apis/calcite/#sql-apis-for-apache-apex",
+            "text": "Listed below are the Java APIs which can be used by SQL/Apex users to create a DAG in the implementation of the  populateDAG  method of the  StreamingApplication  interface.     API  Description      SQLExecEnvironment.getEnvironment()  Creates a new SQL execution environment    SQLExecEnvironment.registerTable(tableName, endpointInstance)  Registers a new abstract table with existing environment.  endpointInstance  is an object of type  Endpoint  which defines a tab [...]
+            "title": "SQL APIs for Apache Apex"
+        },
+        {
+            "location": "/apis/calcite/#example-1-pure-style-sql-application",
+            "text": "With Apache Calcite Integration, you can use SQL queries across different data sources and provide UDFs (User Defined Functions) as per your business logic. This example will use a Kafka topic as the source and a HDFS file as the destination.\nFollowing application code will be used to explain APIs. Actual source code can be found  here .    public class PureStyleSQLApplication implements StreamingApplication\n  {\n    @Override\n    public void populateDAG(DAG dag,  [...]
+            "title": "Example 1: Pure Style SQL Application"
+        },
+        {
+            "location": "/apis/calcite/#constructing-sqlexecenvironment",
+            "text": "The class  SQLExecEnvironment  provides a starting point and a simple way to define metadata needed for running a SQL statement; a new instance of this class is returned by the  getEnvironment  static method.      // Creates SQLExecEnvironment instance by using static method getEnvironment\n  SQLExecEnvironment sqlEnv = SQLExecEnvironment.getEnvironment();",
+            "title": "Constructing SQLExecEnvironment"
+        },
+        {
+            "location": "/apis/calcite/#registering-tables-with-sqlexecenvironment",
+            "text": "Next, we need to register tables which can be used in a query. For this purpose, we can use  registerTable  method from SQLExecEnvironment.    // Register KafkaEnpoint as \"ORDERS\" table with kafka topic and data format as CSV\n  sqlEnv = sqlEnv.registerTable( \n                              \"ORDERS\", \n                              new KafkaEndpoint(\"localhost:9090\", \n                                                \"inputTopic\", \n                            [...]
+            "title": "Registering tables with SQLExecEnvironment"
+        },
+        {
+            "location": "/apis/calcite/#using-user-defined-functions-udf-in-a-sql-query",
+            "text": "We can use our own scalar UDF, implemented in Java, in a SQL statement for data manipulation but first, we need to register the function with the execution environment by using the  registerFunction  method.    sqlEnv = sqlEnv.registerFunction(\"APEXCONCAT\", PureStyleSQLApplication.class, \"apex_concat_str\");  In above code,  registerFunction  takes the UDF name to be used in SQL, JAVA class which implements the static method and name of that method as parameters.  [...]
+            "title": "Using User Defined Functions (UDF) in a SQL query"
+        },
+        {
+            "location": "/apis/calcite/#executing-sql-query",
+            "text": "Finally to execute the query we need to use  executeSQL  function with a DAG and SQL statement as parameters.    // Converting SQL statement to DAG \n  String sql = \"INSERT INTO SALES \n                SELECT STREAM ROWTIME, FLOOR(ROWTIME TO DAY), APEXCONCAT('OILPAINT', SUBSTRING(PRODUCT, 6, 7)) \n                FROM ORDERS \n                WHERE ID > 3 AND PRODUCT LIKE 'paint%'\";\n  sqlEnv.executeSQL(dag, sql);  When executeSQL method is called, the query goes t [...]
+            "title": "Executing SQL Query"
+        },
+        {
+            "location": "/apis/calcite/#example-2-fusion-style-sql-application",
+            "text": "As described in Pure Style SQL application, we can use different data sources as source and sink while developing Apex Applications with Calcite. This example will describe how we can develop Apex application with Apex stream as abstract table for SQL query. Actual source code can be found  here .    // Define Kafka Input Operator for reading data from Kafka\n  KafkaSinglePortInputOperator kafkaInput = dag.addOperator(\"KafkaInput\", \n                                [...]
+            "title": "Example 2: Fusion Style SQL Application"
+        },
+        {
+            "location": "/apis/calcite/#ongoing-efforts",
+            "text": "Apache Apex-Calcite integration provides support for basic queries and efforts are underway to extend support for aggregations, sorting and other features using Tumbling, Hopping and Session Windows.\nSupport for JSON, XML and JDBC endpoint are also planned. The goal of this integration is to make developing a streaming application using SQL easy so that SQL Developers don't have to write any java code at all.",
+            "title": "Ongoing efforts"
+        },
+        {
+            "location": "/operators/block_reader/",
+            "text": "Block Reader\n\n\nThis is a scalable operator that reads and parses blocks of data sources into records. A data source can be a file or a message bus that contains records and a block defines a chunk of data in the source by specifying the block offset and the length of the source belonging to the block. \n\n\nWhy is it needed?\n\n\nA Block Reader is needed to parallelize reading and parsing of a single data source, for example a file. Simple parallelism of reading d [...]
+            "title": "Block Reader"
+        },
+        {
+            "location": "/operators/block_reader/#block-reader",
+            "text": "This is a scalable operator that reads and parses blocks of data sources into records. A data source can be a file or a message bus that contains records and a block defines a chunk of data in the source by specifying the block offset and the length of the source belonging to the block.",
+            "title": "Block Reader"
+        },
+        {
+            "location": "/operators/block_reader/#why-is-it-needed",
+            "text": "A Block Reader is needed to parallelize reading and parsing of a single data source, for example a file. Simple parallelism of reading data sources can be achieved by multiple partitions reading different source of same type (for files see  AbstractFileInputOperator ) but Block Reader partitions can read blocks of same source in parallel and parse them for records ensuring that no record is duplicated or missed.",
+            "title": "Why is it needed?"
+        },
+        {
+            "location": "/operators/block_reader/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/block_reader/#abstractblockreader",
+            "text": "This is the abstract implementation that serves as the base for different types of data sources. It defines how a block metadata is processed. The flow diagram below describes the processing of a block metadata.",
+            "title": "AbstractBlockReader"
+        },
+        {
+            "location": "/operators/block_reader/#ports",
+            "text": "blocksMetadataInput: input port on which block metadata are received.    blocksMetadataOutput: output port on which block metadata are emitted if the port is connected. This port is useful when a downstream operator that receives records from block reader may also be interested to know the details of the corresponding blocks.    messages: output port on which tuples of type  com.datatorrent.lib.io.block.AbstractBlockReader.ReaderRecord  are emitted. This class encaps [...]
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/block_reader/#readercontext",
+            "text": "This is one of the most important fields in the block reader. It is of type  com.datatorrent.lib.io.block.ReaderContext  and is responsible for fetching bytes that make a record. It also lets the reader know how many total bytes were consumed which may not be equal to the total bytes in a record because consumed bytes also include bytes for the record delimiter which may not be a part of the actual record.  Once the reader creates an input stream for the block (or us [...]
+            "title": "readerContext"
+        },
+        {
+            "location": "/operators/block_reader/#abstract-methods",
+            "text": "STREAM setupStream(B block) : creating a stream for a block is dependent on the type of source which is not known to AbstractBlockReader. Sub-classes which deal with a specific data source provide this implementation.    R convertToRecord(byte[] bytes) : this converts the array of bytes into the actual instance of record type.",
+            "title": "Abstract methods"
+        },
+        {
+            "location": "/operators/block_reader/#auto-scalability",
+            "text": "Block reader can auto-scale, that is, depending on the backlog (total number of all the blocks which are waiting in the  blocksMetadataInput  port queue of all partitions) it can create more partitions or reduce them. Details are discussed in the last section which covers the  partitioner and stats-listener .",
+            "title": "Auto-scalability"
+        },
+        {
+            "location": "/operators/block_reader/#configuration",
+            "text": "maxReaders : when auto-scaling is enabled, this controls the maximum number of block reader partitions that can be created.  minReaders : when auto-scaling is enabled, this controls the minimum number of block reader partitions that should always exist.  collectStats : this enables or disables auto-scaling. When it is set to  true  the stats (number of blocks in the queue) are collected and this triggers partitioning; otherwise auto-scaling is disabled.  intervalMill [...]
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/block_reader/#example-application",
+            "text": "This simple dag demonstrates how any concrete implementation of  AbstractFSBlockReader  can be plugged into an application.    In the above application, file splitter creates block metadata for files which are sent to block reader. Partitions of the block reader parses the file blocks for records which are filtered, transformed and then persisted to a file (created per block). Therefore block reader is parallel partitioned with the 2 downstream operators - filter/con [...]
+            "title": "Example Application"
+        },
+        {
+            "location": "/operators/block_reader/#abstractfsreadaheadlinereader",
+            "text": "This extension of  AbstractFSBlockReader  parses lines from a block and binds the  readerContext  field to an instance of  ReaderContext.ReadAheadLineReaderContext .  It is abstract because it doesn't provide an implementation of  convertToRecord(byte[] bytes)  since the user may want to convert the bytes that make a line into some other type.",
+            "title": "AbstractFSReadAheadLineReader"
+        },
+        {
+            "location": "/operators/block_reader/#readaheadlinereadercontext",
+            "text": "In order to handle a line split across adjacent blocks, ReadAheadLineReaderContext always reads beyond the block boundary and ignores the bytes till the first end-of-line character of all the blocks except the first block of the file. This ensures that no line is missed or incomplete.  This is one of the most common ways of handling a split record. It doesn't require any further information to decide if a line is complete. However, the cost of this consistent way to  [...]
+            "title": "ReadAheadLineReaderContext"
+        },
+        {
+            "location": "/operators/block_reader/#abstractfslinereader",
+            "text": "Similar to  AbstractFSReadAheadLineReader , even this parses lines from a block. However, it binds the  readerContext  field to an instance of  ReaderContext.LineReaderContext .",
+            "title": "AbstractFSLineReader"
+        },
+        {
+            "location": "/operators/block_reader/#linereadercontext",
+            "text": "This handles the line split differently from  ReadAheadLineReaderContext . It doesn't always read from the next block. If the end of the last line is aligned with the block boundary then it stops processing the block. It does read from the next block when the boundaries are not aligned, that is, last line extends beyond the block boundary. The result of this is an inconsistency in reading the next block.  When the boundary of the last line of the previous block was a [...]
+            "title": "LineReaderContext"
+        },
+        {
+            "location": "/operators/block_reader/#fsslicereader",
+            "text": "A concrete extension of  AbstractFSBlockReader  that reads fixed-size  byte[]  from a block and emits the byte array wrapped in  com.datatorrent.netlet.util.Slice .  This operator binds the  readerContext  to an instance of  ReaderContext.FixedBytesReaderContext .",
+            "title": "FSSliceReader"
+        },
+        {
+            "location": "/operators/block_reader/#fixedbytesreadercontext",
+            "text": "This implementation of  ReaderContext  never reads beyond a block boundary which can result in the last  byte[]  of a block to be of a shorter length than the rest of the records.",
+            "title": "FixedBytesReaderContext"
+        },
+        {
+            "location": "/operators/block_reader/#configuration_1",
+            "text": "readerContext.length : length of each record. By default, this is initialized to the default hdfs block size.",
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/block_reader/#partitioner-and-statslistener",
+            "text": "The logical instance of the block reader acts as the Partitioner (unless a custom partitioner is set using the operator attribute -  PARTITIONER ) as well as a StatsListener. This is because the  AbstractBlockReader  implements both the  com.datatorrent.api.Partitioner  and  com.datatorrent.api.StatsListener  interfaces and provides an implementation of  definePartitions(...)  and  processStats(...)  which make it auto-scalable.",
+            "title": "Partitioner and StatsListener"
+        },
+        {
+            "location": "/operators/block_reader/#processstats",
+            "text": "The application master invokes  Response processStats(BatchedOperatorStats stats)  method on the logical instance with the stats ( tuplesProcessedPSMA ,  tuplesEmittedPSMA ,  latencyMA , etc.) of each partition. The data which this operator is interested in is the  queueSize  of the input port  blocksMetadataInput .  Usually the  queueSize  of an input port gives the count of waiting control tuples plus data tuples. However, if a stats listener is interested only in  [...]
+            "title": "processStats "
+        },
+        {
+            "location": "/operators/block_reader/#definepartitions",
+            "text": "Based on the  repartitionRequired  field of the  Response  object which is returned by  processStats  method, the application master invokes   Collection<Partition<AbstractBlockReader<...>>> definePartitions(Collection<Partition<AbstractBlockReader<...>>> partitions, PartitioningContext context)  on the logical instance which is also the partitioner instance. The implementation calculates the difference between required partitions and the existing count of partitions [...]
+            "title": "definePartitions"
+        },
+        {
+            "location": "/operators/csvformatter/",
+            "text": "CsvFormatter\n\n\nOperator Objective\n\n\nThis operator receives a POJO (\nPlain Old Java Object\n) as an incoming tuple, converts the data in \nthe incoming POJO to a custom delimited string and emits the delimited string.\n\n\nCsvFormatter supports schema definition as a JSON string. \n\n\nCsvFormatter does not hold any state and is \nidempotent\n, \nfault-tolerant\n and \nstatically/dynamically partitionable\n.\n\n\nOperator Information\n\n\n\n\nOperator location: [...]
+            "title": "CSV Formatter"
+        },
+        {
+            "location": "/operators/csvformatter/#csvformatter",
+            "text": "",
+            "title": "CsvFormatter"
+        },
+        {
+            "location": "/operators/csvformatter/#operator-objective",
+            "text": "This operator receives a POJO ( Plain Old Java Object ) as an incoming tuple, converts the data in \nthe incoming POJO to a custom delimited string and emits the delimited string.  CsvFormatter supports schema definition as a JSON string.   CsvFormatter does not hold any state and is  idempotent ,  fault-tolerant  and  statically/dynamically partitionable .",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/csvformatter/#operator-information",
+            "text": "Operator location:  malhar-contrib  Available since:  3.2.0  Operator state:  Evolving  Java Packages:  Operator:  com.datatorrent.contrib.formatter.CsvFormatter",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/csvformatter/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/csvformatter/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      in.TUPLE_CLASS  TUPLE_CLASS attribute on input port which tells operator the class of POJO which will be incoming  Class or FQCN  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/csvformatter/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples which need to be formatted are received on this port  Object (POJO)  Yes    out  Tuples that are formatted are emitted from this port  String  No    err  Tuples that could not be converted are emitted on this port  Object  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/csvformatter/#limitations",
+            "text": "Current CsvFormatter contain following limitations:   The field names in schema and the pojo field names should match.For eg. if name of the schema field is \"customerName\", then POJO should contain a field with the same name.   Field wise validation/formatting is not yet supported.  The fields will be written to the file in the same order as specified in schema.json",
+            "title": "Limitations"
+        },
+        {
+            "location": "/operators/csvformatter/#example",
+            "text": "Example for CsvFormatter can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/csvformatter",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/csvformatter/#advanced",
+            "text": "",
+            "title": "Advanced"
+        },
+        {
+            "location": "/operators/csvformatter/#partitioning-of-csvformatter",
+            "text": "Being stateless operator, CsvFormatter will ensure built-in partitioners present in Malhar library can be directly used by setting properties as follows:",
+            "title": "Partitioning of CsvFormatter"
+        },
+        {
+            "location": "/operators/csvformatter/#stateless-partioning-of-csvformatter",
+            "text": "Stateless partitioning will ensure that CsvFormatter will be partitioned right at the start of the application and will remain partitioned throughout the lifetime of the DAG.\nCsvFormatter can be stateless partitioned by adding following lines to properties.xml:    <property>\n    <name>dt.operator.{OperatorName}.attr.PARTITIONER</name>\n    <value>com.datatorrent.common.partitioner.StatelessPartitioner:2</value>\n  </property>  where {OperatorName} is the name of th [...]
+            "title": "Stateless partioning of CsvFormatter"
+        },
+        {
+            "location": "/operators/csvformatter/#dynamic-partitioning-of-csvformatter",
+            "text": "Dynamic partitioning is a feature of Apex platform which changes the partition of the operator based on certain conditions.\nCsvFormatter can be dynamically partitioned using below out-of-the-box partitioner:",
+            "title": "Dynamic Partitioning of CsvFormatter"
+        },
+        {
+            "location": "/operators/csvformatter/#throughput-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partition CsvFormatter:      StatelessThroughputBasedPartitioner<CsvFormatter> partitioner = new StatelessThroughputBasedPartitioner<>();\n    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\n    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\n    partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));\n    dag.setAttribute(csvFormatter [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/csvParserOperator/",
+            "text": "Csv Parser Operator\n\n\nOperator Objective\n\n\nThis operator is designed to parse delimited records and construct a map or concrete java class also known as \n\"POJO\"\n out of it. User need to provide the schema to describe the delimited data. Based on schema definition the operator will parse the incoming record to object map and POJO.  User can also provide constraints if any, in the schema. The supported constraints are listed in \nconstraints table\n. The inco [...]
+            "title": "CSV Parser"
+        },
+        {
+            "location": "/operators/csvParserOperator/#csv-parser-operator",
+            "text": "",
+            "title": "Csv Parser Operator"
+        },
+        {
+            "location": "/operators/csvParserOperator/#operator-objective",
+            "text": "This operator is designed to parse delimited records and construct a map or concrete java class also known as  \"POJO\"  out of it. User need to provide the schema to describe the delimited data. Based on schema definition the operator will parse the incoming record to object map and POJO.  User can also provide constraints if any, in the schema. The supported constraints are listed in  constraints table . The incoming record will be validated against those constrain [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/csvParserOperator/#overview",
+            "text": "The operator is  idempotent ,  fault-tolerant  and  partitionable .",
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/csvParserOperator/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/csvParserOperator/#operator-information",
+            "text": "Operator location: malhar-contrib  Available since: 3.2.0  Operator state: Evolving  Java Package: com.datatorrent.contrib.parser.CsvParser",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/csvParserOperator/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      out.TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted  Class  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/csvParserOperator/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that needs to be parsed are recieved on this port  byte[]  Yes    out  Valid Tuples that are emitted as pojo  Object (POJO)  No    parsedOutput  Valid Tuples that are emitted as map  Map  No    err  Invalid Tuples are emitted with error message  KeyValPair <String, String>  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/csvParserOperator/#partitioning",
+            "text": "CSV Parser is both statically and dynamically partitionable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/csvParserOperator/#static-partitioning",
+            "text": "This can be achieved in 2 ways as shown below.  Specifying the partitioner and number of partitions in the populateDAG() method      CsvParser csvParser = dag.addOperator(\"csvParser\", CsvParser.class);\n    StatelessPartitioner<CsvParser> partitioner1 = new StatelessPartitioner<CsvParser>(2);\n    dag.setAttribute(csvParser, Context.OperatorContext.PARTITIONER, partitioner1);  Specifying the partitioner in properties file.     <property>\n     <name>dt.operator.{Op [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/csvParserOperator/#dynamic-paritioning",
+            "text": "CsvParser can be dynamically partitioned using out-of-the-box partitioner:",
+            "title": "Dynamic Paritioning"
+        },
+        {
+            "location": "/operators/csvParserOperator/#throughput-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partition CsvParser:  CsvParser csvParser = dag.addOperator(\"csvParser\", CsvParser.class);\nStatelessThroughputBasedPartitioner<CsvParser> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\npartitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\npartitioner.setMinimumEvents(conf.getLong(MIN_THROU [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/csvParserOperator/#example",
+            "text": "Example for Csv Parser can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/parser",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/deduper/",
+            "text": "Deduper - Operator Documentation\n\n\nIntroduction\n\n\nAbout this document\n\n\nThis document is intended as a guide for understanding and using\nthe Dedup operator.\n\n\nTerminology\n\n\nWe will refer to this operator as the Deduper or Dedup operator\ninterchangeably.\n\n\nOverview\n\n\nDedup - \u201cWhat\u201d in a Nutshell\n\n\nDedup is actually short for Deduplication. Duplicates are omnipresent and\ncan be found in almost any kind of data. Most of the times it  [...]
+            "title": "Deduper"
+        },
+        {
+            "location": "/operators/deduper/#deduper-operator-documentation",
+            "text": "",
+            "title": "Deduper - Operator Documentation"
+        },
+        {
+            "location": "/operators/deduper/#introduction",
+            "text": "",
+            "title": "Introduction"
+        },
+        {
+            "location": "/operators/deduper/#about-this-document",
+            "text": "This document is intended as a guide for understanding and using\nthe Dedup operator.",
+            "title": "About this document"
+        },
+        {
+            "location": "/operators/deduper/#terminology",
+            "text": "We will refer to this operator as the Deduper or Dedup operator\ninterchangeably.",
+            "title": "Terminology"
+        },
+        {
+            "location": "/operators/deduper/#overview",
+            "text": "",
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/deduper/#dedup-what-in-a-nutshell",
+            "text": "Dedup is actually short for Deduplication. Duplicates are omnipresent and\ncan be found in almost any kind of data. Most of the times it is\nessential to discard, or at the very least separate out the data into\nunique\u00a0and duplicate\u00a0components. The entire purpose of this\noperator is to de-duplicate data. In other words, when data passes\nthrough this operator, it will be segregated into two different data\nsets, one of which contains all unique tuples, and [...]
+            "title": "Dedup - \u201cWhat\u201d in a Nutshell"
+        },
+        {
+            "location": "/operators/deduper/#dedup-how-in-a-nutshell",
+            "text": "In order to quickly decide whether an incoming tuple is duplicate\nor unique, it has to store each incoming tuple (or a signature, like key,\nfor example) to be used for comparison later. A plain in-memory storage\nmay work for small datasets, but will not scale for large ones. Deduper employs a large scale distributed persistent hashing mechanism (known as the Managed State) which allows\nit to identify if a particular tuple is duplicate or unique. Managed state is  [...]
+            "title": "Dedup - \u201cHow\u201d in a Nutshell"
+        },
+        {
+            "location": "/operators/deduper/#use-cases-basic-dedup",
+            "text": "",
+            "title": "Use cases - Basic Dedup"
+        },
+        {
+            "location": "/operators/deduper/#dedup-key",
+            "text": "A dedup key is a set of one or more fields in the data tuple which\nacts as the key\u00a0for the tuples.\nThis is used by the deduper to compare tuples to arrive at the\nconclusion on whether two tuples are duplicates.  Consider an example schema and two sample tuples  {Name, Phone, Email, Date, State, Zip, Country}  Tuple 1:  {\n  Austin U. Saunders,\n  +91-319-340-59385,\n  ausaunders@semperegestasurna.com,\n  2015-11-09 13:38:38,\n  Texas,\n  73301,\n  United Stat [...]
+            "title": "Dedup Key"
+        },
+        {
+            "location": "/operators/deduper/#use-case-details",
+            "text": "Consider the case of de-duplicating a master data set\nwhich is stored in a file.\u00a0Further also consider the\nfollowing schema for tuples in the data set.  {Name, Phone, Email, Date, City, Zip, Country}  Also consider that we need to identify unique customers from the\nmaster data set. So, ultimately the output needed for the use case is\ntwo data sets - Unique Records\u00a0and Duplicate Records.  As part of configuring the operator for this use case, we need to\ [...]
+            "title": "Use case Details"
+        },
+        {
+            "location": "/operators/deduper/#use-case-dedup-with-expiry",
+            "text": "",
+            "title": "Use case - Dedup with Expiry"
+        },
+        {
+            "location": "/operators/deduper/#motivation",
+            "text": "The Basic Dedup use case is the most straightforward and is\nusually applied when the amount of data to be processed is not huge.\nHowever, if the incoming data is huge, or even never-ending, it is\nusually not necessary to keep storing all the data. This is because in\nmost real world use cases, the duplicates occur only a short distance\napart. Hence, after a while, it is usually okay to forget part of\nthe history and consider only limited history for identifying\ [...]
+            "title": "Motivation"
+        },
+        {
+            "location": "/operators/deduper/#expiry-key",
+            "text": "The easiest way to understand this use case is to consider\ntime\u00a0as the criterion for expiring\ntuples. Time\u00a0is a natural expiry\nkey and is in line with the concept of expiry. Formally, an expiry field\nis a field in the input tuple which can be used to discard incoming\ntuples as expired. This expiry key\nusually works with another parameter called Expiry Period defined\nnext.",
+            "title": "Expiry Key"
+        },
+        {
+            "location": "/operators/deduper/#expiry-period",
+            "text": "The expiry period is the value supplied by the user to define the\nextent of history which should be considered while expiring\ntuples.",
+            "title": "Expiry Period"
+        },
+        {
+            "location": "/operators/deduper/#use-case-details_1",
+            "text": "Consider an incoming stream of system logs. The use case requires\nus to identify duplicate log messages and pass on only the unique ones.\nAnother relaxation in the use case is that the log messages which are\nolder than a day, may not be considered and must be filtered out as\nexpired. The expiry must be measured with respect to the time stamp in\nthe logs. For example, if the timestamp in the incoming message is 30-12-2014 00:00:00  and the\nlatest message that th [...]
+            "title": "Use case Details"
+        },
+        {
+            "location": "/operators/deduper/#use-cases-summary",
+            "text": "Basic Dedup  - Deduplication of\n    bounded datasets. Data is assumed to be bounded. This use case is\n    not meant for never ending streams of data. For example:\n    Deduplication of master data like customer records, product catalogs\n    etc.  Time Based Dedup \u00a0- Deduplication of\n    unlimited streams of data. This use case handles unbounded streams\n    of data and can run forever. An expiry key and criterion is expected\n    as part of the input which h [...]
+            "title": "Use cases - Summary"
+        },
+        {
+            "location": "/operators/deduper/#technical-architecture",
+            "text": "",
+            "title": "Technical Architecture"
+        },
+        {
+            "location": "/operators/deduper/#class-structure",
+            "text": "",
+            "title": "Class Structure"
+        },
+        {
+            "location": "/operators/deduper/#architectural-details",
+            "text": "",
+            "title": "Architectural Details"
+        },
+        {
+            "location": "/operators/deduper/#concepts",
+            "text": "",
+            "title": "Concepts"
+        },
+        {
+            "location": "/operators/deduper/#dedup-key-specified-by-keyexpression-parameter",
+            "text": "A dedup key is a set of one or more fields in the data tuple which\nacts as the key\u00a0for the tuples.\nThis is used by the deduper to compare tuples to arrive at the\nconclusion on whether two tuples are duplicates. If Dedup Key of two\ntuples match, then they are duplicates, else they are unique.",
+            "title": "Dedup Key - Specified by keyExpression\u00a0parameter"
+        },
+        {
+            "location": "/operators/deduper/#expiry-key-specified-by-timeexpression-parameter",
+            "text": "A tuple may or may not have an Expiry Key. Dedup operator cannot\nkeep storing all the data that is flowing into the operator. At some\npoint it becomes essential to discard some of the historical tuples in\ninterest of memory and efficiency.  At the same time, tuples are expected to arrive at the Dedup\noperator within some time after they are generated. After this time, the\ntuples may be considered as stale or obsolete.  In such cases, the Deduper considers these  [...]
+            "title": "Expiry Key - Specified by timeExpression\u00a0parameter"
+        },
+        {
+            "location": "/operators/deduper/#expiry-period_1",
+            "text": "The Expiry Period is the value supplied by the user which decides\nwhen a particular tuple expires.",
+            "title": "Expiry Period"
+        },
+        {
+            "location": "/operators/deduper/#time-points",
+            "text": "For every dataset that the deduper processes, a set of time points is maintained:   Latest Point \u00a0- This is the maximum\n    time point observed in all the processed tuples.  Expiry Point \u00a0- This is given by:\n     Expiry Point = Latest Point - Expiry Period   These points help the deduper to make decisions related to expiry\nof a tuple.",
+            "title": "Time Points"
+        },
+        {
+            "location": "/operators/deduper/#example-expiry",
+            "text": "Tuple Id  Expiry Key (Expiry Period = 10)  Latest Point  Expiry Point  Decision for Tuple      1  10  10  1  Not Expired    2  20  20  11  Not Expired    3  25  25  16  Not Expired    4  40  40  31  Not Expired    5  21  40  31  Expired    6  35  40  31  Not Expired    7  45  45  36  Not Expired    8  57  57  48  Not Expired",
+            "title": "Example - Expiry"
+        },
+        {
+            "location": "/operators/deduper/#time-buckets-a-component-of-managed-state",
+            "text": "One of the requirements of the Deduper is to store all the unique\ntuples (actually, just the keys of tuples). Keeping an ever growing\ncache in memory is not scalable. So what we need is a limited cache\nbacked by a persistent store. When data is requested to be fetched from managed\nstate, it is also cached in an in-memory cache. Buckets help\nnarrow down the search of duplicates for incoming tuples. A Bucket is an\nabstraction for a collection of tuples all of whi [...]
+            "title": "Time Buckets (A component of Managed State)"
+        },
+        {
+            "location": "/operators/deduper/#bucket-span",
+            "text": "Bucket span is simply the range of the domain\nthat is covered by the Bucket. This span is specified in\nthe domain of the Expiry key. If the Expiry\nKey is time, \u00a0then the Bucket span\nwill be specified in seconds. It is\nonly defined in case tuples have an Expiry Key.",
+            "title": "Bucket Span"
+        },
+        {
+            "location": "/operators/deduper/#number-of-buckets",
+            "text": "The number of buckets can be given by -  Num Buckets = Expiry\nPeriod / Bucket Span  This is because at any point of time, we need only store Expiry\nPeriod worth of data.",
+            "title": "Number of Buckets"
+        },
+        {
+            "location": "/operators/deduper/#example-buckets",
+            "text": "",
+            "title": "Example - Buckets"
+        },
+        {
+            "location": "/operators/deduper/#assumptions",
+            "text": "",
+            "title": "Assumptions"
+        },
+        {
+            "location": "/operators/deduper/#assumption-1",
+            "text": "This assumption is only applicable in case of Dedup with\nExpiry.  For any two tuples, t1 and t2 having dedup keys d1 and d2, and\nexpiry keys e1 and e2, respectively, the following holds:  If d1 = d2,\n  then e1 = e2  In other words, there may never\nbe\u00a0two tuples t1 and t2 such that:  Tuple 1: d1, e1\nTuple 2: d2, e2\nd1 = d2 and e1 != e2  In other words, any two tuples with the same dedup key are assumed to have the\nsame expiry key as well.\nThis assumption  [...]
+            "title": "Assumption 1 "
+        },
+        {
+            "location": "/operators/deduper/#flow-of-a-tuple-through-dedup-operator",
+            "text": "Tuples flow through the Dedup operator one by one. Deduper may process a tuple immediately, or store it in some data\nstructure for later processing.  When a tuple arrives at the input\nport\u00a0of the Dedup operator, it does\nthe following tasks.",
+            "title": "Flow of a Tuple through Dedup Operator"
+        },
+        {
+            "location": "/operators/deduper/#check-if-tuple-is-expired",
+            "text": "This is only done in case of Dedup with expiry. The\nfollowing condition is used to check if the tuple is expired.  if ( Latest Point - Expiry Key < Expiry Point )\n  then Expired  If the tuple is expired, then send it to the expired port.",
+            "title": "Check if tuple is Expired"
+        },
+        {
+            "location": "/operators/deduper/#check-if-tuple-is-a-duplicate-or-unique",
+            "text": "Once a tuple passes the check of expiry, we proceed to check if\nthe tuple is a duplicate of some earlier tuple. Note that\nif the tuple in question is not expired, the duplicate will also not\nhave expired due to the assumption listed  here .\nThe Deduper queries the Managed state to fetch the value for the tuple key.\nThis request is processed by the Managed state in a separate asynchronous thread.\nOnce this request is submitted, the Deduper moves on to process ot [...]
+            "title": "Check if tuple is a Duplicate or Unique"
+        },
+        {
+            "location": "/operators/deduper/#process-pending-tuples",
+            "text": "Once the Deduper has looked at the all the tuples in the current window,\nit starts to process the tuples in the waiting queue to finalize the decision\n(unique or duplicate) for these tuples.\nOnce the request to Managed state is completed for a tuple and the value is\nfetched from persistent storage, the Deduper can decide if the tuple in\nquestion is a duplicate or a unique.\nDepending on whether there is enough time left in the current window,\nit can do one of t [...]
+            "title": "Process pending tuples"
+        },
+        {
+            "location": "/operators/deduper/#ports-attributes-and-properties",
+            "text": "",
+            "title": "Ports, Attributes and Properties"
+        },
+        {
+            "location": "/operators/deduper/#ports",
+            "text": "The deduper has a single input port and multiple output\nports.   input  - This is the input port through\n    which the tuples arrive at the Deduper.  unique \u00a0- This is the output port on\n    which unique tuples are sent out by the Deduper.  duplicate \u00a0- This is the output port on\n    which duplicate tuples are sent out by the Deduper.  expired \u00a0- This is the output port on\n    which expired tuples are sent out by the Deduper.   The user can choose [...]
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/deduper/#attributes",
+            "text": "Input port Attribute - input.TUPLE_CLASS \u00a0- Class or the fully\nqualified class name.  Mandatory attribute  Tells the operator about the type of the incoming\ntuple.",
+            "title": "Attributes"
+        },
+        {
+            "location": "/operators/deduper/#properties",
+            "text": "keyExpression \u00a0- String   Mandatory parameter.  The java expression to extract the key fields in the incoming tuple (POJO)     timeExpression \u00a0- String - (Time Based Deduper only)   The java expression to extract the time field in the incoming tuple (POJO).     expireBefore \u00a0- Long (Seconds) - (Time Based Deduper only)   This is the total time period during which a tuple stays in the system and blocks any other tuple with the same key.     bucketSpan \ [...]
+            "title": "Properties"
+        },
+        {
+            "location": "/operators/deduper/#example",
+            "text": "Please refer to  https://github.com/DataTorrent/examples/tree/master/tutorials/dedup \u00a0for\nan example on how to use Deduper.",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/deduper/#partitioning",
+            "text": "Deduper can be statically partitioned using the operator\nattribute: PARTITIONER  Add the following property to the properties.xml file:  <property>\n\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0<name>dt.operator.{OperatorName}.attr.PARTITIONER</name>\n\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0\u00a0<value>com.datatorrent.common.partitioner.StatelessPartitioner:2</value>\n</property>  This will partition the Dedup operator into 2 static partitions. Change the numb [...]
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/enricher/",
+            "text": "POJO Enricher\n\n\nOperator Objective\n\n\nThis operator receives an POJO (\nPlain Old Java Object\n) as an incoming tuple and uses an external source to enrich the data in \nthe incoming tuple and finally emits the enriched data as a new enriched POJO.\n\n\nPOJOEnricher supports enrichment from following external sources:\n\n\n\n\nJSON File Based\n - Reads the file in memory having content stored in JSON format and use that to enrich the data. This can be done using [...]
+            "title": "Enricher"
+        },
+        {
+            "location": "/operators/enricher/#pojo-enricher",
+            "text": "",
+            "title": "POJO Enricher"
+        },
+        {
+            "location": "/operators/enricher/#operator-objective",
+            "text": "This operator receives an POJO ( Plain Old Java Object ) as an incoming tuple and uses an external source to enrich the data in \nthe incoming tuple and finally emits the enriched data as a new enriched POJO.  POJOEnricher supports enrichment from following external sources:   JSON File Based  - Reads the file in memory having content stored in JSON format and use that to enrich the data. This can be done using FSLoader implementation.  JDBC Based  - Any JDBC store c [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/enricher/#operator-usecase",
+            "text": "Bank  transaction records  usually contains customerId. For further analysis of transaction one wants the customer name and other customer related information. \nSuch information is present in another database. One could enrich the transaction's record with customer information using POJOEnricher.  Call Data Record (CDR)  contains only mobile/telephone numbers of the customer. Customer information is missing in CDR. POJO Enricher can be used to enrich \nCDR with cust [...]
+            "title": "Operator Usecase"
+        },
+        {
+            "location": "/operators/enricher/#operator-information",
+            "text": "Operator location:  malhar-contrib  Available since:  3.4.0  Operator state:  Evolving  Java Packages:  Operator:  com.datatorrent.contrib.enrich.POJOEnricher  FSLoader:  com.datatorrent.contrib.enrich.FSLoader  JDBCLoader:  com.datatorrent.contrib.enrich.JDBCLoader",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/enricher/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/enricher/#properties-of-jdbcloader-backendstore",
+            "text": "Property  Description  Type  Mandatory  Default Value      databaseUrl  Connection string for connecting to JDBC  String  Yes  N/A    databaseDriver  JDBC Driver class for connection to JDBC Store. This driver should be there in classpath  String  Yes  N/A    tableName  Name of the table from which data needs to be retrieved  String  Yes  N/A    connectionProperties  Command seperated list of advanced connection properties that need to be passed to JDBC Driver. For e [...]
+            "title": "Properties of JDBCLoader (BackendStore)"
+        },
+        {
+            "location": "/operators/enricher/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      input.TUPLE_CLASS  TUPLE_CLASS attribute on input port which tells operator the class of POJO which will be incoming  Class or FQCN  Yes    output.TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted  Class or FQCN  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/enricher/#ports",
+            "text": "Port  Description  Type  Mandatory      input  Tuple which needs to be enriched are received on this port  Object (POJO)  Yes    output  Tuples that are enriched from external source are emitted from on this port  Object (POJO)  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/enricher/#limitations",
+            "text": "Current POJOEnricher contains following limitation:   FSLoader loads the file content in memory. Though it loads only the composite key and composite value in memory, a very large amount of data would bloat the memory and make the operator go OOM. In case the filesize is large, allocate sufficient memory to the POJOEnricher.  Incoming POJO should be a subset of outgoing POJO.  includeFields  property should contains fields having same name in database column as well  [...]
+            "title": "Limitations"
+        },
+        {
+            "location": "/operators/enricher/#example",
+            "text": "Example for POJOEnricher can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/enricher",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/enricher/#advanced",
+            "text": "",
+            "title": "Advanced"
+        },
+        {
+            "location": "/operators/enricher/#caching-mechanism-in-pojoenricher",
+            "text": "POJOEnricher contains an cache which makes the lookup for keys more efficient. This is specially useful when data in external store is not changing much. \nHowever, one should carefully tune the  cacheExpirationInterval  property for desirable results.  On every incoming tuple, POJOEnricher first queries the cache. If the cache contains desired record and is within expiration interval, then it uses that to\nenrich the tuple, otherwise does a lookup to configured stor [...]
+            "title": "Caching mechanism in POJOEnricher"
+        },
+        {
+            "location": "/operators/enricher/#partitioning-of-pojoenricher",
+            "text": "Being a stateless operator, POJOEnricher ensures that built-in partitioners present in the Malhar library can be used directly simply by setting a few properties as follows:",
+            "title": "Partitioning of POJOEnricher"
+        },
+        {
+            "location": "/operators/enricher/#stateless-partioning-of-pojoenricher",
+            "text": "Stateless partitioning will ensure that POJOEnricher will will be partitioned right at the starting of the application and will remain partitioned throughout the lifetime of the DAG.\nPOJOEnricher can be stateless partitioned by adding following lines to properties.xml:    <property>\n    <name>dt.operator.{OperatorName}.attr.PARTITIONER</name>\n    <value>com.datatorrent.common.partitioner.StatelessPartitioner:2</value>\n  </property>  where {OperatorName} is the na [...]
+            "title": "Stateless partioning of POJOEnricher"
+        },
+        {
+            "location": "/operators/enricher/#dynamic-partitioning-of-pojoenricher",
+            "text": "Dynamic partitioning is a feature of the Apex platform which changes the partitioning of the operator based on certain conditions.\nPOJOEnricher can be dynamically partitioned using 2 out-of-the-box partitioners:",
+            "title": "Dynamic Partitioning of POJOEnricher"
+        },
+        {
+            "location": "/operators/enricher/#throughput-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partitioning POJOEnricher:      StatelessThroughputBasedPartitioner<POJOEnricher> partitioner = new StatelessThroughputBasedPartitioner<>();\n    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\n    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\n    partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));\n    dag.setAttribute(pojoEnric [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/enricher/#latency-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partitioning POJOEnricher:      StatelessLatencyBasedPartitioner<POJOEnricher> partitioner = new StatelessLatencyBasedPartitioner<>();\n    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\n    partitioner.setMaximumLatency(conf.getLong(MAX_THROUGHPUT, 10));\n    partitioner.setMinimumLatency(conf.getLong(MIN_THROUGHPUT, 3));\n    dag.setAttribute(pojoEnricherObj, Ope [...]
+            "title": "Latency based"
+        },
+        {
+            "location": "/operators/fsInputOperator/",
+            "text": "File Input Operator\n\n\nOperator Objective\n\n\nThis operator is designed to scan a directory for files, read and split file content into tuples\nsuch as lines or a block of bytes, and finally emit them on output ports defined in concrete\nsubclasses for further processing by downstream operators.\nIt can be used with any filesystem supported by Hadoop like HDFS, S3, ftp, NFS etc.\n\n\nOverview\n\n\nThe operator is \nidempotent\n, \nfault-tolerant\n and \npartitiona [...]
+            "title": "File Input"
+        },
+        {
+            "location": "/operators/fsInputOperator/#file-input-operator",
+            "text": "",
+            "title": "File Input Operator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#operator-objective",
+            "text": "This operator is designed to scan a directory for files, read and split file content into tuples\nsuch as lines or a block of bytes, and finally emit them on output ports defined in concrete\nsubclasses for further processing by downstream operators.\nIt can be used with any filesystem supported by Hadoop like HDFS, S3, ftp, NFS etc.",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/fsInputOperator/#overview",
+            "text": "The operator is  idempotent ,  fault-tolerant  and  partitionable .  Logic for directory scanning is encapsulated in the  DirectoryScanner  static inner class\nwhich provides functions such as matching file names against a regular expression, tracking files\nthat have already been processed (so that they are not processed again), filtering files based\non the hashcode of the file names in the presence of partitioning so that each file is\nprocessed by a unique partit [...]
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/fsInputOperator/#use-cases",
+            "text": "This operator is suitable for use in an environment where small to medium sized files are\ndeposited in a specific directory on a regular basis. For very large files a better alternative\nis the  FileSplitter  and  BlockReader  combination since they allow such files to be processed\nby multiple partitions to achieve higher throughput. Additionally, files which are continually\nmodified by other processes are not suitable for processing with this operator since they  [...]
+            "title": "Use Cases"
+        },
+        {
+            "location": "/operators/fsInputOperator/#how-to-use",
+            "text": "The tuple type in the abstract class is a generic parameter.\nConcrete subclasses need to choose an appropriate class (such as  String  or  byte[] ) for the\ngeneric parameter and also implement a couple of abstract methods:  readEntity()  to read\nthe next tuple from the currently open file and  emit()  to process the next tuple.  In principle, no ports need be defined in the rare case that the operator simply writes\ntuples to some external sink or merely maintains [...]
+            "title": "How to Use?"
+        },
+        {
+            "location": "/operators/fsInputOperator/#partitioning",
+            "text": "",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/fsInputOperator/#static-partitioning",
+            "text": "Configure parameter  partitionCount  to define the desired number of initial partitions\n(4 in this example).  <property>\n  <name>dt.operator.{OperatorName}.prop.partitionCount</name>\n  <value>4</value>\n</property>  where  {OperatorName}  is the name of the input operator.",
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/fsInputOperator/#dynamic-partitioning",
+            "text": "Dynamic partitioning -- changing the number of partitions of one or more operators\nin a running application -- can be achieved in multiple ways:\n- Use the command line tool  apex  or the UI console to change the value of the\n   partitionCount  property of the running operator. This change is detected in\n   processStats()  (which is invoked periodically by the platform) where, if the\n  current partition count ( currentPartitions ) and the desired partition count\ [...]
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/fsInputOperator/#operator-information",
+            "text": "Operator location:  malhar-library  Available since:  1.0.2  Operator state:  Stable  Java Packages:  Operator:  com.datatorrent.lib.io.fs.AbstractFileInputOperator",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/fsInputOperator/#abstractfileinputoperator",
+            "text": "This is the abstract implementation that, as noted above, scans a single directory.\nIt can be extended to modify functionality or add new capabilities. For example, the\ndirectory scanner can be overridden to monitor multiple directories.  This  example demonstrates how to do that.\nAs noted in the overview above, this class has no ports, so concrete subclasses will need to\nprovide them if necessary.",
+            "title": "AbstractFileInputOperator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports",
+            "text": "This operator has no ports.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#abstract-methods",
+            "text": "As described above, concrete subclasses need to provide implementations for these two\nmethods:  void emit(T tuple);\nT readEntity();  Examples of implementations are in the  LineByLineFileInputOperator  operator and also in\nthe example at the end of this guide.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/fsInputOperator/#derived-classes",
+            "text": "",
+            "title": "Derived Classes"
+        },
+        {
+            "location": "/operators/fsInputOperator/#1-abstractftpinputoperator",
+            "text": "The class is used to read files from FTP file system. As for the above abstract class, concrete\nsubclasses need to implement the readEntity  and emit  methods.",
+            "title": "1. AbstractFTPInputOperator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports_1",
+            "text": "This operator has no ports.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#2-ftpstringinputoperator",
+            "text": "This class extends AbstractFTPInputOperator and  implements abstract methods to read files available on FTP file system line by line.",
+            "title": "2. FTPStringInputOperator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports_2",
+            "text": "Port  Description  Type  Mandatory      output  Tuples that are read from file are emitted on this port  String  Yes",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#3-abstractparquetfilereader",
+            "text": "Reads Parquet files from input directory using GroupReadSupport. Derived classes need to implement  convertGroup(Group)  method to convert Group to other type. Also it should implement   readEntity()  and  emit(T)  methods.",
+            "title": "3. AbstractParquetFileReader"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports_3",
+            "text": "This operator has no ports.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#4-abstractthroughputfileinputoperator",
+            "text": "This operator extends  AbstractFileInputOperator  by providing the capability to partition\ndynamically based the file backlog. The user can set the preferred number of pending files per operator as well as the maximum number of operators and define a re-partition interval. If a physical operator runs out of files to process and an amount of time greater than or equal to the repartition interval has passed then a new number of operators are created to accommodate the [...]
+            "title": "4. AbstractThroughputFileInputOperator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports_4",
+            "text": "This operator has no ports.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#5-linebylinefileinputoperator",
+            "text": "As mentioned in the overview above, this operator defines a single output port; it reads files\nas lines and emits them as Java Strings on the output port. The output port  must  be connected.\nLines are extracted using the Java  BufferedReader  class and the default character encoding.\nAn example illustrating the use of a custom encoding (such as UTF_8) is provided below",
+            "title": "5. LineByLineFileInputOperator"
+        },
+        {
+            "location": "/operators/fsInputOperator/#properties_2",
+            "text": "This operator defines no additional properties beyond those defined in the parent class .",
+            "title": "Properties"
+        },
+        {
+            "location": "/operators/fsInputOperator/#ports_5",
+            "text": "Port  Description  Type  Mandatory      output  Tuples that are read from file are emitted on this port  String  Yes",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fsInputOperator/#example-implementation-using-a-custom-character-encoding",
+            "text": "This example demonstrates how to extend the  AbstractFileInputOperator  to read\nUTF-8 encoded data.  public class EncodedDataReader extends AbstractFileInputOperator<String>\n{\n  public final transient DefaultOutputPort<String> output = new DefaultOutputPort<>();\n  protected transient BufferedReader br;\n\n  protected InputStream openFile(Path path) throws IOException\n  {\n    InputStream is = super.openFile(path);\n    br = new BufferedReader(new InputStreamRead [...]
+            "title": "Example Implementation Using a Custom Character Encoding"
+        },
+        {
+            "location": "/operators/fsInputOperator/#common-implementation-scenarios",
+            "text": "Sometimes, downstream operators need to know which file each tuple came from; there are a\nnumber of ways of achieving this, each with its own tradeoffs. Some alternatives:   If the generic tuple type is a String, each tuple can be prefixed with the file name\n  with a suitable separator, for example:  foo.txt: first line . This works but\n  has obvious additional costs in both processing (to parse out the two pieces of each\n  tuple) and network bandwidth utilizatio [...]
+            "title": "Common Implementation Scenarios"
+        },
+        {
+            "location": "/operators/file_output/",
+            "text": "AbstractFileOutputOperator\n\n\nThe abstract file output operator in Apache Apex Malhar library \u2014 \nAbstractFileOutputOperator\n writes streaming data to files. The main features of this operator are:\n\n\n\n\nPersisting data to files.\n\n\nAutomatic rotation of files based on:\n\n  a. maximum length of a file.\n\n  b. time-based rotation where time is specified using a count of application windows.\n\n\nFault-tolerance.\n\n\nCompression and encryption of data b [...]
+            "title": "File Output"
+        },
+        {
+            "location": "/operators/file_output/#abstractfileoutputoperator",
+            "text": "The abstract file output operator in Apache Apex Malhar library \u2014  AbstractFileOutputOperator  writes streaming data to files. The main features of this operator are:   Persisting data to files.  Automatic rotation of files based on: \n  a. maximum length of a file. \n  b. time-based rotation where time is specified using a count of application windows.  Fault-tolerance.  Compression and encryption of data before it is persisted.   In this tutorial we will cover [...]
+            "title": "AbstractFileOutputOperator"
+        },
+        {
+            "location": "/operators/file_output/#persisting-data-to-files",
+            "text": "The principal function of this operator is to persist tuples to files efficiently. These files are created under a specific directory on the file system. The relevant configuration item is:  filePath : path specifying the directory where files are written.  Different types of file system that are implementations of  org.apache.hadoop.fs.FileSystem  are supported. The file system instance which is used for creating streams is constructed from the  filePath  URI.  File [...]
+            "title": "Persisting data to files"
+        },
+        {
+            "location": "/operators/file_output/#ports",
+            "text": "input : the input port on which tuples to be persisted are received.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/file_output/#streamscache",
+            "text": "This transient state caches output streams per file in memory. The file to which the data is appended may change with incoming tuples. It will be highly inefficient to keep re-opening streams for a file just because tuples for that file are interleaved with tuples for another file. Therefore, the operator maintains a cache of limited size with open output streams.  streamsCache  is of type  com.google.common.cache.LoadingCache . A  LoadingCache  has an attached  Cach [...]
+            "title": "streamsCache"
+        },
+        {
+            "location": "/operators/file_output/#cacheloader",
+            "text": "streamsCache  is created with a  CacheLoader  that opens an  FSDataOutputStream  for a file which is not in the cache. The output stream is opened in either  append  or  create  mode and the basic logic to determine this is explained by the simple diagram below.   This process gets complicated when fault-tolerance (writing to temporary files)  and rotation is added.  Following are few configuration items used for opening the streams:   replication : specifies the rep [...]
+            "title": "CacheLoader"
+        },
+        {
+            "location": "/operators/file_output/#removallistener",
+            "text": "A  Guava  cache also allows specification of removal listener which can perform some operation when an entry is removed from the cache. Since  streamsCache  is of limited size and also has time-based expiry enabled, it is imperative that when a stream is evicted from the cache it is closed properly. Therefore, we attach a removal listener to  streamsCache  which closes the stream when it is evicted.",
+            "title": "RemovalListener"
+        },
+        {
+            "location": "/operators/file_output/#setupoperatorcontext-context",
+            "text": "During setup the following main tasks are performed:   FileSystem instance is created.  The cache of streams is created.  Files are recovered (see Fault-tolerance section).  Stray part files are cleaned (see Automatic rotation section).",
+            "title": "setup(OperatorContext context)"
+        },
+        {
+            "location": "/operators/file_output/#automatic-rotation",
+            "text": "In a streaming application where data is being continuously processed, when this output operator is used, data will be continuously written to an output file. The users may want to be able to take the data from time to time to use it, copy it out of Hadoop or do some other processing. Having all the data in a single file makes it difficult as the user needs to keep track of how much data has been read from the file each time so that the same data is not read again. A [...]
+            "title": "Automatic rotation"
+        },
+        {
+            "location": "/operators/file_output/#part-filename",
+            "text": "The filename for a part file is formed by using the original file name and the part number. The part number starts from 0 and is incremented each time a new part file created. The default filename has the format, assuming origfile represents the original filename and partnum represents the part number,  origfile.partnum  This naming scheme can be changed by the user. It can be done so by overriding the following method  protected String getPartFileName(String fileNam [...]
+            "title": "Part filename"
+        },
+        {
+            "location": "/operators/file_output/#mechanisms",
+            "text": "The user has a couple of ways to specify when a file gets rotated. First is based on size and second on time. In the first case the files are limited by size and in the second they are rotated by time.",
+            "title": "Mechanisms"
+        },
+        {
+            "location": "/operators/file_output/#size-based",
+            "text": "With size based rotation the user specifies a size limit. Once the size of the currently file reaches this limit the file is rotated. The size limit can be specified by setting the following property  maxLength  Like any other property this can be set in Java application code or in the property file.",
+            "title": "Size Based"
+        },
+        {
+            "location": "/operators/file_output/#time-based",
+            "text": "In time based rotation user specifies a time interval. This interval is specified as number of application windows. The files are rotated periodically once the specified number of application windows have elapsed. Since the interval is application window based it is not always exactly constant time. The interval can be specified using the following property  rotationWindows",
+            "title": "Time Based"
+        },
+        {
+            "location": "/operators/file_output/#setupoperatorcontext-context_1",
+            "text": "When an operator is being started there may be stray part files and they need to be cleaned up. One common scenario, when these could be present, is in the case of failure, where a node running the operator failed and a previous instance of the operator was killed. This cleanup and other initial processing for the part files happens in the operator setup. The following diagram describes this process",
+            "title": "setup(OperatorContext context)"
+        },
+        {
+            "location": "/operators/file_output/#fault-tolerance",
+            "text": "There are two issues that should be addressed in order to make the operator fault-tolerant:    The operator flushes data to the filesystem every application window. This implies that after a failure when the operator is re-deployed and tuples of a window are replayed, then duplicate data will be saved to the files. This is handled by recording how much the operator has written to each file every window in a state that is checkpointed and truncating files back to the  [...]
+            "title": "Fault-tolerance"
+        },
+        {
+            "location": "/operators/file_output/#checkpointed-states-needed-for-fault-tolerance",
+            "text": "endOffsets : contains the size of each file as it is being updated by the operator. It helps the operator to restore a file during recovery in operator  setup(...)  and is also used while loading a stream to find out if the operator has seen a file before.    fileNameToTmpName : contains the name of the temporary file per actual file. It is needed because the name of a temporary file is random. They are named based on the timestamp when the stream is created. During  [...]
+            "title": "Checkpointed states needed for fault-tolerance"
+        },
+        {
+            "location": "/operators/file_output/#recovering-files",
+            "text": "When the operator is re-deployed, it checks in its  setup(...)  method if the state of a file which it has seen before the failure is consistent with the file's state on the file system, that is, the size of the file on the file system should match the size in the  endOffsets . When it doesn't the operator truncates the file.  For example, let's say the operator wrote 100 bytes to test1.txt by the end of window 10. It wrote another 20 bytes by the end of window 12 bu [...]
+            "title": "Recovering files"
+        },
+        {
+            "location": "/operators/file_splitter/",
+            "text": "File Splitter\n\n\nThis is a simple operator whose main function is to split a file virtually and create metadata describing the files and the splits. \n\n\nWhy is it needed?\n\n\nIt is a common operation to read a file and parse it. This operation can be parallelized by having multiple partitions of such operators and each partition operating on different files. However, at times when a file is large then a single partition reading it can become a bottleneck.\nIn th [...]
+            "title": "File Splitter"
+        },
+        {
+            "location": "/operators/file_splitter/#file-splitter",
+            "text": "This is a simple operator whose main function is to split a file virtually and create metadata describing the files and the splits.",
+            "title": "File Splitter"
+        },
+        {
+            "location": "/operators/file_splitter/#why-is-it-needed",
+            "text": "It is a common operation to read a file and parse it. This operation can be parallelized by having multiple partitions of such operators and each partition operating on different files. However, at times when a file is large then a single partition reading it can become a bottleneck.\nIn these cases, throughput can be increased if instances of the partitioned operator can read and parse non-overlapping sets of file blocks. This is where file splitter comes in handy.  [...]
+            "title": "Why is it needed?"
+        },
+        {
+            "location": "/operators/file_splitter/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/file_splitter/#abstractfilesplitter",
+            "text": "The abstract implementation defines the logic of processing  FileInfo . This comprises the following tasks -      building  FileMetadata  per file and emitting it. This metadata contains the file information such as filepath, no. of blocks in it, length of the file, all the block ids, etc.    creating  BlockMetadataIterator  from  FileMetadata . The iterator lazy-loads the block metadata when needed. We use an iterator because the no. of blocks in a file can be huge  [...]
+            "title": "AbstractFileSplitter"
+        },
+        {
+            "location": "/operators/file_splitter/#ports",
+            "text": "Declares only output ports on which file metadata and block metadata are emitted.   filesMetadataOutput: metadata for each file is emitted on this port.   blocksMetadataOutput: metadata for each block is emitted on this port.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/file_splitter/#abstract-methods",
+            "text": "FileInfo getFileInfo() : called from within the  process()  and provides the next file to process.    long getDefaultBlockSize() : provides the block size which is used when user hasn't configured the size.    FileStatus getFileStatus(Path path) : provides the  org.apache.hadoop.fs.FileStatus  instance for a path.",
+            "title": "Abstract methods"
+        },
+        {
+            "location": "/operators/file_splitter/#configuration",
+            "text": "blockSize : size of a block.  blocksThreshold : threshold on the number of blocks emitted by file splitter every window. This setting is used for throttling the work for downstream operators.",
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/file_splitter/#filesplitterbase",
+            "text": "Simple operator that receives tuples of type  FileInfo  on its  input  port.  FileInfo  contains the information (currently just the file path) about the file which this operator uses to create file metadata and block metadata.",
+            "title": "FileSplitterBase"
+        },
+        {
+            "location": "/operators/file_splitter/#example-application",
+            "text": "This is a simple sub-dag that demonstrates how FileSplitterBase can be plugged into an application.  The upstream operator emits tuples of type  FileInfo  on its output port which is connected to splitter input port. The downstream receives tuples of type  BlockMetadata.FileBlockMetadata  from the splitter's block metadata output port.  public class ApplicationWithBaseSplitter implements StreamingApplication\n{\n  @Override\n  public void populateDAG(DAG dag, Configu [...]
+            "title": "Example application"
+        },
+        {
+            "location": "/operators/file_splitter/#ports_1",
+            "text": "Declares an input port on which it receives tuples from the upstream operator. Output ports are inherited from AbstractFileSplitter.   input: non optional port on which tuples of type  FileInfo  are received.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/file_splitter/#configuration_1",
+            "text": "file : path of the file from which the filesystem is inferred. FileSplitter creates an instance of  org.apache.hadoop.fs.FileSystem  which is why this path is needed.     FileSystem.newInstance(new Path(file).toUri(), new Configuration());  The fs instance is then used to fetch the default block size and  org.apache.hadoop.fs.FileStatus  for each file path.",
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/file_splitter/#filesplitterinput",
+            "text": "This is an input operator that discovers files itself. The scanning of the directories for new files is asynchronous which is handled by  TimeBasedDirectoryScanner . The function of TimeBasedDirectoryScanner is to periodically scan specified directories and find files which were newly added or modified. The interaction between the operator and the scanner is depicted in the diagram below.",
+            "title": "FileSplitterInput"
+        },
+        {
+            "location": "/operators/file_splitter/#example-application_1",
+            "text": "This is a simple sub-dag that demonstrates how FileSplitterInput can be plugged into an application.   Splitter is the input operator here that sends block metadata to the downstream BlockReader.    @Override\n  public void populateDAG(DAG dag, Configuration configuration)\n  {\n    FileSplitterInput input = dag.addOperator(\"Input\", new FileSplitterInput());\n    FSSliceReader reader = dag.addOperator(\"Block Reader\", new FSSliceReader());\n    ...\n    dag.addStr [...]
+            "title": "Example application"
+        },
+        {
+            "location": "/operators/file_splitter/#ports_2",
+            "text": "Since it is an input operator there are no input ports and output ports are inherited from AbstractFileSplitter.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/file_splitter/#configuration_2",
+            "text": "scanner : the component that scans directories asynchronously. It is of type  com.datatorrent.lib.io.fs.FileSplitter.TimeBasedDirectoryScanner . The basic implementation of TimeBasedDirectoryScanner can be customized by users.     a.  files : comma separated list of directories to scan.    b.  recursive : flag that controls whether the directories should be scanned recursively.    c.  scanIntervalMillis : interval specified in milliseconds after which another scan it [...]
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/file_splitter/#handling-of-split-records",
+            "text": "Splitting of files to create tasks for downstream operator needs to be a simple operation that doesn't consume a lot of resources and is fast. This is why the file splitter doesn't open files to read. The downside of that is if the file contains records then a record may split across adjacent blocks. Handling of this is left to the downstream operator.  We have created Block readers in Apex-malhar library that handle line splits efficiently. The 2 line readers-  Abst [...]
+            "title": "Handling of split records"
+        },
+        {
+            "location": "/operators/filter/",
+            "text": "Filter\n\n\nOperator Objective\n\n\nThis operator receives an POJO (\nPlain Old Java Object\n) as an incoming tuple\nand based on the filter condition it emits filtered tuples on one output port and rest on another output port.\n\n\nFilter operator supports quasi Java expressions to specify filter rule.\n\n\nFilter operator does not hold any state and is \nidempotent\n, \nfault-tolerant\n and \nstatically/dynamically partitionable\n.\n\n\nOperator Usecase\n\n\n\n\nCu [...]
+            "title": "Filter"
+        },
+        {
+            "location": "/operators/filter/#filter",
+            "text": "",
+            "title": "Filter"
+        },
+        {
+            "location": "/operators/filter/#operator-objective",
+            "text": "This operator receives an POJO ( Plain Old Java Object ) as an incoming tuple\nand based on the filter condition it emits filtered tuples on one output port and rest on another output port.  Filter operator supports quasi Java expressions to specify filter rule.  Filter operator does not hold any state and is  idempotent ,  fault-tolerant  and  statically/dynamically partitionable .",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/filter/#operator-usecase",
+            "text": "Customer data  usually contains a field customer category/segment. One wants some analysis to be done for specific customer segment. One could use this filter operator to filter the records based on segment for some analysis for specific customer segment.   Log data  processing pipeline may want to filter logs from specific machine/router/switch.",
+            "title": "Operator Usecase"
+        },
+        {
+            "location": "/operators/filter/#operator-information",
+            "text": "Operator location:  malhar-library  Available since:  3.5.0  Operator state:  Evolving  Java Packages:  Operator:  com.datatorrent.lib.filter.FilterOperator",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/filter/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/filter/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      port.input.attr.TUPLE_CLASS  TUPLE_CLASS attribute on input port indicates the class of POJO which incoming tuple  Class or FQCN  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/filter/#ports",
+            "text": "Port  Description  Type  Connection Required      input  Tuple which needs to be filtered are received on this port  Object (POJO)  Yes    truePort  Tuples which satisfies  condition  are emitted on this port  Object (POJO)  No    falsePort  Tuples which does not satisfy  condition  are emitted on this port  Object (POJO)  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/filter/#limitations",
+            "text": "Current  FilterOperator  has following limitation:   APEXMALHAR-2175  : Filter condition is not able to correctly handle java reserved words.",
+            "title": "Limitations"
+        },
+        {
+            "location": "/operators/filter/#example",
+            "text": "Example for  FilterOperator  can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/filter",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/",
+            "text": "Fixed Width Parser Operator\n\n\nOperator Objective\n\n\nThis operator is designed to parse fixed width records and construct a map or concrete java class also known as \n\"POJO\"\n out of it. User needs to provide the schema to describe the fixed width data. The incoming records will be parsed according to the schema and either a map or a POJO (or both) is emitted.\nInvalid records will be emitted on the error port along with an error message.\n\n\nNote\n: field nam [...]
+            "title": "Fixed Width Parser"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#fixed-width-parser-operator",
+            "text": "",
+            "title": "Fixed Width Parser Operator"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#operator-objective",
+            "text": "This operator is designed to parse fixed width records and construct a map or concrete java class also known as  \"POJO\"  out of it. User needs to provide the schema to describe the fixed width data. The incoming records will be parsed according to the schema and either a map or a POJO (or both) is emitted.\nInvalid records will be emitted on the error port along with an error message.  Note : field names in the schema must match field names of the POJO and must be  [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#operator-information",
+            "text": "Operator location: malhar-contrib  Available since: 3.8.0  Operator state: Evolving  Java Package: com.datatorrent.contrib.parser.FixedWidthParser",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted  Class  Yes     User can set this property in  properties.xml  in the following way:\nIn the examples below, {OperatorName} is the name of the Operator, {ApplicationName} is the name of the application and \"com.datatorrent.tutorial.fixedwidthparser.Ad\" is the fully qualified name of the Tuple class  <property [...]
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that needs to be parsed are received on this port  byte[]  Yes    out  Valid Tuples that are emitted as pojo  Object (POJO)  No    parsedOutput  Valid Tuples that are emitted as maps  Map  No    err  Invalid Tuples are emitted with error message  KeyValPair <String, String>  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#partitioning",
+            "text": "Fixed Width Parser is both statically and dynamically partitionable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#static-partitioning",
+            "text": "Static partitioning can be achieved by specifying the partitioner and number of partitions in the populateDAG() method.  FixedWidthParser fixedWidthParser = dag.addOperator(\"fixedWidthParser\", FixedWidthParser.class);\nStatelessPartitioner<FixedWidthParser> partitioner1 = new StatelessPartitioner<FixedWidthParser>(2);\ndag.setAttribute(fixedWidthParser, Context.OperatorContext.PARTITIONER, partitioner1);  Static partitioning can also be achieved by specifying the p [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#dynamic-partioning",
+            "text": "FixedWidthParser can be dynamically partitioned using out-of-the-box partitioner:",
+            "title": "Dynamic Partioning"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#throughput-based",
+            "text": "Following code can be added to  populateDAG()  method of application to dynamically partition FixedWidthParser:  FixedWidthParser fixedWidthParser = dag.addOperator(\"fixedWidthParser\", FixedWidthParser.class);\nStatelessThroughputBasedPartitioner<FixedWidthParser> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(conf.getLong(\"dt.cooldown\", 10000));\npartitioner.setMaximumEvents(conf.getLong(\"dt.maxThroughput\", 30000));\n [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/fixedWidthParserOperator/#example",
+            "text": "Example for Fixed Width Parser can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/parser",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/ftpInputOperator/",
+            "text": "FTP Input Operator\n\n\nOperator Objective\n\n\nThis operator(\nAbstractFTPInputOperator\n) is designed to scan a directory from an FTP server for files, read\nand split file content into tuples such as lines or blocks of bytes, and finally\nemit them on the output port for further processing by downstream operators.\nThe operator extends the \nAbstractFileInputOperator\n. It overrides the\ngetFSInstance() method and returns an instance of the FTPFileSystem\n(\norg.a [...]
+            "title": "FTP Input Operator"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#ftp-input-operator",
+            "text": "",
+            "title": "FTP Input Operator"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#operator-objective",
+            "text": "This operator( AbstractFTPInputOperator ) is designed to scan a directory from an FTP server for files, read\nand split file content into tuples such as lines or blocks of bytes, and finally\nemit them on the output port for further processing by downstream operators.\nThe operator extends the  AbstractFileInputOperator . It overrides the\ngetFSInstance() method and returns an instance of the FTPFileSystem\n( org.apache.hadoop.fs.ftp.FTPFileSystem )",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#operator-information",
+            "text": "Operator location :  malhar-lib  Available since :  2.0.0  Java Package :  com.datatorrent.lib.io",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#ports",
+            "text": "Because this is an input operator, there are no input ports.     Port  Description  Type  Mandatory      output  output port on which data is emitted  String  Yes",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#configuration",
+            "text": "Property  Description  Type  Mandatory  Default Value      host  the hostname of the FTP Server  String  Yes  N/A    source  the directory path from where to scan and read files  String  Yes  N/A    username  the username for authenticating against the FTP server. This is an optional property and can be skipped when anonymous FTP is enabled  String  Yes  N/A    password  the password to be used in conjunction with the above username  String  Yes  N/A",
+            "title": "Configuration"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#partitioning",
+            "text": "",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#static-partitioning",
+            "text": "Configure parameter  partitionCount  to define the desired number of initial partitions\n(4 in this example).  <property>\n  <name>dt.operator.{OperatorName}.prop.partitionCount</name>\n  <value>4</value>\n</property>  Alternatively, this can be changed in the application code by setting the operator property  partitionCount  to the desired number of partitions:  FTPStringInputOperator reader = dag.addOperator(\"Reader\", new FTPStringInputOperator());\nreader.setPar [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#dynamic-partitioning",
+            "text": "Dynamic partitioning -- changing the number of partitions of one or more operators\nin a running application -- can be achieved in multiple ways:\n- Use the command line tool  apex  or the UI console to change the value of the\n   partitionCount  property of the running operator. This change is detected in\n   processStats()  (which is invoked periodically by the platform) where, if the\n  current partition count ( currentPartitions ) and the desired partition count\ [...]
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/ftpInputOperator/#example-application",
+            "text": "An example application for the FTP input operator can be found at  https://github.com/apache/apex-malhar/tree/master/examples/ftp",
+            "title": "Example application"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/",
+            "text": "JDBC Transactional POJO Output Operator\n\n\nOperator Objective\n\n\nThis operator receives an input stream of POJOs and inserts them as rows in a database table in a fault-tolerant way.\n\n\nOverview\n\n\nThe main features of this operator (\nAbstractJdbcTransactionableOutputOperator\n) are persisting data to the database table and fault tolerance. This operator creates a transaction at the start of each window, executes batches of SQL updates, and closes the transa [...]
+            "title": "Jdbc Output Operator"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#jdbc-transactional-pojo-output-operator",
+            "text": "",
+            "title": "JDBC Transactional POJO Output Operator"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#operator-objective",
+            "text": "This operator receives an input stream of POJOs and inserts them as rows in a database table in a fault-tolerant way.",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#overview",
+            "text": "The main features of this operator ( AbstractJdbcTransactionableOutputOperator ) are persisting data to the database table and fault tolerance. This operator creates a transaction at the start of each window, executes batches of SQL updates, and closes the transaction at the end of the window. Each tuple corresponds to an SQL update statement. The operator groups the updates in a batch and submits them with one call to the database. Batch processing improves performa [...]
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#operator-information",
+            "text": "Operator location:  malhar-library  Available since:  0.9.4  Java Packages:  Operator:  com.datatorrent.lib.db.jdbc.AbstractJdbcTransactionableOutputOperator",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#how-to-use",
+            "text": "Concrete subclasses need to implement a couple of abstract methods (if not using AbstractJdbcPOJOOutputOperator):  setStatementParameters(PreparedStatement statement, T tuple)  to set the parameter of the insert/update statement (which is a PreparedStatement) with values from the tuple and  getUpdateCommand()  to return the SQL statement to update a tuple in the database.  Note that subclasses of AbstractJdbcPOJOOutputOperator need not define these methods since they [...]
+            "title": "How to Use?"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#abstract-methods",
+            "text": "These methods are defined as abstract in AbstractJdbcTransactionableOutputOperator  void setStatementParameters(PreparedStatement statement, T tuple) :Sets the parameter of the insert/update statement with values from the tuple. String getUpdateCommand() :Gets the statement which insert/update the table in the database.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#abstractjdbcpojooutputoperator",
+            "text": "This is the abstract implementation extending the functionality of AbstractJdbcTransactionableOutputOperator that serves as base class for inserting rows in a table using a JDBC store. It has the definition for the abstract methods in AbstractJdbcTransactionableOutputOperator. It can be further extended to modify functionality or add new capabilities. This class has an input port to recieve the records in the form of tuples, so concrete subclasses won't need to provi [...]
+            "title": "AbstractJdbcPOJOOutputOperator"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#platform-attributes-that-influence-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      TUPLE_CLASS  TUPLE_CLASS attribute on input port which tells operator the class of POJO which is being received  Class  Yes     Those attributes can be set like this:  <property>\n  <name>dt.operator.{OperatorName}.port.input.attr.TUPLE_CLASS</name>    \n  <value>com.example.mydtapp.PojoEvent</value>\n</property>  A concrete implementation is provided in Malhar as  JdbcPOJOInsertOutputOperator .  The incoming tuples will b [...]
+            "title": "Platform Attributes that influence operator behavior"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#features",
+            "text": "The operator is  idempotent ,  fault-tolerant  and  statically partitionable .",
+            "title": "Features"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#partitioning-of-jdbc-output-operator",
+            "text": "",
+            "title": "Partitioning of JDBC Output Operator"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#static-partitioning",
+            "text": "Only static partitioning is supported for this operator.  Static partitioning can be achieved by specifying the partitioner and number of partitions in the populateDAG() method    JdbcPOJOInsertOutputOperator jdbcPOJOInsertOutputOperator = dag.addOperator(\"jdbcPOJOInsertOutputOperator\", JdbcPOJOInsertOutputOperator.class);\n  StatelessPartitioner<JdbcPOJOInsertOutputOperator> partitioner1 = new StatelessPartitioner<JdbcPOJOInsertOutputOperator>(2);\n  dag.setAttrib [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#dynamic-partitioning",
+            "text": "Not supported.",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/AbstractJdbcTransactionableOutputOperator/#example",
+            "text": "An example application using this operator can be found  here . This example shows how to read files from HDFS, parse into POJOs and then insert into a table in MySQL.",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/",
+            "text": "JDBC Poller Input Operator\n\n\nOperator Objective\n\n\nThis operator scans JDBC database table in parallel fashion. This operator is added to address common input operator problems like,\n\n\n\n\n\n\nAs discussed in \nDevelopment Best Practices\n,\n    the operator callbacks such as \nbeginWindow()\n, \nendWindow()\n, \nemitTuples()\n, etc.\n    (which are invoked by the main operator thread)\n    are required to return quickly, well within the default streaming win [...]
+            "title": "JDBC Poller Input"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#jdbc-poller-input-operator",
+            "text": "",
+            "title": "JDBC Poller Input Operator"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#operator-objective",
+            "text": "This operator scans JDBC database table in parallel fashion. This operator is added to address common input operator problems like,    As discussed in  Development Best Practices ,\n    the operator callbacks such as  beginWindow() ,  endWindow() ,  emitTuples() , etc.\n    (which are invoked by the main operator thread)\n    are required to return quickly, well within the default streaming window duration of\n    500ms. This requirement can be an issue when retrievi [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#assumption",
+            "text": "Assumption is that there is an ordered column using which range queries can be formed. That means database has a column or combination of columns which has unique constraint as well as every newly inserted record should have column value more than max value in that column, as we poll only appended records.",
+            "title": "Assumption"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#use-cases",
+            "text": "Ingest large database tables. An example application that copies database contents to HDFS is available  here .",
+            "title": "Use cases"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#how-to-use",
+            "text": "The tuple type in the abstract class is a generic parameter. Concrete subclasses need to choose an appropriate class (such as String or an appropriate concrete java class, having no-argument constructor so that it can be serialized using Kryo). Also implement a couple of abstract methods:  getTuple(ResultSet)  to convert database rows to objects of concrete class and  emitTuple(T)  to emit the tuple.  In principle, no ports need be defined in the rare case that the o [...]
+            "title": "How to Use?"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#partitioning-of-jdbc-poller",
+            "text": "",
+            "title": "Partitioning of JDBC Poller"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#static-partitioning",
+            "text": "Only static partitioning is supported for JDBC Poller Input Operator. Configure parameter  partitionCount  to define the desired number of initial partitions (4 in this example). Note : An additional partition will be created to poll newly added records, so the total number of partitions will always be 1 + partitionCount.    <property>\n    <name>apex.operator.{OperatorName}.prop.partitionCount</name>\n    <value>4</value>\n  </property>  where {OperatorName} is the  [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#dynamic-partitioning",
+            "text": "Not supported.",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#operator-information",
+            "text": "Operator location:  malhar-library  Available since:  3.5.0  Operator state:  Evolving  Java Packages:  AbstractJdbcPollInputOperator   JDBC Poller is  idempotent ,  fault-tolerant  and  statically partitionable .",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#abstractjdbcpollinputoperator",
+            "text": "This is the abstract implementation that serves as base class for polling messages from JDBC store. It can be extended to modify functionality or add new capabilities. This class doesn\u2019t have any ports, so concrete subclasses will need to provide them if necessary.",
+            "title": "AbstractJdbcPollInputOperator"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#abstract-methods",
+            "text": "void emitTuple(T tuple) : Abstract method that emits tuple extracted from JDBC store.  T getTuple(ResultSet result) : Abstract method to extract the tuple from the JDBC ResultSet object and convert it to the required type (T).",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#concrete-classes",
+            "text": "",
+            "title": "Concrete Classes"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#1-jdbcpojopollinputoperator",
+            "text": "This implementation converts JDBC store records to  POJO  and emits POJO on output port.",
+            "title": "1. JdbcPOJOPollInputOperator"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#platform-attributes-that-influence-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      outputPort.TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted  Class or FQCN (Fully Qualified Class Name)  Yes",
+            "title": "Platform Attributes that influence operator behavior"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#ports",
+            "text": "Port  Description  Type  Mandatory      outputPort  Tuples that are read from JDBC store are emitted from on this port  Object (POJO)  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#2-jdbcpollinputoperator",
+            "text": "This implementation converts JDBC store records to comma separated CSV records. This operator is normally used when you just want to copy the data from database to somewhere else and don't want to do much of processing.",
+            "title": "2. JdbcPollInputOperator"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#ports_1",
+            "text": "Port  Description  Type  Mandatory      outputPort  Tuples that are read from JDBC store are emitted on this port  String  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/jdbcPollInputOperator/#limitations",
+            "text": "Out of order insertion/deletion won't be supported.",
+            "title": "Limitations"
+        },
+        {
+            "location": "/operators/jmsInputOperator/",
+            "text": "JMS INPUT OPERATOR\n\n\nIntroduction: About the JMS Input Operator\n\n\nThe JMS input operator consumes data from a messaging system using the JMS client API. JMS not being a communication protocol, the operator needs an underlying JMS client API library to talk to a messaging system. Currently the operator has been tested with the Amazon SQS and Apache ActiveMQ System brokers via their respective JMS client API libraries.\n\n\nWhy is it needed ?\n\n\nYou will need t [...]
+            "title": "JMS Input"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#jms-input-operator",
+            "text": "",
+            "title": "JMS INPUT OPERATOR"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#introduction-about-the-jms-input-operator",
+            "text": "The JMS input operator consumes data from a messaging system using the JMS client API. JMS not being a communication protocol, the operator needs an underlying JMS client API library to talk to a messaging system. Currently the operator has been tested with the Amazon SQS and Apache ActiveMQ System brokers via their respective JMS client API libraries.",
+            "title": "Introduction: About the JMS Input Operator"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#why-is-it-needed",
+            "text": "You will need the operator to read data from a messaging system (e.g. Apache ActiveMQ) via the JMS client API. The operator supports both the publish-subscribe (topics) and point-to-point (queues) modes. The operator currently does not support partitioning and dynamic scalability.",
+            "title": "Why is it needed ?"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#jmsbase",
+            "text": "This class encapsulates various JMS properties and behaviors and maintains connections with the JMS broker. This is the base class for JMS input and output adaptor operators. Operators should not directly subclass JMSBase but one of the JMS input or output operators.",
+            "title": "JMSBase"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#abstractjmsinputoperator",
+            "text": "This abstract implementation serves as the base class for consuming generic messages from an external messaging system. Concrete subclasses implement conversion and emit methods to emit tuples for a concrete type. JMSStringInputOperator is one such subclass in the library used for String messages. JMSObjectInputOperator is another one used for multiple message types where the user has the ability to get String, byte array, Map or POJO messages on the respective outpu [...]
+            "title": "AbstractJMSInputOperator"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#configuration-parameters",
+            "text": "Common configuration parameters are described here.      Parameter  Description    windowDataManager  This is an instance of  WindowDataManager  that implements idempotency. Idempotency ensures that an operator will process the same set of messages in a window before and after a failure. For example, say the operator completed window 10 and failed before or during window 11. If the operator gets restored at window 10, it will replay the messages of window 10 which we [...]
+            "title": "Configuration Parameters"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#abstract-methods",
+            "text": "The following abstract methods need to be implemented by concrete subclasses.  T convert(Message message): This method converts a JMS Message object to type T.  void emit(T payload): This method emits a tuple given the payload extracted from a JMS message.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#concrete-classes",
+            "text": "JMSStringInputOperator :\nThis class extends AbstractJMSInputOperator to deliver String payloads in the tuple.    JMSObjectInputOperator:\nThis class extends AbstractJMSInputOperator to deliver String, byte array, Map or POJO payloads in the tuple.",
+            "title": "Concrete Classes"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#application-examples",
+            "text": "",
+            "title": "Application Examples"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#activemq-example",
+            "text": "The source code for the tutorial can be found here:  https://github.com/DataTorrent/examples/tree/master/tutorials/jmsActiveMQ  The following code snippet from the example illustrates how the DAG is created:    @Override\n  public void populateDAG(DAG dag, Configuration conf)\n  {\n    JMSStringInputOperator amqInput = dag.addOperator(\"amqIn\", \n        new JMSStringInputOperator());\n\n    LineOutputOperator out = dag.addOperator(\"fileOut\", new LineOutputOperato [...]
+            "title": "ActiveMQ Example"
+        },
+        {
+            "location": "/operators/jmsInputOperator/#sqs-example",
+            "text": "The source code for the tutorial can be found here:  https://github.com/DataTorrent/examples/tree/master/tutorials/jmsSqs  The following code snippet from the example illustrates how the DAG is created:   @Override\n public void populateDAG(DAG dag, Configuration conf)\n {\n\n   JMSStringInputOperator sqsInput = dag.addOperator(\"sqsIn\", \n       new JMSStringInputOperator());\n\n   MyConnectionFactoryBuilder factoryBuilder = new MyConnectionFactoryBuilder();\n\n    [...]
+            "title": "SQS Example"
+        },
+        {
+            "location": "/operators/jsonFormatter/",
+            "text": "Json Formatter\n\n\nOperator Objective\n\n\nPurpose of JsonFormatter is to consume Plain Old Java Object (\"POJO\") and write them as JSON.\nJson Formatter is \nidempotent\n, \nfault-tolerance\n & \nstatically/dynamically partitionable\n.\n\n\nClass Diagram\n\n\n\n\nOperator Information\n\n\n\n\nOperator location:\n_malhar-library\n\n\nAvailable since:\n3.2.0\n\n\nOperator state:\nEvolving\n\n\nJava Package:\ncom.datatorrent.lib.formatter.JsonFormatter\n\n\n\n\nPrope [...]
+            "title": "JSON Formatter"
+        },
+        {
+            "location": "/operators/jsonFormatter/#json-formatter",
+            "text": "",
+            "title": "Json Formatter"
+        },
+        {
+            "location": "/operators/jsonFormatter/#operator-objective",
+            "text": "Purpose of JsonFormatter is to consume Plain Old Java Object (\"POJO\") and write them as JSON.\nJson Formatter is  idempotent ,  fault-tolerance  &  statically/dynamically partitionable .",
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/jsonFormatter/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/jsonFormatter/#operator-information",
+            "text": "Operator location: _malhar-library  Available since: 3.2.0  Operator state: Evolving  Java Package: com.datatorrent.lib.formatter.JsonFormatter",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/jsonFormatter/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/jsonFormatter/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      in.TUPLE_CLASS  TUPLE_CLASS attribute on input port which tells operator the class of incoming POJO  Class or FQCN  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/jsonFormatter/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that needs to be formatted are recieved on this port  Object (POJO)  Yes    out  Valid Tuples that are emitted as JSON  String  No    err  Invalid Tuples are emitted on this port  Object  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/jsonFormatter/#partitioning",
+            "text": "JSON Formatter is both statically and dynamically partitionable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/jsonFormatter/#static-partitioning",
+            "text": "This can be achieved in 2 ways   Specifying the partitioner and number of partitions in the populateDAG() method   JsonFormatter jsonFormatter = dag.addOperator(\"jsonFormatter\", JsonFormatter.class);\nStatelessPartitioner<JsonFormatter> partitioner1 = new StatelessPartitioner<JsonFormatter>(2);\ndag.setAttribute(jsonFormatter, Context.OperatorContext.PARTITIONER, partitioner1 );   Specifying the partitioner in properties file.    <property>\n   <name>dt.operator.{O [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/jsonFormatter/#dynamic-paritioning",
+            "text": "JsonFormatter can be dynamically partitioned using an out-of-the-box partitioner:",
+            "title": "Dynamic Paritioning"
+        },
+        {
+            "location": "/operators/jsonFormatter/#throughput-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partition JsonFormatter:  JsonFormatter jsonFormatter = dag.addOperator(\"jsonFormatter\", JsonFormatter.class);\nStatelessThroughputBasedPartitioner<JsonFormatter> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\npartitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\npartitioner.setMinimumEvent [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/jsonFormatter/#example",
+            "text": "Example for Json Formatter can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/parser",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/jsonFormatter/#advance-features",
+            "text": "JsonFormatter is based on  jackson-databind  and so users can make use of  annotations  in POJO class. Here are few annotations that are relavant while using JsonFormatter\n1.  @JsonProperty  : Sometimes POJOs contain properties that has different name from incoming POJOs.You can specify names as:  public class Ad{\n  @JsonProperty(\"desc\")\n  public String description;\n  public List<String> sizes;\n}   @JsonIgnore  : Sometimes POJOs contain properties that you do  [...]
+            "title": "Advance Features"
+        },
+        {
+            "location": "/operators/jsonParser/",
+            "text": "Json Parser\n\n\nOperator Objective\n\n\nPurpose of JsonParser operator is to parse JSON records and construct a Plain Old Java Object (\"POJO\") out of it. The operator also emits each record as JSONObject if the relevant output port is connected. User can also provide a schema describing JSON data to validate incoming JSON records. Valid records will be emitted as POJO / JSONObject while invalid ones are emitted on error port with the error message if the error por [...]
+            "title": "JSON Parser"
+        },
+        {
+            "location": "/operators/jsonParser/#json-parser",
+            "text": "",
+            "title": "Json Parser"
+        },
+        {
+            "location": "/operators/jsonParser/#operator-objective",
+            "text": "Purpose of JsonParser operator is to parse JSON records and construct a Plain Old Java Object (\"POJO\") out of it. The operator also emits each record as JSONObject if the relevant output port is connected. User can also provide a schema describing JSON data to validate incoming JSON records. Valid records will be emitted as POJO / JSONObject while invalid ones are emitted on error port with the error message if the error port is connected.  Json Parser is  idempote [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/jsonParser/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/jsonParser/#operator-information",
+            "text": "Operator location: malhar-contrib  Available since: 3.2.0  Operator state: Evolving  Java Package: com.datatorrent.contrib.parser.JsonParser",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/jsonParser/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/jsonParser/#platform-attributes-that-influences-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      out.TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which needs to be emitted. The name of the field members of the class must match with the names in incoming POJO. The operator ignores unknown properties.  Class or FQCN  Yes",
+            "title": "Platform Attributes that influences operator behavior"
+        },
+        {
+            "location": "/operators/jsonParser/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that needs to be parsed are recieved on this port  byte[]  Yes    out  Valid Tuples that are emitted as pojo. Tuples are converted to POJO only if the port is connected.  Object (POJO)  No    parsedOutput  Valid Tuples that are emitted as JSONObject. Tuples are converted to JSONObject only if the port is connected.  JSONObject  No    err  Invalid Tuples are emitted with error message. Invaid tuples are discarded if t [...]
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/jsonParser/#partitioning",
+            "text": "JSON Parser is both statically and dynamically partitionable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/jsonParser/#static-partitioning",
+            "text": "This can be achieved in 2 ways   Specifying the partitioner and number of partitions in the populateDAG() method   JsonParser jsonParser = dag.addOperator(\"jsonParser\", JsonParser.class);\nStatelessPartitioner<JsonParser> partitioner1 = new StatelessPartitioner<JsonParser>(2);\ndag.setAttribute(jsonParser, Context.OperatorContext.PARTITIONER, partitioner1 );   Specifying the partitioner in properties file.    <property>\n   <name>dt.operator.{OperatorName}.attr.PAR [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/jsonParser/#dynamic-paritioning",
+            "text": "JsonParser can be dynamically partitioned using an out-of-the-box partitioner:",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/jsonParser/#throughput-based",
+            "text": "Following code can be added to populateDAG method of application to dynamically partition JsonParser:  JsonParser jsonParser = dag.addOperator(\"jsonParser\", JsonParser.class);\nStatelessThroughputBasedPartitioner<JsonParser> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\npartitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\npartitioner.setMinimumEvents(conf.getLong(MIN [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/jsonParser/#example",
+            "text": "Example for Json Parser can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/parser",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/",
+            "text": "KAFKA INPUT OPERATOR\n\n\nIntroduction\n\n\nApache Kafka\n is a pull-based and distributed publish subscribe messaging system,\ntopics are partitioned and replicated across nodes. \n\n\nThe Kafka input operator consumes data from the partitions of a Kafka topic for processing in Apex. \nThe operator has the ability to automatically scale with the Kafka partitioning for high throughput. \nIt is fault-tolerant (consumer offset checkpointing) and guarantees idempotency  [...]
+            "title": "Kafka Input"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#kafka-input-operator",
+            "text": "",
+            "title": "KAFKA INPUT OPERATOR"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#introduction",
+            "text": "Apache Kafka  is a pull-based and distributed publish subscribe messaging system,\ntopics are partitioned and replicated across nodes.   The Kafka input operator consumes data from the partitions of a Kafka topic for processing in Apex. \nThe operator has the ability to automatically scale with the Kafka partitioning for high throughput. \nIt is fault-tolerant (consumer offset checkpointing) and guarantees idempotency to allow exactly-once results in the downstream p [...]
+            "title": "Introduction"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#kafka-input-operator-for-kafka-08x",
+            "text": "Package:  com.datatorrent.contrib.kafka  Maven artifact:  malhar-contrib",
+            "title": "Kafka Input Operator for Kafka 0.8.x"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstractkafkainputoperator",
+            "text": "This is the abstract implementation that serves as base class for consuming messages from Kafka messaging system. This class doesn\u2019t have any ports.",
+            "title": "AbstractKafkaInputOperator"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#configuration-parameters",
+            "text": "Parameter  Description    maxTuplesPerWindow  Controls the maximum number of messages emitted in each streaming window from this operator. Minimum value is 1. Default value = MAX_VALUE     idempotentStorageManager  This is an instance of IdempotentStorageManager. Idempotency ensures that the operator will process the same set of messages in a window before and after a failure. For example, let's say the operator completed window 10 and failed somewhere between window [...]
+            "title": "Configuration Parameters"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstract-methods",
+            "text": "void emitTuple(Message message) : Abstract method that emits tuples extracted from Kafka message.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#kafkaconsumer",
+            "text": "This is an abstract implementation of Kafka consumer. It sends the fetch\nrequests to the leading brokers of Kafka partitions. For each request,\nit receives the set of messages and stores them into the buffer which is\nArrayBlockingQueue. SimpleKafkaConsumer which extends\nKafkaConsumer and serves the functionality of Simple Consumer API and\nHighLevelKafkaConsumer which extends KafkaConsumer and serves the\nfunctionality of High Level Consumer API.",
+            "title": "KafkaConsumer"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#pre-requisites",
+            "text": "This operator uses the Kafka 0.8.2.1 client consumer API\nand will work with 0.8.x and 0.7.x versions of Kafka broker.",
+            "title": "Pre-requisites"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#configuration-parameters_1",
+            "text": "Parameter  Type  Default  Description    zookeeper  String   Specifies the zookeeper quorum of Kafka clusters that you want to consume messages from. zookeeper \u00a0is a string in the form of hostname1:port1,hostname2:port2,hostname3:port3 \u00a0where hostname1,hostname2,hostname3 are hosts and port1,port2,port3 are ports of zookeeper server. \u00a0If the topic name is the same across the Kafka clusters and want to consume data from these clusters, then configure th [...]
+            "title": "Configuration Parameters"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstract-methods_1",
+            "text": "void commitOffset(): Commit the offsets at checkpoint.  Map <KafkaPartition, Long> getCurrentOffsets(): Return the current\n    offset status.  resetPartitionsAndOffset(Set <KafkaPartition> partitionIds,\n    Map <KafkaPartition, Long> startOffset): Reset the partitions with\n    partitionIds and offsets with startOffset.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#configuration-parameters-for-simplekafkaconsumer",
+            "text": "Parameter  Type  Default  Description    bufferSize  int  1 MB  Specifies the maximum total size of messages for each fetch request.    metadataRefreshInterval  int  30 Seconds  Interval in between refresh the metadata change(broker change) in milliseconds. Enabling metadata refresh guarantees an automatic reconnect when a new broker is elected as the host. A value of -1 disables this feature.    metadataRefreshRetryLimit  int  -1  Specifies the maximum brokers' meta [...]
+            "title": "Configuration Parameters for SimpleKafkaConsumer"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#offsetmanager",
+            "text": "This is an interface for offset management and is useful when consuming data\nfrom specified offsets. Updates the offsets for all the Kafka partitions\nperiodically. Below is the code snippet:  public interface OffsetManager\n{\n  public Map<KafkaPartition, Long> loadInitialOffsets();\n  public void updateOffsets(Map<KafkaPartition, Long> offsetsOfPartitions);\n}",
+            "title": "OffsetManager"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstract-methods_2",
+            "text": "Map <KafkaPartition, Long> loadInitialOffsets() : Specifies the initial offset for consuming messages; called at the activation stage.  updateOffsets(Map<KafkaPartition, Long> offsetsOfPartitions) : This\nmethod is called at every repartitionCheckInterval to update offsets.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#partitioning",
+            "text": "The logical instance of the KafkaInputOperator acts as the Partitioner\nas well as a StatsListener. This is because the\nAbstractKafkaInputOperator implements both the\ncom.datatorrent.api.Partitioner and com.datatorrent.api.StatsListener\ninterfaces and provides an implementation of definePartitions(...) and\nprocessStats(...) which makes it auto-scalable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#response-processstatsbatchedoperatorstats-stats",
+            "text": "The application master invokes this method on the logical instance with\nthe stats (tuplesProcessedPS, bytesPS, etc.) of each partition.\nRe-partitioning happens based on whether any new Kafka partitions added for\nthe topic or bytesPS and msgPS cross their respective upper bounds.",
+            "title": "Response processStats(BatchedOperatorStats stats)"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#definepartitions",
+            "text": "Based on the repartitionRequired field of the Response object which is\nreturned by processStats(...) method, the application master invokes\ndefinePartitions(...) on the logical instance which is also the\npartitioner instance. Dynamic partition can be disabled by setting the\nparameter repartitionInterval value to a negative value.",
+            "title": "DefinePartitions"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstractsingleportkafkainputoperator",
+            "text": "This class extends AbstractKafkaInputOperator to emit messages through single output port.",
+            "title": "AbstractSinglePortKafkaInputOperator"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#ports",
+            "text": "outputPort <T> : Tuples extracted from Kafka messages are emitted through this port.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstract-methods_3",
+            "text": "T getTuple(Message msg) : Converts the Kafka message to tuple.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#concrete-classes",
+            "text": "KafkaSinglePortStringInputOperator: extends  AbstractSinglePortKafkaInputOperator , extracts string from Kafka message.  KafkaSinglePortByteArrayInputOperator: extends  AbstractSinglePortKafkaInputOperator , extracts byte array from Kafka message.",
+            "title": "Concrete Classes"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#application-example",
+            "text": "This section builds an Apex application using Kafka input operator.\nBelow is the code snippet:  @ApplicationAnnotation(name = \"KafkaApp\")\npublic class ExampleKafkaApplication implements StreamingApplication\n{\n  @Override\n  public void populateDAG(DAG dag, Configuration entries)\n  {\n    KafkaSinglePortByteArrayInputOperator input =  dag.addOperator(\"MessageReader\", new KafkaSinglePortByteArrayInputOperator());\n    ConsoleOutputOperator output = dag.addOper [...]
+            "title": "Application Example"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#kafka-input-operator-for-kafka-09x",
+            "text": "Package:  org.apache.apex.malhar.kafka  Maven Artifact:  malhar-kafka  This version uses the new 0.9 version of consumer API and works with Kafka broker version 0.9 and later.\nThe operator is fault-tolerant, scalable and supports input from multiple clusters and multiple topics in a single operator instance.",
+            "title": "Kafka Input Operator for Kafka 0.9.x"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#pre-requisites_1",
+            "text": "This operator requires version 0.9.0 or later of the Kafka Consumer API.",
+            "title": "Pre-requisites"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstractkafkainputoperator_1",
+            "text": "",
+            "title": "AbstractKafkaInputOperator"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#ports_1",
+            "text": "This abstract class doesn't have any ports.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#configuration-properties",
+            "text": "clusters  - String[]   Mandatory Parameter.  Specifies the Kafka clusters that you want to consume messages from. To configure multi-cluster support, you need to specify the clusters separated by \";\".     topics  - String[]   Mandatory Parameter.  Specified the Kafka topics that you want to consume messages from. If you want multi-topic support, then specify the topics separated by \",\".     strategy  - PartitionStrategy    Operator supports two types of partition [...]
+            "title": "Configuration properties"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#abstract-methods_4",
+            "text": "void emitTuple(String cluster, ConsumerRecord<byte[], byte[]> message) : Abstract method that emits tuples\nextracted from Kafka message.",
+            "title": "Abstract Methods"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#concrete-classes_1",
+            "text": "",
+            "title": "Concrete Classes"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#kafkasingleportinputoperator",
+            "text": "This class extends from AbstractKafkaInputOperator and defines the  getTuple()  method which extracts byte array from Kafka message.",
+            "title": "KafkaSinglePortInputOperator"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#ports_2",
+            "text": "outputPort <byte[]> : Tuples extracted from Kafka messages are emitted through this port.",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#application-example_1",
+            "text": "This section builds an Apex application using Kafka input operator.\nBelow is the code snippet:  @ApplicationAnnotation(name = \"KafkaApp\")\npublic class ExampleKafkaApplication implements StreamingApplication\n{\n  @Override\n  public void populateDAG(DAG dag, Configuration entries)\n  {\n    KafkaSinglePortInputOperator input =  dag.addOperator(\"MessageReader\", new KafkaSinglePortInputOperator());\n    ConsoleOutputOperator output = dag.addOperator(\"Output\", n [...]
+            "title": "Application Example"
+        },
+        {
+            "location": "/operators/kafkaInputOperator/#security",
+            "text": "Kafka from 0.9.x onwards supports  Authentication, Encryption and Authorization .  See  here  for more information.",
+            "title": "Security"
+        },
+        {
+            "location": "/operators/regexparser/",
+            "text": "Regex Parser Operator\n\n\nOperator Objective\n\n\nRegexParser\n is designed to parse records based on a regex pattern and construct a concrete java class also known as \n\"POJO\"\n out of it. User needs to provide the regex pattern and schema definition to describe the data pattern. Based on regex pattern, the operator will split the data and then schema definition will be used to map the incoming record to POJO. User can also provide date format if any, in the sche [...]
+            "title": "Regex Parser"
+        },
+        {
+            "location": "/operators/regexparser/#regex-parser-operator",
+            "text": "",
+            "title": "Regex Parser Operator"
+        },
+        {
+            "location": "/operators/regexparser/#operator-objective",
+            "text": "RegexParser  is designed to parse records based on a regex pattern and construct a concrete java class also known as  \"POJO\"  out of it. User needs to provide the regex pattern and schema definition to describe the data pattern. Based on regex pattern, the operator will split the data and then schema definition will be used to map the incoming record to POJO. User can also provide date format if any, in the schema. The supported constraints are listed in  constrain [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/regexparser/#overview",
+            "text": "The operator is  idempotent ,  fault-tolerant  and  partitionable .",
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/regexparser/#operator-information",
+            "text": "Operator location:  malhar-contrib  Available since:  3.7.0  Operator state:  Evolving  Java Package:  com.datatorrent.contrib.parser.RegexParser",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/regexparser/#platform-attributes-that-influence-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted  Class  Yes     The operator takes care of converting the byte array received on the input port to a string by decoding using the JVM's default  Charset . Then, splits the string using the  splitRegexPattern  and populates an object using the  schema . Apex platform converts this object to the object of  TUPLE_ [...]
+            "title": "Platform Attributes that influence operator behavior"
+        },
+        {
+            "location": "/operators/regexparser/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that need to be parsed are received on this port  byte[]  Yes    out  Valid tuples that are emitted as POJO  Object (POJO)  No    err  Invalid tuples are emitted with error message  KeyValPair <String, String>  No",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/regexparser/#partitioning",
+            "text": "Regex Parser can be statically or dynamically partitioned.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/regexparser/#static-partitioning",
+            "text": "This can be achieved in the below 2 ways.  Specifying the partitioner and number of partitions in the populateDAG() method.      RegexParser regexParser = dag.addOperator(\"regexParser\", RegexParser.class);\n    StatelessPartitioner<RegexParser> partitioner = new StatelessPartitioner<RegexParser>(2);\n    dag.setAttribute(regexParser, Context.OperatorContext.PARTITIONER, partitioner);  Specifying the partitioner in properties file.      <property>\n        <name>dt. [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/regexparser/#dynamic-partitioning",
+            "text": "RegexParser can be dynamically partitioned using the out-of-the-box partitioner:",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/regexparser/#throughput-based",
+            "text": "Following code can be added to the  populateDAG  method of application to dynamically partition RegexParser:      RegexParser regexParser = dag.addOperator(\"regexParser\", RegexParser.class);\n    StatelessThroughputBasedPartitioner<RegexParser> partitioner = new StatelessThroughputBasedPartitioner<>();\n    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));\n    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));\n    partitioner.s [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/regexparser/#example",
+            "text": "Coming Soon",
+            "title": "Example"
+        },
+        {
+            "location": "/operators/s3outputmodule/",
+            "text": "S3OutputModule\n\n\nAbout Amazon S3\n\n\n\n\nAmazon S3 (Simple Storage Service)\n is an object storage system with a web service interface to store and retrieve any amount of data at any time from anywhere on the web, offered by Amazon Web Services.\n\n\nS3 Output Module\n\n\n\n\nPurpose of S3Output module is to upload files/directories into an Amazon S3 bucket using the multipart upload feature(see below).\n\n\nS3Output module is \nfault-tolerant\n, \nstatically/dyn [...]
+            "title": "S3 Output Module"
+        },
+        {
+            "location": "/operators/s3outputmodule/#s3outputmodule",
+            "text": "",
+            "title": "S3OutputModule"
+        },
+        {
+            "location": "/operators/s3outputmodule/#about-amazon-s3",
+            "text": "Amazon S3 (Simple Storage Service)  is an object storage system with a web service interface to store and retrieve any amount of data at any time from anywhere on the web, offered by Amazon Web Services.",
+            "title": "About Amazon S3"
+        },
+        {
+            "location": "/operators/s3outputmodule/#s3-output-module",
+            "text": "Purpose of S3Output module is to upload files/directories into an Amazon S3 bucket using the multipart upload feature(see below).  S3Output module is  fault-tolerant ,  statically/dynamically partitionable  and has  exactly once  semantics.  Module class is  S3OutputModule  located in the package  org.apache.apex.malhar.lib.fs.s3 ; please refer to  github URL .",
+            "title": "S3 Output Module"
+        },
+        {
+            "location": "/operators/s3outputmodule/#overview",
+            "text": "File upload to S3 can also be done using  AbstractFileOutputOperator  but that operator uploads large files sequentially; the current module in contrast can substantially improve the upload speed of large files by reading and uploading their constituent blocks in parallel.   The table below lists additional benefits of this module over  AbstractFileOutputOperator .     S3OutputModule  AbstractFileOutputOperator      Maximum upload file size is 5TB.  Maximum upload fi [...]
+            "title": "Overview"
+        },
+        {
+            "location": "/operators/s3outputmodule/#multipart-upload-feature",
+            "text": "Uploading parts of a file is done via the  multipart feature ; using this feature, each part of a file can be uploaded independently.\nAfter all parts of a file are uploaded successfully, Amazon S3 combines the parts as a single object.  Please refer to the  Java code  for uploading file into Amazon S3 bucket using multipart feature.",
+            "title": "Multipart Upload Feature"
+        },
+        {
+            "location": "/operators/s3outputmodule/#module",
+            "text": "A  module  is a group of operators pre-wired together so they work as a single conceptual entity in an application. Typically, a module will contain a set of input ports, output ports and configuration properties. The operators internal to the module will be automatically configured based on the supplied module properties.",
+            "title": "Module"
+        },
+        {
+            "location": "/operators/s3outputmodule/#operators-in-s3outputmodule",
+            "text": "Following diagram illustrates the DAG in this module:     S3InitiateFileUploadOperator   Initiate the upload for the file using  AmazonS3Client.initiateMultipartUpload(...)  method only if the number of blocks for a file is greater than 1. By successfully initiating the upload, S3 returns a response of type  InitiateMultipartUploadResult , which includes the  upload ID , which is the unique identifier for the multipart upload. This  upload ID  must be included in eac [...]
+            "title": "Operators in S3OutputModule"
+        },
+        {
+            "location": "/operators/s3outputmodule/#configuration-parameters",
+            "text": "accessKey  -   String   Mandatory Parameter      Specifies the AWS access key to access Amazon S3 and has permissions to access the specified bucket.  Example value = AKIAJVAGFANC2LSZCJ4Q     secretAccessKey    -   String   Mandatory Parameter  Specifies the AWS secret access key to access Amazon S3 and has permissions to access the specified bucket.  Example value = wpVr3U82RmCKJoY007YfkaawT7CenhTcK1B8clue     endPoint   -   String   Endpoint is the URL for the entr [...]
+            "title": "Configuration Parameters"
+        },
+        {
+            "location": "/operators/s3outputmodule/#ports",
+            "text": "filesMetadataInput     -   AbstractFileSplitter.FileMetadata   Input port for files metadata.  Mandatory     blocksMetadataInput    -   BlockMetadata.FileBlockMetadata   Input port for blocks metadata.  Mandatory     blockData  -   AbstractBlockReader.ReaderRecord   Input port for blocks data.  Mandatory",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/s3outputmodule/#application-example",
+            "text": "Please refer to  Example  for S3OutputModule sample application.",
+            "title": "Application Example"
+        },
+        {
+            "location": "/operators/s3outputmodule/#partitioning",
+            "text": "Partitioning the module means that the operators in the module can be partitioned.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/s3outputmodule/#stateless-partitioning",
+            "text": "Partitioning the operator in module can be achieved as follows:",
+            "title": "Stateless Partitioning"
+        },
+        {
+            "location": "/operators/s3outputmodule/#s3initiatefileuploadoperator",
+            "text": "Partition of this operator is achieved indirectly as follows:  <property>\n  <name>dt.operator.{ModuleName}#InitiateUpload.attr.PARTITIONER</name>\n  <value>com.datatorrent.common.partitioner.StatelessPartitioner:{N}</value>\n</property>       where {ModuleName} is the name of the S3OutputModule and\n      {N} is the number of static partitions.\nAbove lines will partition S3InitiateFileUploadOperator statically {N} times.",
+            "title": "S3InitiateFileUploadOperator"
+        },
+        {
+            "location": "/operators/s3outputmodule/#s3blockuploadoperator",
+            "text": "Locality of S3BlockUploadOperator with upstream operator (FSInputModule/BlockReader) must be set to PARTITION_PARALLEL for performance benefits by avoiding serialization/deserialization of objects. So, partitioning of this operator depends on upstream operator which is of type FSInputModule/BlockReader.",
+            "title": "S3BlockUploadOperator"
+        },
+        {
+            "location": "/operators/s3outputmodule/#s3filemerger",
+            "text": "By setting the parameter \"mergerCount\",  S3FileMerger  be statically partitioned. This can be achieved by two ways:  (a) Following code can be added to populateDAG(DAG dag, Configuration conf) method of application to statically partitioning  S3FileMerger  {N} times:    FSInputModule inputModule = dag.addModule(\"HDFSInputModule\", new FSInputModule());\n  S3OutputModule outputModule = dag.addModule(\"S3OutputModule\", new S3OutputModule());\n  outputModule.setMerg [...]
+            "title": "S3FileMerger"
+        },
+        {
+            "location": "/operators/s3outputmodule/#dynamic-partitioning",
+            "text": "Dynamic partitioning is a feature of Apex platform which changes the number of partitions of an operator at run time.\nLocality of  S3BlockUploadOperator  with upstream operator(FSInputModule/BlockReader) must set to PARTITION_PARALLEL for performance benefits by avoiding serialization/deserialization of objects. So, dynamic partitioning of this operator depends on upstream operator which is of type FSInputModule/BlockReader.  From the example application, by setting [...]
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/transform/",
+            "text": "Transform - Operator Documentation\n\n\nAbout Transform operator\n\n\n\n\nTransform means mapping of field expression from input to output or conversion of fields from one type to another.\nThis operator is stateless. This operator receives objects on its input port; for each such input object, it creates a new output object whose fields are computed as expressions involving fields of the input object. \nThe types of the input and output objects are configurable as a [...]
+            "title": "Transformer"
+        },
+        {
+            "location": "/operators/transform/#transform-operator-documentation",
+            "text": "",
+            "title": "Transform - Operator Documentation"
+        },
+        {
+            "location": "/operators/transform/#about-transform-operator",
+            "text": "Transform means mapping of field expression from input to output or conversion of fields from one type to another.\nThis operator is stateless. This operator receives objects on its input port; for each such input object, it creates a new output object whose fields are computed as expressions involving fields of the input object. \nThe types of the input and output objects are configurable as are the expressions used to compute the output fields.   The operator class [...]
+            "title": "About Transform operator"
+        },
+        {
+            "location": "/operators/transform/#use-case",
+            "text": "Consider the data that needs to be transformed as per output schema.  Consider input objects with these fields:     Name  Type      FirstName  String    LastName  String    Phone  String    DateOfBirth  java.util.Date    Address  String     and output objects with fields:      Name  Type      Name  String    Phone  String    Age  Integer    Address  String     Suppose  Name  is a concatenation of  FirstName  and  LastName  and \n         Age  is computed by subtracti [...]
+            "title": "Use Case"
+        },
+        {
+            "location": "/operators/transform/#configuration-parameters",
+            "text": "expressionMap  -   Map   Mandatory Parameter  Specifies the map between the output field (key) and the expression used to compute it (value) using fields of the input Java object.     expressionFunctions  -   List   List of imported classes or methods should be made available to expression to use. It overrides the default list.  Default Value = {java.lang.Math. , org.apache.commons.lang3.StringUtils. , org.apache.commons.lang3.StringEscapeUtils. , org.apache.commons. [...]
+            "title": "Configuration Parameters"
+        },
+        {
+            "location": "/operators/transform/#configuration-example",
+            "text": "Consider input object with fields:     Name  Type      FirstName  String    LastName  String    StartDate  org.joda.time.DateTime     and output objects with fields:     Name  Type      Name  String    isLeapYear  Boolean     Note:  org.joda.time.DateTime  class is not present in the default list. So, we need to add this library to  expressionFunctions  as below in populateDAG method:  TransformOperator operator = dag.addOperator(\"transform\", new TransformOperator( [...]
+            "title": "Configuration Example"
+        },
+        {
+            "location": "/operators/transform/#ports",
+            "text": "input  -   Port for input tuples.   Mandatory input port     output     -   Port for transformed output tuples.   Mandatory output port",
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/transform/#attributes",
+            "text": "Input port Attribute - input.TUPLE_CLASS \u00a0- Fully qualified class name and class should be Kryo serializable.   Mandatory attribute  Type of input tuple.     Output port Attribute - output.TUPLE_CLASS \u00a0- Fully qualified class name and class should be Kryo serializable.   Mandatory attribute  Type of output tuple.",
+            "title": "Attributes"
+        },
+        {
+            "location": "/operators/transform/#application-example",
+            "text": "Please refer  Example  for transform sample application.",
+            "title": "Application Example"
+        },
+        {
+            "location": "/operators/transform/#partitioning",
+            "text": "Being stateless, this operator can be partitioned using any of the built-in partitioners present in the Malhar library by setting a few properties as follows:",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/transform/#stateless-partitioning",
+            "text": "Stateless partitioning will ensure that TransformOperator will be partitioned right at the starting of the application and will remain partitioned throughout the lifetime of the DAG.\nTransformOperator can be stateless partitioned by adding following lines to properties.xml:    <property>\n    <name>dt.operator.{OperatorName}.attr.PARTITIONER</name>\n    <value>com.datatorrent.common.partitioner.StatelessPartitioner:{N}/value>\n  </property>  where {OperatorName} is  [...]
+            "title": "Stateless partitioning"
+        },
+        {
+            "location": "/operators/transform/#dynamic-partitioning",
+            "text": "Dynamic partitioning is a feature of Apex platform which changes the partition of the operator based on certain condition.\nTransformOperator can be dynamically partitioned using the below two partitioners:",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/transform/#throughput-based",
+            "text": "Following code can be added to populateDAG(DAG dag, Configuration conf) method of application to dynamically partitioning TransformOperator:  StatelessThroughputBasedPartitioner<TransformOperator> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(10000);\npartitioner.setMaximumEvents(30000);\npartitioner.setMinimumEvents(10000);\ndag.setAttribute(transform, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{part [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/windowedOperator/",
+            "text": "WINDOWED OPERATOR\n\n\nIntroduction\n\n\nThe \nWindowedOperator\n is an operator in the Apex Malhar Library that supports the windowing semantics outlined by Apache Beam, including the notions of watermarks, triggers, accumulation modes, and allowed lateness. It currently supports event time windows, sliding event time windows, session windows, and global window. The reader of this document is encouraged to read this \nblog\n for the basic concepts of streaming appli [...]
+            "title": "Windowed Operator"
+        },
+        {
+            "location": "/operators/windowedOperator/#windowed-operator",
+            "text": "",
+            "title": "WINDOWED OPERATOR"
+        },
+        {
+            "location": "/operators/windowedOperator/#introduction",
+            "text": "The  WindowedOperator  is an operator in the Apex Malhar Library that supports the windowing semantics outlined by Apache Beam, including the notions of watermarks, triggers, accumulation modes, and allowed lateness. It currently supports event time windows, sliding event time windows, session windows, and global window. The reader of this document is encouraged to read this  blog  for the basic concepts of streaming applications, and this  blog  for Apache Beam's wi [...]
+            "title": "Introduction"
+        },
+        {
+            "location": "/operators/windowedOperator/#operator-overview",
+            "text": "In this document, we will explore the following features in the WindowedOperator.   Keyed or Not Keyed  Window Option  Timestamp Extractor  Watermarks  Allowed Lateness  Accumulation  Triggers  Accumulation Mode  Window Propagation  Merging two streams",
+            "title": "Operator Overview"
+        },
+        {
+            "location": "/operators/windowedOperator/#keyed-or-not-keyed",
+            "text": "One of the first things the user of the operator has to decide is whether the operator is keyed ( KeyedWindowedOperatorImpl ) or not keyed ( WindowedOperatorImpl ). State storage, accumulation and triggers behave differently based on whether or not the operator is keyed.  Here are examples of creating a windowed operator.  Non-keyed:  // Creating a non-keyed windowed operator\nWindowedOperatorImpl<InputType, AccumulationType, OutputType> windowedOperator = new Window [...]
+            "title": "Keyed or Not Keyed"
+        },
+        {
+            "location": "/operators/windowedOperator/#window-option",
+            "text": "Each incoming tuple of the WindowedOperator is assigned to one or more windows. The  WindowOption  provides the way to specify what constitutes a window. The following  WindowOption s are supported.",
+            "title": "Window Option"
+        },
+        {
+            "location": "/operators/windowedOperator/#globalwindow",
+            "text": "There is only one window for the entire duration of the application. All tuples are assigned to this one window.  // Setting the global window option\nwindowedOperator.setWindowOption(new WindowOption.GlobalWindow());",
+            "title": "GlobalWindow"
+        },
+        {
+            "location": "/operators/windowedOperator/#timewindows",
+            "text": "A tuple is assigned to exactly one window based on event time, and each window has a fixed duration. One window is followed immediately by another window, and they do not overlap. As a result, one timestamp can only belong to one window.     // Setting a time window option with a duration of 10 minutes\nwindowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(10)));",
+            "title": "TimeWindows"
+        },
+        {
+            "location": "/operators/windowedOperator/#slidingtimewindows",
+            "text": "Similar to  TimeWindow , each window has a fixed duration. But it takes an additional duration parameter  slideBy  which must be smaller than the window duration and the window duration must be divisible by the  slideBy  duration. Each window overlaps with multiple windows. In this case, since one timestamp belongs to multiple windows, a tuple is assigned to multiple windows. The number of windows a tuple belongs to is exactly the window duration divided by the  slid [...]
+            "title": "SlidingTimeWindows"
+        },
+        {
+            "location": "/operators/windowedOperator/#sessionwindows",
+            "text": "SessionWindow s have variable durations and are based on the key of the tuple. Each tuple is assigned to exactly one window. It takes a duration parameter  minGap , which specifies the minimum time gap between two tuples that belong to two  different   SessionWindows  of the same key.  minGap  is also the duration of the \"proto-session\" window for a single timestamp, and it is the minimum duration of any session window.  // Setting a session window option with a mi [...]
+            "title": "SessionWindows"
+        },
+        {
+            "location": "/operators/windowedOperator/#the-proto-session-window-can-be-fit-into-an-existing-sessionwindow-of-the-same-key-without-change",
+            "text": "The new tuple is simply applied to the state of the existing  SessionWindow .",
+            "title": "The proto-session window can be fit into an existing SessionWindow of the same key without change"
+        },
+        {
+            "location": "/operators/windowedOperator/#the-proto-session-window-overlaps-with-two-existing-session-windows-of-the-same-key",
+            "text": "A new Session Window is created with the merged state of the two existing  SessionWindow s, plus the new tuple. The two existing  SessionWindow s will be deleted and retraction triggers for the two deleted windows will be fired. (Please see  here  for details on  Trigger s)",
+            "title": "The proto-session window overlaps with two existing session windows of the same key"
+        },
+        {
+            "location": "/operators/windowedOperator/#the-proto-session-window-overlaps-with-one-existing-session-window-of-the-same-key",
+            "text": "A new  SessionWindow  is created with the state of the existing  SessionWindow , plus the new tuple, with a longer duration than the existing  SessionWindow  and possibly an earlier begin timestamp to cover the new tuple. The existing  SessionWindow  will be deleted and a retraction trigger for the old window will be fired.",
+            "title": "The proto-session window overlaps with one existing session window of the same key"
+        },
+        {
+            "location": "/operators/windowedOperator/#all-of-the-above-checks-return-false",
+            "text": "The proto-session window is in effect and the new tuple is assigned to that window.",
+            "title": "All of the above checks return false"
+        },
+        {
+            "location": "/operators/windowedOperator/#timestamp-extractor",
+            "text": "The  WindowedOperator  expects a timestamp extractor. This is for  WindowedOperator  to extract the timestamp from the tuple for window assignment.  // Setting a time extractor\nwindowedOperator.setTimestampExtractor(new Function<InputTupleType, Long>()\n{\n  @Override\n  public Long apply(InputTupleType tuple)\n  {\n    return tuple.timestamp;\n  }\n});",
+            "title": "Timestamp Extractor"
+        },
+        {
+            "location": "/operators/windowedOperator/#watermarks",
+            "text": "Watermarks are control tuples that include a timestamp. A watermark tells  WindowedOperator  that all windows that lie completely before the given timestamp are considered late, and the rest of the windows are considered early.",
+            "title": "Watermarks"
+        },
+        {
+            "location": "/operators/windowedOperator/#fixed-watermark",
+            "text": "If watermarks are not available from upstream, the user of the WindowedOperator can set a fixed watermark. The fixed watermark represents the number of milliseconds before the timestamp derived from the Apex streaming window ID. Note that the Apex streaming window ID is an implicit timestamp that more or less represents the ingression time of the tuple.  // Setting a fixed watermark that is 10 seconds behind the ingression time\nwindowedOperator.setFixedWatermark(10000);",
+            "title": "Fixed Watermark"
+        },
+        {
+            "location": "/operators/windowedOperator/#allowed-lateness",
+            "text": "Allowed Lateness specifies the lateness horizon from the watermark. If a tuple has a timestamp that lies beyond the lateness horizon, it is dropped by the  WindowedOperator . Also, if a window completely lies beyond the lateness horizon as a result of the arrival of a new watermark, the window along with its state is purged from  WindowedOperator .     // Setting allowed lateness to be one hour\nwindowedOperator.setAllowedLateness(Duration.standardHours(1));",
+            "title": "Allowed Lateness"
+        },
+        {
+            "location": "/operators/windowedOperator/#accumulation",
+            "text": "The Accumulation object tells the  WindowedOperator  how the operator state is accumulated. It tells the  WindowedOperator  what to do with its state upon arrival of an incoming tuple. This is where the business logic goes. Please refer to the interface definition  here  in github. For non-keyed WindowedOperator, the state is per window. For keyed WindowedOperator, the state is per key per window.  // Setting the accumulation to be the sum for longs, assuming both th [...]
+            "title": "Accumulation"
+        },
+        {
+            "location": "/operators/windowedOperator/#triggers",
+            "text": "Triggers are tuples emitted to downstream by the  WindowedOperator . The data in the tuples are based on the state of  WindowedOperator  governed by the Accumulation object. There are two types of trigger: time-based triggers and count-based triggers. Time-based triggers are triggers that get fired in a regular time interval, and count-based triggers are triggers that get fired based on the number of tuples received. The user of WindowedOperator can specify different [...]
+            "title": "Triggers"
+        },
+        {
+            "location": "/operators/windowedOperator/#accumulation-mode",
+            "text": "Accumulation Mode tells the operator what to do with the state of the window when a trigger is fired.\nThere are three supported accumulation mode:  ACCUMULATING ,  DISCARDING , and  ACCUMULATING_AND_DISCARDING .   ACCUMULATING : The state of the window is preserved until purged  DISCARDING : The state of the window is discarded after firing of a trigger  ACCUMULATING_AND_RETRACTING : The state of the window is preserved until purged, but if the state has changed upo [...]
+            "title": "Accumulation Mode"
+        },
+        {
+            "location": "/operators/windowedOperator/#window-propagation",
+            "text": "It is possible to chain multiple instances of  WindowedOperator  and have only the most upstream instance assign the windows and have all downstream instances inherit the same windows of the triggers from the upstream instance. If WindowOption is  null  (i.e.  setWindowOption  is not called), the  WindowedOperator  assumes that the incoming tuples are  WindowedTuple s that contain the information of the window assignment for each tuple.",
+            "title": "Window Propagation"
+        },
+        {
+            "location": "/operators/windowedOperator/#state-storage",
+            "text": "One of the most important elements of the  WindowedOperator  is the state storage. Each window in the operator (or each window/key pair if the operator is keyed) has its own state and how the state is stored and checkpointed is likely to be the most important factor for performance.  The  WindowedOperator  currently supports two different state storage mechanisms.  In-Memory Windowed Storage  stores the operator state only in memory and the entire state is copied to  [...]
+            "title": "State Storage"
+        },
+        {
+            "location": "/operators/windowedOperator/#merging-two-streams",
+            "text": "The  WindowedMergeOperator  is a  WindowedOperator  that takes two incoming data streams. It takes a  MergeAccumulation  instead of a regular Accumulation. The user of this operator can implement their custom merge or join accumulation based on their business logic. Examples of this type of accumulation are  InnerJoin  and  Cogroup .  The  WindowedMergeOperator  has its own watermark. Its watermark timestamp is the earlier watermark timestamp between the two input st [...]
+            "title": "Merging two streams"
+        },
+        {
+            "location": "/operators/windowedOperator/#usage-examples",
+            "text": "For an example usage of the  WindowedOperator  via the High level API, click  here .  For an example usage of the  WindowedOperator  via the DAG level API, click  here .",
+            "title": "Usage Examples"
+        },
+        {
+            "location": "/operators/windowedOperator/#advanced-topics",
+            "text": "",
+            "title": "Advanced Topics"
+        },
+        {
+            "location": "/operators/windowedOperator/#idempotency-considerations",
+            "text": "For the  WindowedOperator  to be  idempotent , both data tuples and watermarks must be emitted deterministically. i.e. When replaying from a checkpoint, the same tuples and watermarks must be emitted in the same Apex streaming window as before the failure happens.  In order to achieve this, the  WindowedOperator  has the following behavior:    The time-based triggers are fired based on the implicit timestamp from the Apex streaming windows, not based on the wall cloc [...]
+            "title": "Idempotency Considerations"
+        },
+        {
+            "location": "/operators/xmlParserOperator/",
+            "text": "Xml Parser\n\n\nOperator Objective\n\n\nThe XmlParser operator parses XML records and constructs POJOs (\"Plain Old Java Objects\") from them. The operator also emits each record as a DOM Document if the relevant output port is connected. User can also provide a XSD (XML Schema Definition) to validate incoming XML records. Valid records will be emitted as POJOs / DOM Document while invalid ones are emitted on error port with an error message if the error port is conn [...]
+            "title": "XML Parser"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#xml-parser",
+            "text": "",
+            "title": "Xml Parser"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#operator-objective",
+            "text": "The XmlParser operator parses XML records and constructs POJOs (\"Plain Old Java Objects\") from them. The operator also emits each record as a DOM Document if the relevant output port is connected. User can also provide a XSD (XML Schema Definition) to validate incoming XML records. Valid records will be emitted as POJOs / DOM Document while invalid ones are emitted on error port with an error message if the error port is connected.  XmlParser is  idempotent ,  faul [...]
+            "title": "Operator Objective"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#class-diagram",
+            "text": "",
+            "title": "Class Diagram"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#operator-information",
+            "text": "Operator location:  malhar-library  Available since:  3.2.0  Operator state:  Evolving  Java Package:  com.datatorrent.lib.parser.XmlParser",
+            "title": "Operator Information"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#properties-attributes-and-ports",
+            "text": "",
+            "title": "Properties, Attributes and Ports"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#platform-attributes-that-influence-operator-behavior",
+            "text": "Attribute  Description  Type  Mandatory      out.TUPLE_CLASS  TUPLE_CLASS attribute on output port which tells operator the class of POJO which needs to be emitted. The name of the field members of the class must match with the names in incoming POJO. The operator ignores unknown properties i.e. fields present in POJO but not in TUPLE_CLASS or vice versa.  Class or FQCN  Yes",
+            "title": "Platform Attributes that influence operator behavior"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#ports",
+            "text": "Port  Description  Type  Mandatory      in  Tuples that needs to be parsed are received on this port  byte[]  Yes    out  Valid Tuples that are emitted as pojo. Tuples are converted to POJO only if the port is connected.  Object (POJO)  No    parsedOutput  Valid Tuples that are emitted as DOM Document. Tuples are converted to DOM Document only if the port is connected.  DOM Document  No    err  Invalid Tuples are emitted with error message. Invalid tuples are discard [...]
+            "title": "Ports"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#partitioning",
+            "text": "XML Parser is both statically and dynamically partitionable.",
+            "title": "Partitioning"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#static-partitioning",
+            "text": "This can be achieved in 2 ways   Specifying the partitioner and number of partitions in the 'populateDAG()' method.   XmlParser xmlParser = dag.addOperator(\"xmlParser\", XmlParser.class);\nStatelessPartitioner<XmlParser> partitioner1 = new StatelessPartitioner<XmlParser>(2);\ndag.setAttribute(xmlParser, Context.OperatorContext.PARTITIONER, partitioner1 );   Specifying the partitioner and number of partitions in properties file.    <property>\n   <name>dt.operator.{O [...]
+            "title": "Static Partitioning"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#dynamic-partitioning",
+            "text": "XmlParser can be dynamically partitioned using an out-of-the-box partitioner:",
+            "title": "Dynamic Partitioning"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#throughput-based",
+            "text": "Following code can be added to 'populateDAG' method of application to dynamically partition XmlParser:  XmlParser xmlParser = dag.addOperator(\"xmlParser\", XmlParser.class);\nStatelessThroughputBasedPartitioner<XmlParser> partitioner = new StatelessThroughputBasedPartitioner<>();\npartitioner.setCooldownMillis(conf.getLong(\"dt.cooldown\", 10000));\npartitioner.setMaximumEvents(conf.getLong(\"dt.maxThroughput\", 30000));\npartitioner.setMinimumEvents(conf.getLong(\" [...]
+            "title": "Throughput based"
+        },
+        {
+            "location": "/operators/xmlParserOperator/#example",
+            "text": "Example for Xml Parser can be found at:  https://github.com/DataTorrent/examples/tree/master/tutorials/parser",
+            "title": "Example"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/operators/AbstractJdbcTransactionableOutputOperator/index.html b/content/docs/malhar-3.8/operators/AbstractJdbcTransactionableOutputOperator/index.html
new file mode 100644
index 0000000..a130982
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/AbstractJdbcTransactionableOutputOperator/index.html
@@ -0,0 +1,594 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Jdbc Output Operator - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Jdbc Output Operator";
+    var mkdocs_page_input_path = "operators/AbstractJdbcTransactionableOutputOperator.md";
+    var mkdocs_page_url = "/operators/AbstractJdbcTransactionableOutputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Jdbc Output Operator</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#jdbc-transactional-pojo-output-operator">JDBC Transactional POJO Output Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#how-to-use">How to Use?</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstract-methods">Abstract Methods</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#abstractjdbcpojooutputoperator">AbstractJdbcPOJOOutputOperator</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#features">Features</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#partitioning-of-jdbc-output-operator">Partitioning of JDBC Output Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#static-partitioning">Static Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#dynamic-partitioning">Dynamic Partitioning</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Jdbc Output Operator</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="jdbc-transactional-pojo-output-operator">JDBC Transactional POJO Output Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator receives an input stream of POJOs and inserts them as rows in a database table in a fault-tolerant way.</p>
+<h2 id="overview">Overview</h2>
+<p>The main features of this operator (<code>AbstractJdbcTransactionableOutputOperator</code>) are persisting data to the database table and fault tolerance. This operator creates a transaction at the start of each window, executes batches of SQL updates, and closes the transaction at the end of the window. Each tuple corresponds to an SQL update statement. The operator groups the updates in a batch and submits them with one call to the database. Batch processing improves performance con [...]
+<p>An (indirect) base class for this operator is <code>AbstractPassThruTransactionableStoreOutputOperator</code> which implements a pass-through output adapter for a transactional store; it guarantees exactly-once semantics. "Pass-through" means it does not wait for end window to write to the store. It will begin transaction at <code>beginWindow</code> and write to the store as the tuples come and commit the transaction at <code>endWindow</code>.</p>
+<p>The overall heirarchy is described in the the following diagram:</p>
+<p><img alt="JdbcPOJOInsertOutputOperator.png" src="../images/jdbcoutput/operatorsClassDiagrams.png" /></p>
+<p><code>AbstractTransactionableStoreOutputOperator</code>: A skeleton implementation of an output operator that writes to a transactional store; the tuple type and store type are generic parameters. Defines an input port whose process method invokes the processTuple() abstract method. Exactly-once semantics are not guaranteed and must be provided by subclasses if needed.</p>
+<p><code>AbstractPassThruTransactionableStoreOutputOperator</code>: Simple extension of the above base class which adds exactly-once semantics by starting a transaction in <code>beginWindow()</code> and committing it in <code>endWindow()</code>.</p>
+<p><code>AbstractJdbcTransactionableOutputOperator</code>: (focus of this document) Adds support for JDBC by using an instance of JdbcTransactionalStore as the store. Also adds support for processing tuples in batches and provides an implementation of the <code>processTuple()</code> abstract method mentioned above.</p>
+<p><code>AbstractJdbcPOJOOutputOperator</code>: Serves as base class for inserting rows in a table using a JDBC store.</p>
+<p><strong>Note</strong>: For enforcing exactly once semantics a table named <code>dt_meta</code> must exist in the database. The sample SQL to create the same is as follows</p>
+<pre><code>&quot;CREATE TABLE IF NOT EXISTS dt_meta (dt_app_id VARCHAR(100) NOT NULL, dt_operator_id INT NOT NULL, dt_window BIGINT NOT NULL, UNIQUE(dt_app_id,dt_operator_id,dt_window))&quot;.
+</code></pre>
+
+<p><strong>Note</strong>: Additionally this operator assumes that the underlying database/table in which records are to be added supports transactions. If the database/table does not support transactions then a tuple may be inserted in a table more than once in case of auto recovery from a failure (violation of exactly once semantics).</p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-library</em></strong></li>
+<li>Available since: <strong><em>0.9.4</em></strong></li>
+<li>Java Packages:<ul>
+<li>Operator: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/db/jdbc/AbstractJdbcTransactionableOutputOperator.html">com.datatorrent.lib.db.jdbc.AbstractJdbcTransactionableOutputOperator</a></em></strong></li>
+</ul>
+</li>
+</ol>
+<h2 id="how-to-use">How to Use?</h2>
+<p>Concrete subclasses need to implement a couple of abstract methods (if not using AbstractJdbcPOJOOutputOperator): <code>setStatementParameters(PreparedStatement statement, T tuple)</code> to set the parameter of the insert/update statement (which is a PreparedStatement) with values from the tuple and <code>getUpdateCommand()</code> to return the SQL statement to update a tuple in the database.  Note that subclasses of AbstractJdbcPOJOOutputOperator need not define these methods since  [...]
+<p>Several properties are available to configure the behavior of this operator and they are summarized in the table below.</p>
+<h3 id="properties-of-abstractjdbctransactionableoutputoperator"><a name="AbstractJdbcTransactionableOutputOperatorProps"></a>Properties of AbstractJdbcTransactionableOutputOperator</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>batchSize</em></td>
+<td>Maximum number of tuples to insert in a single call (see explanation above).</td>
+<td>int</td>
+<td>No</td>
+<td>1000</td>
+</tr>
+</tbody>
+</table>
+<h4 id="properties-of-jdbc-store"><a name="JdbcTransactionalStore"></a>Properties of JDBC Store</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>databaseDriver</em></td>
+<td>JDBC Driver class for connection to JDBC Store. This driver should be present in the class path</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>databaseUrl</em></td>
+<td><a href="http://www.roseindia.net/tutorial/java/jdbc/databaseurl.html">"Database URL"</a> of the form jdbc:subprotocol:subname</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>userName</em></td>
+<td>Name of the user configured in the database</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>password</em></td>
+<td>Password of the user configured in the database</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<p>Those attributes can be set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.batchSize&lt;/name&gt;
+  &lt;value&gt;500&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.store.databaseDriver&lt;/name&gt;
+  &lt;value&gt;com.mysql.jdbc.Driver&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.store.databaseUrl&lt;/name&gt;
+  &lt;value&gt;jdbc:mysql://localhost:3306/mydb&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.store.userName&lt;/name&gt;
+  &lt;value&gt;myuser&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.store.password&lt;/name&gt;
+  &lt;value&gt;mypassword&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<h3 id="abstract-methods">Abstract Methods</h3>
+<p>These methods are defined as abstract in AbstractJdbcTransactionableOutputOperator: <code>void setStatementParameters(PreparedStatement statement, T tuple)</code>: Sets the parameters of the insert/update statement with values from the tuple.
+<code>String getUpdateCommand()</code>: Gets the statement which inserts/updates the table in the database.</p>
+<h2 id="abstractjdbcpojooutputoperator">AbstractJdbcPOJOOutputOperator</h2>
+<p>This is the abstract implementation extending the functionality of AbstractJdbcTransactionableOutputOperator that serves as base class for inserting rows in a table using a JDBC store. It has the definition for the abstract methods in AbstractJdbcTransactionableOutputOperator. It can be further extended to modify functionality or add new capabilities. This class has an input port to recieve the records in the form of tuples, so concrete subclasses won't need to provide the same, and p [...]
+<h3 id="properties-of-abstractjdbcpojooutputoperator"><a name="AbstractJdbcPOJOOutputOperatorProps"></a>Properties of AbstractJdbcPOJOOutputOperator</h3>
+<p>Several properties are available to configure the behavior of this operator and they are summarized in the table below.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>tablename</em></td>
+<td>Name of the table where data is to be inserted</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>fieldInfos</em></td>
+<td>JdbcFieldInfo maps a store column to a POJO field name</td>
+<td>List</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<p>Those attributes can be set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.tablename&lt;/name&gt;
+  &lt;value&gt;ResultTable&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.fieldInfosItem[0]&lt;/name&gt;
+  &lt;value&gt;
+  {
+    &quot;sqlType&quot;: 0,
+    &quot;columnName&quot;:&quot;ID&quot;,
+    &quot;pojoFieldExpression&quot;: &quot;id&quot;,
+    &quot;type&quot;:&quot;INTEGER&quot;
+  }
+  &lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.fieldInfosItem[1]&lt;/name&gt;
+  &lt;value&gt;
+  {
+    &quot;sqlType&quot;: 4,
+    &quot;columnName&quot;:&quot;NAME&quot;,
+    &quot;pojoFieldExpression&quot;: &quot;name&quot;,
+    &quot;type&quot;:&quot;STRING&quot;
+  }
+  &lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<h2 id="platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on input port which tells operator the class of POJO which is being received</td>
+<td>Class</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<p>Those attributes can be set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.port.input.attr.TUPLE_CLASS&lt;/name&gt;    
+  &lt;value&gt;com.example.mydtapp.PojoEvent&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>A concrete implementation is provided in Malhar as <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPOJOInsertOutputOperator.java">JdbcPOJOInsertOutputOperator</a>.  The incoming tuples will be inserted in the table using PreparedStatement of the base class,  which is formed in <code>activate()</code> method of this operator.</p>
+<h2 id="features">Features</h2>
+<p>The operator is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically partitionable</strong>.</p>
+<h2 id="partitioning-of-jdbc-output-operator">Partitioning of JDBC Output Operator</h2>
+<h4 id="static-partitioning">Static Partitioning</h4>
+<p>Only static partitioning is supported for this operator.</p>
+<p>Static partitioning can be achieved by specifying the partitioner and number of partitions in the populateDAG() method</p>
+<pre><code class="java">  JdbcPOJOInsertOutputOperator jdbcPOJOInsertOutputOperator = dag.addOperator(&quot;jdbcPOJOInsertOutputOperator&quot;, JdbcPOJOInsertOutputOperator.class);
+  StatelessPartitioner&lt;JdbcPOJOInsertOutputOperator&gt; partitioner1 = new StatelessPartitioner&lt;JdbcPOJOInsertOutputOperator&gt;(2);
+  dag.setAttribute(jdbcPOJOInsertOutputOperator, Context.OperatorContext.PARTITIONER, partitioner1);
+</code></pre>
+
+<p>Static partitioning can also be achieved by specifying the partitioner in properties file.</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the JdbcPOJOInsertOutputOperator operator.
+The above lines will create 2 static partitions of JdbcPOJOInsertOutputOperator. The value can be changed accordingly to alter the number of static partitions.</p>
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<p>Not supported.</p>
+<h2 id="example">Example</h2>
+<p>An example application using this operator can be found <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/fileToJdbc">here</a>. This example shows how to read files from HDFS, parse into POJOs and then insert into a table in MySQL.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../jdbcPollInputOperator/" class="btn btn-neutral float-right" title="JDBC Poller Input">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../ftpInputOperator/" class="btn btn-neutral" title="FTP Input Operator"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../ftpInputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../jdbcPollInputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/block_reader/index.html b/content/docs/malhar-3.8/operators/block_reader/index.html
new file mode 100644
index 0000000..739fdbf
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/block_reader/index.html
@@ -0,0 +1,541 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Block Reader - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Block Reader";
+    var mkdocs_page_input_path = "operators/block_reader.md";
+    var mkdocs_page_url = "/operators/block_reader/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Block Reader</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#block-reader">Block Reader</a></li>
+                
+                    <li><a class="toctree-l4" href="#why-is-it-needed">Why is it needed?</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractblockreader">AbstractBlockReader</a></li>
+                
+                    <li><a class="toctree-l4" href="#example-application">Example Application</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#abstractfsreadaheadlinereader">AbstractFSReadAheadLineReader</a></li>
+                
+                    <li><a class="toctree-l4" href="#readaheadlinereadercontext">ReadAheadLineReaderContext</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#abstractfslinereader">AbstractFSLineReader</a></li>
+                
+                    <li><a class="toctree-l4" href="#linereadercontext">LineReaderContext</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#fsslicereader">FSSliceReader</a></li>
+                
+                    <li><a class="toctree-l4" href="#fixedbytesreadercontext">FixedBytesReaderContext</a></li>
+                
+                    <li><a class="toctree-l4" href="#configuration_1">Configuration</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#partitioner-and-statslistener">Partitioner and StatsListener</a></li>
+                
+                    <li><a class="toctree-l4" href="#processstats">processStats </a></li>
+                
+                    <li><a class="toctree-l4" href="#definepartitions">definePartitions</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Block Reader</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="block-reader">Block Reader</h1>
+<p>This is a scalable operator that reads and parses blocks of data sources into records. A data source can be a file or a message bus that contains records and a block defines a chunk of data in the source by specifying the block offset and the length of the source belonging to the block. </p>
+<h2 id="why-is-it-needed">Why is it needed?</h2>
+<p>A Block Reader is needed to parallelize reading and parsing of a single data source, for example a file. Simple parallelism of reading data sources can be achieved by multiple partitions reading different source of same type (for files see <a href="https://github.com/apache/incubator-apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileInputOperator.java">AbstractFileInputOperator</a>) but Block Reader partitions can read blocks of same source in paralle [...]
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="BlockReader class diagram" src="../images/blockreader/classdiagram.png" /></p>
+<h2 id="abstractblockreader">AbstractBlockReader</h2>
+<p>This is the abstract implementation that serves as the base for different types of data sources. It defines how a block metadata is processed. The flow diagram below describes the processing of a block metadata.</p>
+<p><img alt="BlockReader flow diagram" src="../images/blockreader/flowdiagram.png" /></p>
+<h3 id="ports">Ports</h3>
+<ul>
+<li>
+<p>blocksMetadataInput: input port on which block metadata are received.</p>
+</li>
+<li>
+<p>blocksMetadataOutput: output port on which block metadata are emitted if the port is connected. This port is useful when a downstream operator that receives records from block reader may also be interested to know the details of the corresponding blocks.</p>
+</li>
+<li>
+<p>messages: output port on which tuples of type <code>com.datatorrent.lib.io.block.AbstractBlockReader.ReaderRecord</code> are emitted. This class encapsulates a <code>record</code> and the <code>blockId</code> of the corresponding block.</p>
+</li>
+</ul>
+<h3 id="readercontext">readerContext</h3>
+<p>This is one of the most important fields in the block reader. It is of type <code>com.datatorrent.lib.io.block.ReaderContext</code> and is responsible for fetching bytes that make a record. It also lets the reader know how many total bytes were consumed which may not be equal to the total bytes in a record because consumed bytes also include bytes for the record delimiter which may not be a part of the actual record.</p>
+<p>Once the reader creates an input stream for the block (or uses the previous opened stream if the current block is successor of the previous block) it initializes the reader context by invoking <code>readerContext.initialize(stream, blockMetadata, consecutiveBlock);</code>. Initialize method is where any implementation of <code>ReaderContext</code> can perform all the operations which have to be executed just before reading the block or create states which are used during the lifetime  [...]
+<p>Once the initialization is done, <code>readerContext.next()</code> is called repeatedly until it returns <code>null</code>. It is left to the <code>ReaderContext</code> implementations to decide when a block is completely processed. In cases when a record is split across adjacent blocks, reader context may decide to read ahead of the current block boundary to completely fetch the split record (examples- <code>LineReaderContext</code> and <code>ReadAheadLineReaderContext</code>). In ot [...]
+<h3 id="abstract-methods">Abstract methods</h3>
+<ul>
+<li>
+<p><code>STREAM setupStream(B block)</code>: creating a stream for a block is dependent on the type of source which is not known to AbstractBlockReader. Sub-classes which deal with a specific data source provide this implementation.</p>
+</li>
+<li>
+<p><code>R convertToRecord(byte[] bytes)</code><a name="convertToRecord"></a>: this converts the array of bytes into the actual instance of record type.</p>
+</li>
+</ul>
+<h3 id="auto-scalability">Auto-scalability</h3>
+<p>Block reader can auto-scale, that is, depending on the backlog (total number of all the blocks which are waiting in the <code>blocksMetadataInput</code> port queue of all partitions) it can create more partitions or reduce them. Details are discussed in the last section which covers the <a href="#partitioning">partitioner and stats-listener</a>.</p>
+<h3 id="configuration">Configuration</h3>
+<ol>
+<li><a name="maxReaders"></a><strong>maxReaders</strong>: when auto-scaling is enabled, this controls the maximum number of block reader partitions that can be created.</li>
+<li><a name="minReaders"></a><strong>minReaders</strong>: when auto-scaling is enabled, this controls the minimum number of block reader partitions that should always exist.</li>
+<li><a name="collectStats"></a><strong>collectStats</strong>: this enables or disables auto-scaling. When it is set to <code>true</code> the stats (number of blocks in the queue) are collected and this triggers partitioning; otherwise auto-scaling is disabled.</li>
+<li><strong>intervalMillis</strong>: when auto-scaling is enabled, this specifies the interval at which the reader will trigger the logic of computing the backlog and auto-scale.</li>
+</ol>
+<h2 id="abstractfsblockreader"><a name="AbstractFSBlockReader"></a> AbstractFSBlockReader</h2>
+<p>This abstract implementation deals with files. Different types of file systems that are implementations of <code>org.apache.hadoop.fs.FileSystem</code> are supported. The user can override <code>getFSInstance()</code> method to create an instance of a specific <code>FileSystem</code>. By default, the filesystem instance is created from the filesystem URI that comes from the default hadoop configuration.</p>
+<pre><code class="java">protected FileSystem getFSInstance() throws IOException
+{
+  return FileSystem.newInstance(configuration);
+}
+</code></pre>
+
+<p>It uses this filesystem instance to setup a stream of type <code>org.apache.hadoop.fs.FSDataInputStream</code> to read the block.</p>
+<pre><code class="java">@Override
+protected FSDataInputStream setupStream(BlockMetadata.FileBlockMetadata block) throws IOException
+{
+  return fs.open(new Path(block.getFilePath()));
+}
+</code></pre>
+
+<p>All the ports and configurations are derived from the super class. It doesn't provide an implementation of <a href="#convertToRecord"><code>convertToRecord(byte[] bytes)</code></a> method which is delegated to concrete sub-classes.</p>
+<h3 id="example-application">Example Application</h3>
+<p>This simple dag demonstrates how any concrete implementation of <code>AbstractFSBlockReader</code> can be plugged into an application. </p>
+<p><img alt="Application with FSBlockReader" src="../images/blockreader/fsreaderexample.png" /></p>
+<p>In the above application, the file splitter creates block metadata for files which are sent to the block reader. Partitions of the block reader parse the file blocks for records which are filtered, transformed and then persisted to a file (created per block). Therefore the block reader is parallel partitioned with the 2 downstream operators - filter/converter and record output operator. The code which implements this dag is below.</p>
+<pre><code class="java">public class ExampleApplication implements StreamingApplication
+{
+  @Override
+  public void populateDAG(DAG dag, Configuration configuration)
+  {
+    FileSplitterInput input = dag.addOperator(&quot;File-splitter&quot;, new FileSplitterInput());
+    //any concrete implementation of AbstractFSBlockReader based on the use-case can be added here.
+    LineReader blockReader = dag.addOperator(&quot;Block-reader&quot;, new LineReader());
+    Filter filter = dag.addOperator(&quot;Filter&quot;, new Filter());
+    RecordOutputOperator recordOutputOperator = dag.addOperator(&quot;Record-writer&quot;, new RecordOutputOperator());
+
+    dag.addStream(&quot;file-block metadata&quot;, input.blocksMetadataOutput, blockReader.blocksMetadataInput);
+    dag.addStream(&quot;records&quot;, blockReader.messages, filter.input);
+    dag.addStream(&quot;filtered-records&quot;, filter.output, recordOutputOperator.input);
+  }
+
+  /**
+   * Concrete implementation of {@link AbstractFSBlockReader} for which a record is a line in the file.
+   */
+  public static class LineReader extends AbstractFSBlockReader.AbstractFSReadAheadLineReader&lt;String&gt;
+  {
+
+    @Override
+    protected String convertToRecord(byte[] bytes)
+    {
+      return new String(bytes);
+    }
+  }
+
+  /**
+   * Considers any line starting with a '.' as invalid. Emits the valid records.
+   */
+  public static class Filter extends BaseOperator
+  {
+    public final transient DefaultOutputPort&lt;AbstractBlockReader.ReaderRecord&lt;String&gt;&gt; output = new DefaultOutputPort&lt;&gt;();
+    public final transient DefaultInputPort&lt;AbstractBlockReader.ReaderRecord&lt;String&gt;&gt; input = new DefaultInputPort&lt;AbstractBlockReader.ReaderRecord&lt;String&gt;&gt;()
+    {
+      @Override
+      public void process(AbstractBlockReader.ReaderRecord&lt;String&gt; stringRecord)
+      {
+        //filter records and transform
+        //if the string starts with a '.' ignore the string.
+        if (!StringUtils.startsWith(stringRecord.getRecord(), &quot;.&quot;)) {
+          output.emit(stringRecord);
+        }
+      }
+    };
+  }
+
+  /**
+   * Persists the valid records to corresponding block files.
+   */
+  public static class RecordOutputOperator extends AbstractFileOutputOperator&lt;AbstractBlockReader.ReaderRecord&lt;String&gt;&gt;
+  {
+    @Override
+    protected String getFileName(AbstractBlockReader.ReaderRecord&lt;String&gt; tuple)
+    {
+      return Long.toHexString(tuple.getBlockId());
+    }
+
+    @Override
+    protected byte[] getBytesForTuple(AbstractBlockReader.ReaderRecord&lt;String&gt; tuple)
+    {
+      return tuple.getRecord().getBytes();
+    }
+  }
+}
+</code></pre>
+
+<p>Configuration to parallel partition block reader with its downstream operators.</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.Filter.port.input.attr.PARTITION_PARALLEL&lt;/name&gt;
+    &lt;value&gt;true&lt;/value&gt;
+  &lt;/property&gt;
+  &lt;property&gt;
+    &lt;name&gt;dt.operator.Record-writer.port.input.attr.PARTITION_PARALLEL&lt;/name&gt;
+    &lt;value&gt;true&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<h2 id="abstractfsreadaheadlinereader">AbstractFSReadAheadLineReader</h2>
+<p>This extension of <a href="#AbstractFSBlockReader"><code>AbstractFSBlockReader</code></a> parses lines from a block and binds the <code>readerContext</code> field to an instance of <code>ReaderContext.ReadAheadLineReaderContext</code>.</p>
+<p>It is abstract because it doesn't provide an implementation of <a href="#convertToRecord"><code>convertToRecord(byte[] bytes)</code></a> since the user may want to convert the bytes that make a line into some other type. </p>
+<h3 id="readaheadlinereadercontext">ReadAheadLineReaderContext</h3>
+<p>In order to handle a line split across adjacent blocks, ReadAheadLineReaderContext always reads beyond the block boundary and ignores the bytes till the first end-of-line character of all the blocks except the first block of the file. This ensures that no line is missed or incomplete.</p>
+<p>This is one of the most common ways of handling a split record. It doesn't require any further information to decide if a line is complete. However, the cost of this consistent way to handle a line split is that it always reads from the next block.</p>
+<h2 id="abstractfslinereader">AbstractFSLineReader</h2>
+<p>Similar to <code>AbstractFSReadAheadLineReader</code>, this operator also parses lines from a block. However, it binds the <code>readerContext</code> field to an instance of <code>ReaderContext.LineReaderContext</code>.</p>
+<h3 id="linereadercontext">LineReaderContext</h3>
+<p>This handles the line split differently from <code>ReadAheadLineReaderContext</code>. It doesn't always read from the next block. If the end of the last line is aligned with the block boundary then it stops processing the block. It does read from the next block when the boundaries are not aligned, that is, last line extends beyond the block boundary. The result of this is an inconsistency in reading the next block.</p>
+<p>When the boundary of the last line of the previous block was aligned with its block, then the first line of the current block is a valid line. However, in the other case the bytes from the block start offset to the first end-of-line character should be ignored. Therefore, this means that any record formed by this reader context has to be validated. For example, if the lines are of fixed size then size of each record can be validated or if each line begins with a special field then tha [...]
+<p>If the validations of completeness fails for a line then <a href="#convertToRecord"><code>convertToRecord(byte[] bytes)</code></a> should return null.</p>
+<h2 id="fsslicereader">FSSliceReader</h2>
+<p>A concrete extension of <a href="#AbstractFSBlockReader"><code>AbstractFSBlockReader</code></a> that reads fixed-size <code>byte[]</code> from a block and emits the byte array wrapped in <code>com.datatorrent.netlet.util.Slice</code>.</p>
+<p>This operator binds the <code>readerContext</code> to an instance of <code>ReaderContext.FixedBytesReaderContext</code>.</p>
+<h3 id="fixedbytesreadercontext">FixedBytesReaderContext</h3>
+<p>This implementation of <code>ReaderContext</code> never reads beyond a block boundary which can result in the last <code>byte[]</code> of a block to be of a shorter length than the rest of the records.</p>
+<h3 id="configuration_1">Configuration</h3>
+<p><strong>readerContext.length</strong>: length of each record. By default, this is initialized to the default HDFS block size.</p>
+<h2 id="partitioner-and-statslistener">Partitioner and StatsListener</h2>
+<p>The logical instance of the block reader acts as the Partitioner (unless a custom partitioner is set using the operator attribute - <code>PARTITIONER</code>) as well as a StatsListener. This is because the 
+<code>AbstractBlockReader</code> implements both the <code>com.datatorrent.api.Partitioner</code> and <code>com.datatorrent.api.StatsListener</code> interfaces and provides an implementation of <code>definePartitions(...)</code> and <code>processStats(...)</code> which make it auto-scalable.</p>
+<h3 id="processstats">processStats <a name="processStats"></a></h3>
+<p>The application master invokes <code>Response processStats(BatchedOperatorStats stats)</code> method on the logical instance with the stats (<code>tuplesProcessedPSMA</code>, <code>tuplesEmittedPSMA</code>, <code>latencyMA</code>, etc.) of each partition. The data which this operator is interested in is the <code>queueSize</code> of the input port <code>blocksMetadataInput</code>.</p>
+<p>Usually the <code>queueSize</code> of an input port gives the count of waiting control tuples plus data tuples. However, if a stats listener is interested only in the count of data tuples then that can be expressed by annotating the class with <code>@DataQueueSize</code>. In this case <code>AbstractBlockReader</code> itself is the <code>StatsListener</code> which is why it is annotated with <code>@DataQueueSize</code>.</p>
+<p>The logical instance caches the queue size per partition and at regular intervals (configured by <code>intervalMillis</code>) sums these values to find the total backlog which is then used to decide whether re-partitioning is needed. The flow-diagram below describes this logic.</p>
+<p><img alt="Processing of total-backlog" src="../images/blockreader/totalBacklogProcessing.png" /></p>
+<p>The goal of this logic is to create as many partitions within bounds (see <a href="#maxReaders"><code>maxReaders</code></a> and <a href="#minReaders"><code>minReaders</code></a> above) to quickly reduce this backlog or if the backlog is small then remove any idle partitions.</p>
+<h3 id="definepartitions">definePartitions</h3>
+<p>Based on the <code>repartitionRequired</code> field of the <code>Response</code> object which is returned by <em><a href="#processStats">processStats</a></em> method, the application master invokes </p>
+<pre><code class="java">Collection&lt;Partition&lt;AbstractBlockReader&lt;...&gt;&gt;&gt; definePartitions(Collection&lt;Partition&lt;AbstractBlockReader&lt;...&gt;&gt;&gt; partitions, PartitioningContext context)
+</code></pre>
+
+<p>on the logical instance which is also the partitioner instance. The implementation calculates the difference between required partitions and the existing count of partitions. If this difference is negative, then equivalent number of partitions are removed otherwise new partitions are created. </p>
+<p>Please note auto-scaling can be disabled by setting <a href="#collectStats"><code>collectStats</code></a> to <code>false</code>. If the use-case requires only static partitioning, then that can be achieved by setting <a href="https://github.com/chandnisingh/incubator-apex-core/blob/master/common/src/main/java/com/datatorrent/common/partitioner/StatelessPartitioner.java"><code>StatelessPartitioner</code></a> as the operator attribute- <code>PARTITIONER</code> on the block reader.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../csvformatter/" class="btn btn-neutral float-right" title="CSV Formatter">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../../apis/calcite/" class="btn btn-neutral" title="SQL"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../../apis/calcite/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../csvformatter/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/csvParserOperator/index.html b/content/docs/malhar-3.8/operators/csvParserOperator/index.html
new file mode 100644
index 0000000..65e9527
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/csvParserOperator/index.html
@@ -0,0 +1,626 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>CSV Parser - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "CSV Parser";
+    var mkdocs_page_input_path = "operators/csvParserOperator.md";
+    var mkdocs_page_url = "/operators/csvParserOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">CSV Parser</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#csv-parser-operator">Csv Parser Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>CSV Parser</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="csv-parser-operator">Csv Parser Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator is designed to parse delimited records and construct a map or concrete java class also known as <a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">"POJO"</a> out of it. User need to provide the schema to describe the delimited data. Based on schema definition the operator will parse the incoming record to object map and POJO.  User can also provide constraints if any, in the schema. The supported constraints are listed in <a href="#constraints">constraints tab [...]
+<p><strong>Note</strong>: field names of the POJO must match the field names in the schema and be in the same order as they appear in the incoming data.</p>
+<h2 id="overview">Overview</h2>
+<p>The operator is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>partitionable</strong>.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="" src="../images/csvParser/CSVParser.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location:<strong><em>malhar-contrib</em></strong></li>
+<li>Available since:<strong><em>3.2.0</em></strong></li>
+<li>Operator state:<strong><em>Evolving</em></strong></li>
+<li>Java Package:<a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/CsvParser.java">com.datatorrent.contrib.parser.CsvParser</a></li>
+</ol>
+<h2 id="properties-of-csv-parser"><a name="props"></a>Properties of Csv Parser</h2>
+<p>The user needs to set the schema, which describes the delimited data and specifies constraints on values, if any.
+e.g.</p>
+<pre><code class="xml">{
+  &quot;separator&quot;:&quot;,&quot;,
+  &quot;quoteChar&quot;:&quot;\&quot;&quot;,
+  &quot;fields&quot;:[
+    {
+      &quot;name&quot;:&quot;adId&quot;,
+      &quot;type&quot;:&quot;Integer&quot;,
+      &quot;constraints&quot;:{
+         &quot;required&quot;:&quot;true&quot;
+      }
+    },
+    {
+      &quot;name&quot;:&quot;adName&quot;,
+      &quot;type&quot;:&quot;String&quot;,
+      &quot;constraints&quot;:{
+         &quot;required&quot;:&quot;true&quot;,
+         &quot;pattern&quot;:&quot;[a-z].*[a-z]$&quot;,
+         &quot;maxLength&quot;:&quot;10&quot;
+      }
+    },
+    {
+      &quot;name&quot;:&quot;bidPrice&quot;,
+      &quot;type&quot;:&quot;Double&quot;,
+      &quot;constraints&quot;:{
+         &quot;required&quot;:&quot;true&quot;,
+         &quot;minValue&quot;:&quot;0.1&quot;,
+         &quot;maxValue&quot;:&quot;3.2&quot;
+      }
+    },
+    {
+      &quot;name&quot;:&quot;startDate&quot;,
+      &quot;type&quot;:&quot;Date&quot;,
+      &quot;constraints&quot;:{
+         &quot;format&quot;:&quot;yyyy-MM-dd HH:mm:ss&quot;
+      }
+    }
+  ]
+}
+</code></pre>
+
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>schema</em></td>
+<td><a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java">Schema</a>  describing delimited data</td>
+<td>String</td>
+<td>YES</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h2 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>out.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted</td>
+<td>Class</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h2 id="supported-datatypes-in-schema"><a name="dataTypes"></a>Supported DataTypes in Schema</h2>
+<ul>
+<li>Integer</li>
+<li>Long</li>
+<li>Double</li>
+<li>Character</li>
+<li>String</li>
+<li>Boolean</li>
+<li>Date</li>
+<li>Float</li>
+</ul>
+<h2 id="schema-constraints"><a name="constraints"></a>Schema Constraints</h2>
+<table>
+<thead>
+<tr>
+<th><strong>DataType</strong></th>
+<th><strong>Constraints</strong></th>
+<th><strong>Description</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>All data Types</em></td>
+<td>required</td>
+<td>If mentioned, indicates that the field value is required: it cannot be blank/null. It may or may not satisfy other constraints (like equals/minVal/maxVal etc.)</td>
+</tr>
+<tr>
+<td><em>All data Types</em></td>
+<td>equals</td>
+<td>If mentioned, indicates that the data string or value declared in the data type constraints must be an exact match with the specified value. <code>Note: This constraint is not applicable for the boolean and date data types</code></td>
+</tr>
+<tr>
+<td><em>String</em></td>
+<td>Length</td>
+<td>The string must be of the length that is specified.</td>
+</tr>
+<tr>
+<td><em>String</em></td>
+<td>minLength</td>
+<td>The string is at least the length specified as minLength value.</td>
+</tr>
+<tr>
+<td><em>String</em></td>
+<td>maxLength</td>
+<td>The string can be at the most the length specified as maxLength value.</td>
+</tr>
+<tr>
+<td><em>String</em></td>
+<td>pattern</td>
+<td>The string must match the specified regular expression.</td>
+</tr>
+<tr>
+<td><em>Long</em></td>
+<td>maxValue</td>
+<td>The numeric can be at the most the value specified as maxValue.</td>
+</tr>
+<tr>
+<td><em>Long</em></td>
+<td>minValue</td>
+<td>The numeric is at least the value specified as minValue.</td>
+</tr>
+<tr>
+<td><em>Double</em></td>
+<td>maxValue</td>
+<td>The numeric can be at the most the value specified as maxValue.</td>
+</tr>
+<tr>
+<td><em>Double</em></td>
+<td>minValue</td>
+<td>The numeric is at least the value specified as minValue.</td>
+</tr>
+<tr>
+<td><em>Float</em></td>
+<td>maxValue</td>
+<td>The numeric can be at the most the value specified as maxValue.</td>
+</tr>
+<tr>
+<td><em>Float</em></td>
+<td>minValue</td>
+<td>The numeric is at least the value specified as minValue.</td>
+</tr>
+<tr>
+<td><em>Integer</em></td>
+<td>maxValue</td>
+<td>The numeric can be at the most the value specified as maxValue.</td>
+</tr>
+<tr>
+<td><em>Integer</em></td>
+<td>minValue</td>
+<td>The numeric is at least the value specified as minValue.</td>
+</tr>
+<tr>
+<td><em>Date</em></td>
+<td>format</td>
+<td>A simple date format as specified in the SimpleDateFormat class: http://docs.oracle.com/javase/8/docs/api/java/text/SimpleDateFormat.html?is-external=true</td>
+</tr>
+<tr>
+<td><em>Boolean</em></td>
+<td>trueValue</td>
+<td>String for which boolean value is true. The default values are: true, 1, y, and t. <code>Note: If you specify trueValue, you must also specify falseValue.</code></td>
+</tr>
+<tr>
+<td><em>Boolean</em></td>
+<td>falseValue</td>
+<td>String for which boolean value is false. The default values are: false, 0, n, and f. <code>Note: If you specify falseValue, you must also specify trueValue.</code></td>
+</tr>
+</tbody>
+</table>
+<h2 id="ports">Ports</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that need to be parsed are received on this port</td>
+<td>byte[]</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid tuples that are emitted as POJO</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>parsedOutput</em></td>
+<td>Valid tuples that are emitted as a map</td>
+<td>Map</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid Tuples are emitted with error message</td>
+<td>KeyValPair &lt;String, String></td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>CSV Parser is both statically and dynamically partitionable.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>This can be achieved in 2 ways as shown below.</p>
+<p>Specifying the partitioner and number of partitions in the populateDAG() method</p>
+<pre><code class="java">    CsvParser csvParser = dag.addOperator(&quot;csvParser&quot;, CsvParser.class);
+    StatelessPartitioner&lt;CsvParser&gt; partitioner1 = new StatelessPartitioner&lt;CsvParser&gt;(2);
+    dag.setAttribute(csvParser, Context.OperatorContext.PARTITIONER, partitioner1);
+</code></pre>
+
+<p>Specifying the partitioner in properties file.</p>
+<pre><code class="xml">   &lt;property&gt;
+     &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+     &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+   &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the CsvParser operator.
+ The above lines will statically partition CsvParser into 2 partitions. The value can be changed accordingly to change the number of static partitions.</p>
+<h3 id="dynamic-paritioning">Dynamic Partitioning</h3>
+<p>CsvParser can be dynamically partitioned using out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to populateDAG method of application to dynamically partition CsvParser:</p>
+<pre><code class="java">CsvParser csvParser = dag.addOperator(&quot;csvParser&quot;, CsvParser.class);
+StatelessThroughputBasedPartitioner&lt;CsvParser&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+dag.setAttribute(csvParser, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(csvParser, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>The above code will dynamically partition csvParser when the throughput changes.
+If the overall throughput of csvParser goes beyond 30000 or less than 10000, the platform will repartition CsvParser
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Example for Csv Parser can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/parser">https://github.com/DataTorrent/examples/tree/master/tutorials/parser</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../deduper/" class="btn btn-neutral float-right" title="Deduper">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../csvformatter/" class="btn btn-neutral" title="CSV Formatter"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../csvformatter/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../deduper/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/csvformatter/index.html b/content/docs/malhar-3.8/operators/csvformatter/index.html
new file mode 100644
index 0000000..beb6805
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/csvformatter/index.html
@@ -0,0 +1,493 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>CSV Formatter - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "CSV Formatter";
+    var mkdocs_page_input_path = "operators/csvformatter.md";
+    var mkdocs_page_url = "/operators/csvformatter/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">CSV Formatter</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#csvformatter">CsvFormatter</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#limitations">Limitations</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#advanced">Advanced</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+</li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>CSV Formatter</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="csvformatter">CsvFormatter</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator receives a POJO (<a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">Plain Old Java Object</a>) as an incoming tuple, converts the data in 
+the incoming POJO to a custom delimited string and emits the delimited string.</p>
+<p>CsvFormatter supports schema definition as a JSON string. </p>
+<p>CsvFormatter does not hold any state and is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-contrib</em></strong></li>
+<li>Available since: <strong><em>3.2.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Packages:<ul>
+<li>Operator: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/contrib/formatter/CsvFormatter.html">com.datatorrent.contrib.formatter.CsvFormatter</a></em></strong></li>
+</ul>
+</li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="properties-of-pojoenricher"><a name="props"></a>Properties of CsvFormatter</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>schema</em></td>
+<td>Contents of the schema. The schema is specified in JSON format.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h3 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on input port which tells operator the class of POJO which will be incoming</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples which need to be formatted are received on this port</td>
+<td>Object (POJO)</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Tuples that are formatted are emitted from this port</td>
+<td>String</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Tuples that could not be converted are emitted on this port</td>
+<td>Object</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="limitations">Limitations</h2>
+<p>The current CsvFormatter has the following limitations:</p>
+<ol>
+<li>The field names in the schema and the POJO field names should match. For example, if the name of the schema field is "customerName", then the POJO should contain a field with the same name. </li>
+<li>Field wise validation/formatting is not yet supported.</li>
+<li>The fields will be written to the file in the same order as specified in schema.json</li>
+</ol>
+<h2 id="example">Example</h2>
+<p>Example for CsvFormatter can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/csvformatter">https://github.com/DataTorrent/examples/tree/master/tutorials/csvformatter</a></p>
+<h2 id="advanced">Advanced</h2>
+<h3 id="schema-format-for-csvformatter"><a name="JSONFileFormat"></a> Schema format for CsvFormatter</h3>
+<p>CsvFormatter expects schema to be a String in JSON format:</p>
+<p>Example for format of schema:</p>
+<pre><code class="json">{
+  &quot;separator&quot;: &quot;,&quot;,
+  &quot;quoteChar&quot;: &quot;\&quot;&quot;,
+  &quot;lineDelimiter&quot;: &quot;\n&quot;,
+  &quot;fields&quot;: [
+    {
+      &quot;name&quot;: &quot;campaignId&quot;,
+      &quot;type&quot;: &quot;Integer&quot;
+    },
+    {
+      &quot;name&quot;: &quot;startDate&quot;,
+      &quot;type&quot;: &quot;Date&quot;,
+      &quot;constraints&quot;: {
+        &quot;format&quot;: &quot;yyyy-MM-dd&quot;
+      }
+    }
+    ]
+}
+</code></pre>
+
+<h3 id="partitioning-of-csvformatter">Partitioning of CsvFormatter</h3>
+<p>Being a stateless operator, CsvFormatter ensures that the built-in partitioners present in the Malhar library can be used directly by setting properties as follows:</p>
+<h4 id="stateless-partioning-of-csvformatter">Stateless partitioning of CsvFormatter</h4>
+<p>Stateless partitioning will ensure that CsvFormatter will be partitioned right at the start of the application and will remain partitioned throughout the lifetime of the DAG.
+CsvFormatter can be stateless partitioned by adding following lines to properties.xml:</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the CsvFormatter operator.
+Above lines will partition CsvFormatter statically 2 times. Above value can be changed accordingly to change the number of static partitions.</p>
+<h4 id="dynamic-partitioning-of-csvformatter">Dynamic Partitioning of CsvFormatter</h4>
+<p>Dynamic partitioning is a feature of Apex platform which changes the partition of the operator based on certain conditions.
+CsvFormatter can be dynamically partitioned using below out-of-the-box partitioner:</p>
+<h5 id="throughput-based">Throughput based</h5>
+<p>Following code can be added to populateDAG method of application to dynamically partition CsvFormatter:</p>
+<pre><code class="java">    StatelessThroughputBasedPartitioner&lt;CsvFormatter&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+    partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+    dag.setAttribute(csvFormatter, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+    dag.setAttribute(csvFormatter, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition CsvFormatter when throughput changes.
+If the overall throughput of CsvFormatter goes above 30000 or below 10000, the platform will repartition CsvFormatter 
+to balance the throughput of a single partition to be between 10000 and 30000.
+A CooldownMillis value of 10000 will be used as the threshold time over which the throughput change is observed.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../csvParserOperator/" class="btn btn-neutral float-right" title="CSV Parser">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../block_reader/" class="btn btn-neutral" title="Block Reader"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../block_reader/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../csvParserOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/deduper/index.html b/content/docs/malhar-3.8/operators/deduper/index.html
new file mode 100644
index 0000000..2505e83
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/deduper/index.html
@@ -0,0 +1,855 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Deduper - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Deduper";
+    var mkdocs_page_input_path = "operators/deduper.md";
+    var mkdocs_page_url = "/operators/deduper/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+</li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+</li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Deduper</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#deduper-operator-documentation">Deduper - Operator Documentation</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#introduction">Introduction</a></li>
+                
+                    <li><a class="toctree-l4" href="#about-this-document">About this document</a></li>
+                
+                    <li><a class="toctree-l4" href="#terminology">Terminology</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#dedup-what-in-a-nutshell">Dedup - “What” in a Nutshell</a></li>
+                
+                    <li><a class="toctree-l4" href="#dedup-how-in-a-nutshell">Dedup - “How” in a Nutshell</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#use-cases-basic-dedup">Use cases - Basic Dedup</a></li>
+                
+                    <li><a class="toctree-l4" href="#dedup-key">Dedup Key</a></li>
+                
+                    <li><a class="toctree-l4" href="#use-case-details">Use case Details</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#use-case-dedup-with-expiry">Use case - Dedup with Expiry</a></li>
+                
+                    <li><a class="toctree-l4" href="#motivation">Motivation</a></li>
+                
+                    <li><a class="toctree-l4" href="#expiry-key">Expiry Key</a></li>
+                
+                    <li><a class="toctree-l4" href="#expiry-period">Expiry Period</a></li>
+                
+                    <li><a class="toctree-l4" href="#use-case-details_1">Use case Details</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#use-cases-summary">Use cases - Summary</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#technical-architecture">Technical Architecture</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-structure">Class Structure</a></li>
+                
+                    <li><a class="toctree-l4" href="#architectural-details">Architectural Details</a></li>
+                
+                    <li><a class="toctree-l4" href="#concepts">Concepts</a></li>
+                
+                    <li><a class="toctree-l4" href="#assumptions">Assumptions</a></li>
+                
+                    <li><a class="toctree-l4" href="#flow-of-a-tuple-through-dedup-operator">Flow of a Tuple through Dedup Operator</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#ports-attributes-and-properties">Ports, Attributes and Properties</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#attributes">Attributes</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties">Properties</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#example">Example</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#partitioning">Partitioning</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+</li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Deduper</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="deduper-operator-documentation">Deduper - Operator Documentation</h1>
+<h1 id="introduction">Introduction</h1>
+<h2 id="about-this-document">About this document</h2>
+<p>This document is intended as a guide for understanding and using
+the Dedup operator.</p>
+<h2 id="terminology">Terminology</h2>
+<p>We will refer to this operator as the Deduper or Dedup operator
+interchangeably.</p>
+<h1 id="overview">Overview</h1>
+<h2 id="dedup-what-in-a-nutshell">Dedup - “What” in a Nutshell</h2>
+<p>Dedup is actually short for Deduplication. Duplicates are omnipresent and
+can be found in almost any kind of data. Most of the times it is
+essential to discard, or at the very least separate out the data into
+unique and duplicate components. The entire purpose of this
+operator is to de-duplicate data. In other words, when data passes
+through this operator, it will be segregated into two different data
+sets, one of which contains all unique tuples, and the other contains
+tuples occurring more than once in the original data set.</p>
+<p><img alt="" src="../images/deduper/image00.png" /></p>
+<h2 id="dedup-how-in-a-nutshell">Dedup - “How” in a Nutshell</h2>
+<p>In order to quickly decide whether an incoming tuple is duplicate
+or unique, it has to store each incoming tuple (or a signature, like key,
+for example) to be used for comparison later. A plain in-memory storage
+may work for small datasets, but will not scale for large ones. Deduper employs a large scale distributed persistent hashing mechanism (known as the Managed State) which allows
+it to identify if a particular tuple is duplicate or unique. Managed state is a layer on HDFS which allows all the stored data to be persisted in a distributed fashion.
+Each time it identifies a tuple as a unique tuple, it also
+stores it into the Managed state for future
+lookup.</p>
+<h2 id="_1"><img alt="" src="../images/deduper/image04.png" /></h2>
+<p>Following are the different components of the Deduper</p>
+<ol>
+<li><strong>Dedup Operator</strong> - This is responsible for the overall
+    functionality of the operator. This in turn makes use of other
+    components to establish the end goal of deciding whether a tuple is
+    a duplicate of some earlier tuple, or is a unique tuple.</li>
+<li><strong>Managed State</strong> - Since all of the data cannot be stored in
+    memory, this component allows us to persist existing unique keys on
+    HDFS in form of buckets. This is also responsible for fetching data as
+    requested by the Deduper. Since it communicates with HDFS, data access is slow and so it allows for asynchronous (non-blocking) calls to fetch data. This ensures that the Deduper is not blocked and can continue to process other tuples. It also supports an in-memory cache where it stores the fetched data so that repeated access to the same data is faster. Periodically, based on configuration, this also
+    discards data which is no longer needed.</li>
+</ol>
+<p>This was a very basic introduction to the functioning of the
+Deduper. Following sections will go into more detail on each of the
+components.</p>
+<h1 id="use-cases-basic-dedup">Use cases - Basic Dedup</h1>
+<h2 id="dedup-key">Dedup Key</h2>
+<p>A dedup key is a set of one or more fields in the data tuple which
+acts as the key for the tuples.
+This is used by the deduper to compare tuples to arrive at the
+conclusion on whether two tuples are duplicates.</p>
+<p>Consider an example schema and two sample tuples</p>
+<p><code>{Name, Phone, Email, Date, State, Zip, Country}</code></p>
+<p>Tuple 1:</p>
+<pre><code>{
+  Austin U. Saunders,
+  +91-319-340-59385,
+  ausaunders@semperegestasurna.com,
+  2015-11-09 13:38:38,
+  Texas,
+  73301,
+  United States
+}
+</code></pre>
+
+<p>Tuple 2:</p>
+<pre><code>{
+  Austin U. Saunders,
+  +91-319-340-59385,
+  austin@semperegestasurna.com,
+  2015-11-09 13:39:38,
+  Texas,
+  73301,
+  United States
+}
+</code></pre>
+
+<p>Let us assume that the Dedup Key
+is <code>{Name, Phone}</code>. In
+this case, the two tuples are duplicates because the key fields are same
+in both the tuples. However, if the Dedup Key is <code>{Phone, Email}</code>,
+the two are unique as the email values differ.</p>
+<h2 id="use-case-details">Use case Details</h2>
+<p>Consider the case of de-duplicating a master data set
+which is stored in a file. Further also consider the
+following schema for tuples in the data set.</p>
+<p><code>{Name, Phone, Email, Date, City, Zip, Country}</code></p>
+<p>Also consider that we need to identify unique customers from the
+master data set. So, ultimately the output needed for the use case is
+two data sets - Unique Records and Duplicate Records.</p>
+<p>As part of configuring the operator for this use case, we need to
+set the following parameters:</p>
+<ul>
+<li><strong><em>keyExpression</em></strong> - This can be set as
+    the primary key which can be used to uniquely identify a Customer.
+    For example, we can set it to <code>Name,Email</code></li>
+</ul>
+<p>The above configuration is sufficient to address this use case.</p>
+<h1 id="use-case-dedup-with-expiry">Use case - Dedup with Expiry</h1>
+<h2 id="motivation">Motivation</h2>
+<p>The Basic Dedup use case is the most straightforward and is
+usually applied when the amount of data to be processed is not huge.
+However, if the incoming data is huge, or even never-ending, it is
+usually not necessary to keep storing all the data. This is because in
+most real world use cases, the duplicates occur only a short distance
+apart. Hence, after a while, it is usually okay to forget part of
+the history and consider only limited history for identifying
+duplicates, in the interest of efficiency. In other words, we expire
+(ignore) some tuples which are (or were supposed to be) delivered long
+back. Doing so, reduces the load on the storage mechanism (managed state) which effectively deletes part of the history, thus making the whole process more
+efficient. We call this use case, Dedup with expiry.</p>
+<h2 id="expiry-key">Expiry Key</h2>
+<p>The easiest way to understand this use case is to consider
+time as the criterion for expiring
+tuples. Time is a natural expiry
+key and is in line with the concept of expiry. Formally, an expiry field
+is a field in the input tuple which can be used to discard incoming
+tuples as expired. This expiry key
+usually works with another parameter called Expiry Period defined
+next.</p>
+<h2 id="expiry-period">Expiry Period</h2>
+<p>The expiry period is the value supplied by the user to define the
+extent of history which should be considered while expiring
+tuples.</p>
+<h2 id="use-case-details_1">Use case Details</h2>
+<p>Consider an incoming stream of system logs. The use case requires
+us to identify duplicate log messages and pass on only the unique ones.
+Another relaxation in the use case is that the log messages which are
+older than a day, may not be considered and must be filtered out as
+expired. The expiry must be measured with respect to the time stamp in
+the logs. For example, if the timestamp in the incoming message is
+<code>30-12-2014 00:00:00</code> and the
+latest message that the system has encountered had the time stamp
+<code>31-12-2014 00:00:00</code>, then the
+incoming message must be considered as expired. However, if the incoming
+message had any timestamp like <code>30-12-2014
+00:11:00</code>, it must be accepted into the system and be checked for a possible duplicate.</p>
+<p>The expiry facet in the use case above gives us an advantage in
+that we do not have to compare the incoming record with all the data to check if it is a duplicate.
+At the same time, all the
+incoming data need not be stored; just a day worth of data is adequate to address the above use case.</p>
+<p>Configuring the below parameters will solve the problem for this
+use case:</p>
+<ul>
+<li><strong><em>keyExpression</em></strong> - This is the dedup key for the incoming tuples (similar to the Basic Dedup use case). This can be any key which can uniquely identify a record. For log messages this can be a serial number attached in the log.</li>
+<li><strong><em>timeExpression</em></strong> - This is the key which can help identify the expired records, as explained above. In this particular use case, it can be a timestamp field which indicates when the log message was generated.</li>
+<li><strong><em>expireBefore</em></strong> - This is the period of expiry as explained above. In our example use case this will be 24 hours, specified in seconds.</li>
+</ul>
+<p>Configuration of the above parameters is sufficient to address this use
+case.</p>
+<h1 id="use-cases-summary">Use cases - Summary</h1>
+<ol>
+<li><strong>Basic Dedup</strong> - Deduplication of
+    bounded datasets. Data is assumed to be bounded. This use case is
+    not meant for never ending streams of data. For example:
+    Deduplication of master data like customer records, product catalogs
+    etc.</li>
+<li><strong>Time Based Dedup</strong> - Deduplication of
+    unlimited streams of data. This use case handles unbounded streams
+    of data and can run forever. An expiry key and criterion is expected
+    as part of the input which helps avoid storing all the unique data.
+    This helps speed up performance. Any timestamp field in the incoming
+    tuple can be used as a time based expiry key.<ul>
+<li><em>With respect to system time</em> - Time progresses with system time. Any expiry criteria are executed with the notion of system time. This is possible if the incoming tuple does not have a time field, or the user does not specify a <code>timeExpression</code>.</li>
+<li><em>With respect to tuple time</em> - Time progresses based on the time in the incoming tuples. Expiry criteria are executed with the notion of time indicated by the incoming tuple. Specification of the time field (<code>timeExpression</code>) is mandatory for this scenario.</li>
+</ul>
+</li>
+</ol>
+<h1 id="technical-architecture">Technical Architecture</h1>
+<h2 id="class-structure">Class Structure</h2>
+<p><img alt="" src="../images/deduper/image03.png" /></p>
+<hr />
+<h2 id="architectural-details">Architectural Details</h2>
+<p><img alt="" src="../images/deduper/image02.png" /></p>
+<h2 id="concepts">Concepts</h2>
+<h3 id="dedup-key-specified-by-keyexpression-parameter">Dedup Key - Specified by <em>keyExpression</em> parameter</h3>
+<p>A dedup key is a set of one or more fields in the data tuple which
+acts as the key for the tuples.
+This is used by the deduper to compare tuples to arrive at the
+conclusion on whether two tuples are duplicates. If Dedup Key of two
+tuples match, then they are duplicates, else they are unique.</p>
+<h3 id="expiry-key-specified-by-timeexpression-parameter">Expiry Key - Specified by <em>timeExpression</em> parameter</h3>
+<p>A tuple may or may not have an Expiry Key. Dedup operator cannot
+keep storing all the data that is flowing into the operator. At some
+point it becomes essential to discard some of the historical tuples in
+interest of memory and efficiency.</p>
+<p>At the same time, tuples are expected to arrive at the Dedup
+operator within some time after they are generated. After this time, the
+tuples may be considered as stale or obsolete.</p>
+<p>In such cases, the Deduper considers these tuples as
+expired and takes no action other than
+separating out these tuples on a different port in order to be processed
+by some other operator or stored offline for analysis.</p>
+<p>In order to create a criterion for discarding such tuples, we
+introduce an Expiry Key. Looking at the value of the Expiry Key in each
+tuple, we can decide whether or not to discard this tuple as
+expired.</p>
+<p>The expiry key that we consider in Time Based Dedup is
+time. This usually works with
+another parameter called Expiry Period defined next.</p>
+<h3 id="expiry-period_1">Expiry Period</h3>
+<p>The Expiry Period is the value supplied by the user which decides
+when a particular tuple expires.</p>
+<h3 id="time-points">Time Points</h3>
+<p>For every dataset that the deduper processes, a set of time points is maintained:</p>
+<ol>
+<li><em>Latest Point</em> - This is the maximum
+    time point observed in all the processed tuples.</li>
+<li><em>Expiry Point</em> - This is given by:
+    <code>Expiry Point = Latest Point - Expiry Period</code></li>
+</ol>
+<p>These points help the deduper to make decisions related to expiry
+of a tuple.</p>
+<h3 id="example-expiry">Example - Expiry</h3>
+<table>
+<thead>
+<tr>
+<th align="center">Tuple Id</th>
+<th align="center">Expiry Key (Expiry Period = 10)</th>
+<th align="center">Latest Point</th>
+<th align="center">Expiry Point</th>
+<th align="center">Decision for Tuple</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td align="center">1</td>
+<td align="center">10</td>
+<td align="center">10</td>
+<td align="center">1</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">2</td>
+<td align="center">20</td>
+<td align="center">20</td>
+<td align="center">11</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">3</td>
+<td align="center">25</td>
+<td align="center">25</td>
+<td align="center">16</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">4</td>
+<td align="center">40</td>
+<td align="center">40</td>
+<td align="center">31</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">5</td>
+<td align="center">21</td>
+<td align="center">40</td>
+<td align="center">31</td>
+<td align="center">Expired</td>
+</tr>
+<tr>
+<td align="center">6</td>
+<td align="center">35</td>
+<td align="center">40</td>
+<td align="center">31</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">7</td>
+<td align="center">45</td>
+<td align="center">45</td>
+<td align="center">36</td>
+<td align="center">Not Expired</td>
+</tr>
+<tr>
+<td align="center">8</td>
+<td align="center">57</td>
+<td align="center">57</td>
+<td align="center">48</td>
+<td align="center">Not Expired</td>
+</tr>
+</tbody>
+</table>
+<h3 id="time-buckets-a-component-of-managed-state">Time Buckets (A component of Managed State)</h3>
+<p>One of the requirements of the Deduper is to store all the unique
+tuples (actually, just the keys of tuples). Keeping an ever growing
+cache in memory is not scalable. So what we need is a limited cache
+backed by a persistent store. When data is requested to be fetched from managed
+state, it is also cached in an in-memory cache. Buckets help
+narrow down the search of duplicates for incoming tuples. A Bucket is an
+abstraction for a collection of tuples all of which share a common hash
+value based on some hash function or a range of time, for example: a
+bucket of data for 5 contiguous minutes. A Bucket has a span property called Bucket Span.</p>
+<h3 id="bucket-span">Bucket Span</h3>
+<p>Bucket span is simply the range of the domain
+that is covered by the Bucket. This span is specified in
+the domain of the Expiry key. If the Expiry
+Key is time,  then the Bucket span
+will be specified in seconds. It is
+only defined in case tuples have an Expiry Key.</p>
+<h3 id="number-of-buckets">Number of Buckets</h3>
+<p>The number of buckets can be given by - <code>Num Buckets = Expiry
+Period / Bucket Span</code></p>
+<p>This is because at any point of time, we need only store Expiry
+Period worth of data.</p>
+<h3 id="example-buckets">Example - Buckets</h3>
+<p><img alt="" src="../images/deduper/image01.png" /></p>
+<h2 id="assumptions">Assumptions</h2>
+<h3 id="assumption-1">Assumption 1 <a name="Assumption1"></a></h3>
+<p>This assumption is only applicable in case of Dedup with
+Expiry.</p>
+<p>For any two tuples, t1 and t2 having dedup keys d1 and d2, and
+expiry keys e1 and e2, respectively, the following holds:</p>
+<pre><code>If d1 = d2,
+  then e1 = e2
+</code></pre>
+
+<p>In other words, there may never
+be two tuples t1 and t2 such that:</p>
+<pre><code>Tuple 1: d1, e1
+Tuple 2: d2, e2
+d1 = d2 and e1 != e2
+</code></pre>
+
+<p>In other words, any two tuples with the same dedup key are assumed to have the
+same expiry key as well.
+This assumption was made with respect to certain use cases. These
+use cases follow this assumption in that the records which are
+duplicates are exactly identical. An example use case is when log
+messages are replayed erroneously, and we want to identify the duplicate
+log messages. In such cases, we need not worry about two different log
+messages having the same identifier but different timestamps. Since it's
+a replay of the same data, the duplicate records are assumed to be
+exactly identical.</p>
+<p>In case the duplicate tuple has a different value for expiry key, the behavior of
+the deduper can be non-deterministic.</p>
+<h2 id="flow-of-a-tuple-through-dedup-operator">Flow of a Tuple through Dedup Operator</h2>
+<p>Tuples flow through the Dedup operator one by one. Deduper may process a tuple immediately, or store it in some data
+structure for later processing.</p>
+<p>Whenever a tuple arrives at the input
+port of the Dedup operator, it performs
+the following tasks.</p>
+<h4 id="check-if-tuple-is-expired">Check if tuple is Expired</h4>
+<p>This is only done in case of Dedup with expiry. The
+following condition is used to check if the tuple is expired.</p>
+<pre><code>if ( Latest Point - Expiry Key &lt; Expiry Point )
+  then Expired
+</code></pre>
+
+<p>If the tuple is expired, then send it to the expired port.</p>
+<h4 id="check-if-tuple-is-a-duplicate-or-unique">Check if tuple is a Duplicate or Unique</h4>
+<p>Once a tuple passes the check of expiry, we proceed to check if
+the tuple is a duplicate of some earlier tuple. Note that
+if the tuple in question is not expired, the duplicate will also not
+have expired due to the assumption listed <a href="#Assumption1">here</a>.
+The Deduper queries the Managed state to fetch the value for the tuple key.
+This request is processed by the Managed state in a separate asynchronous thread.
+Once this request is submitted, the Deduper moves on to process other
+tuples. Additionally the Deduper also inserts the tuple being processed
+into a waiting events queue for later processing.</p>
+<h4 id="process-pending-tuples">Process pending tuples</h4>
+<p>Once the Deduper has looked at all the tuples in the current window,
+it starts to process the tuples in the waiting queue to finalize the decision
+(unique or duplicate) for these tuples.
+Once the request to Managed state is completed for a tuple and the value is
+fetched from persistent storage, the Deduper can decide if the tuple in
+question is a duplicate or a unique.
+Depending on whether there is enough time left in the current window,
+it can do one of the following:</p>
+<ul>
+<li>Process only the tuples for which the managed state has completed processing.
+The tuples which are still being processed by managed state are skipped for now; the Deduper returns to them once the decision can no longer be postponed. This is typically done when the operator
+has idle time as there are no tuples on the input ports and the current window
+has still not ended.</li>
+<li>Block on them to complete their processing. This will happen when the current
+window has no time left, and the decision cannot be postponed. Note: An operator can end its window, only when all the tuples have been completely processed.  </li>
+</ul>
+<h1 id="ports-attributes-and-properties">Ports, Attributes and Properties</h1>
+<h2 id="ports">Ports</h2>
+<p>The deduper has a single input port and multiple output
+ports.</p>
+<ul>
+<li><strong><em>input</em></strong> - This is the input port through
+    which the tuples arrive at the Deduper.</li>
+<li><strong><em>unique</em></strong> - This is the output port on
+    which unique tuples are sent out by the Deduper.</li>
+<li><strong><em>duplicate</em></strong> - This is the output port on
+    which duplicate tuples are sent out by the Deduper.</li>
+<li><strong><em>expired</em></strong> - This is the output port on
+    which expired tuples are sent out by the Deduper.</li>
+</ul>
+<p>The user can choose to which output ports to connect the downstream operators.
+All the output ports are optional and can be used as required by the use case.</p>
+<h2 id="attributes">Attributes</h2>
+<ul>
+<li><strong><em>Input port Attribute - input.TUPLE_CLASS</em></strong> - Class or the fully
+qualified class name.<ul>
+<li>Mandatory attribute</li>
+<li>Tells the operator about the type of the incoming
+tuple.</li>
+</ul>
+</li>
+</ul>
+<h2 id="properties">Properties</h2>
+<ul>
+<li>
+<p><strong><em>keyExpression</em></strong> - String</p>
+<ul>
+<li>Mandatory parameter.</li>
+<li>The java expression to extract the key fields in the incoming tuple (POJO)</li>
+</ul>
+</li>
+<li>
+<p><strong><em>timeExpression</em></strong> - String - (Time Based Deduper only)</p>
+<ul>
+<li>The java expression to extract the time field in the incoming tuple (POJO).</li>
+</ul>
+</li>
+<li>
+<p><strong><em>expireBefore</em></strong> - Long (Seconds) - (Time Based Deduper only)</p>
+<ul>
+<li>This is the total time period during which a tuple stays in the system and blocks any other tuple with the same key.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>bucketSpan</em></strong> - Long (Seconds) - (Time Based Deduper only)</p>
+<ul>
+<li>Mandatory parameter</li>
+<li>This is the unit which describes how large a bucket can be. Typically this should be defined depending on the use case. For example, if we have expireBefore set to 1 hour, then typically we would be clubbing data in the order of minutes, so a <code>bucketSpan</code> of a few minutes would make sense. Note that in this case, the entire data worth the <code>bucketSpan</code> will expire as a whole. Setting it to 1 minute would make the number of time buckets in the system to be 1 hour  [...]
+<li>Note that having too many or too few buckets could have a performance impact. If unsure, set the bucketSpan to the square root of <code>expireBefore</code>. This way the number of buckets and bucket span are balanced.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>referenceInstant</em></strong> -  Long (Seconds) - (Time Based Deduper only)</p>
+<ul>
+<li>The reference point from which to start the time which is used for expiry. Setting the referenceInstant to say, r seconds from the epoch, would initialize the start of expiry to be from that <code>instant = r</code>. The start and end of the expiry window periodically move by the span of a single bucket.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>numBuckets</em></strong> -  Integer - (Bounded Deduper only)</p>
+<ul>
+<li>Optional parameter, but recommended to be provided by the user.</li>
+<li>This is the number of buckets that need to be used for storing the keys of the incoming tuples.</li>
+<li>Users can decide upon the proper value for this parameter by guessing the number of distinct keys in the application. A reasonable value is the square root of N, where N is the number of distinct keys. If omitted, the Java MAX_VALUE for integer is used for N.</li>
+</ul>
+</li>
+</ul>
+<h1 id="example">Example</h1>
+<p>Please refer to <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/dedup">https://github.com/DataTorrent/examples/tree/master/tutorials/dedup</a> for
+an example on how to use Deduper.</p>
+<h1 id="partitioning">Partitioning</h1>
+<p>Deduper can be statically partitioned using the operator
+attribute: PARTITIONER</p>
+<p>Add the following property to the properties.xml file:</p>
+<pre><code>&lt;property&gt;
+        &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+        &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>This will partition the Dedup operator into 2 static partitions. Change the number
+to the required number of partitions.</p>
+<p>Dynamic partitioning is currently not supported in the Deduper.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../enricher/" class="btn btn-neutral float-right" title="Enricher">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../csvParserOperator/" class="btn btn-neutral" title="CSV Parser"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../csvParserOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../enricher/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/enricher/index.html b/content/docs/malhar-3.8/operators/enricher/index.html
new file mode 100644
index 0000000..09aa4ef
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/enricher/index.html
@@ -0,0 +1,641 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Enricher - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Enricher";
+    var mkdocs_page_input_path = "operators/enricher.md";
+    var mkdocs_page_url = "/operators/enricher/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Enricher</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#pojo-enricher">POJO Enricher</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-usecase">Operator Usecase</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#limitations">Limitations</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#example">Example</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#advanced">Advanced</a></li>
+                
+                    <li><a class="toctree-l4" href="#caching-mechanism-in-pojoenricher">Caching mechanism in POJOEnricher</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning-of-pojoenricher">Partitioning of POJOEnricher</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Enricher</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="pojo-enricher">POJO Enricher</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator receives a POJO (<a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">Plain Old Java Object</a>) as an incoming tuple and uses an external source to enrich the data in 
+the incoming tuple and finally emits the enriched data as a new enriched POJO.</p>
+<p>POJOEnricher supports enrichment from following external sources:</p>
+<ol>
+<li><strong>JSON File Based</strong> - Reads the file in memory having content stored in JSON format and use that to enrich the data. This can be done using FSLoader implementation.</li>
+<li><strong>JDBC Based</strong> - Any JDBC store can act as an external entity to which enricher can request data for enriching incoming tuples. This can be done using JDBCLoader implementation.</li>
+</ol>
+<p>POJO Enricher does not hold any state and is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="operator-usecase">Operator Usecase</h2>
+<ol>
+<li>Bank <strong><em>transaction records</em></strong> usually contains customerId. For further analysis of transaction one wants the customer name and other customer related information. 
+Such information is present in another database. One could enrich the transaction's record with customer information using POJOEnricher.</li>
+<li><strong><em>Call Data Record (CDR)</em></strong> contains only mobile/telephone numbers of the customer. Customer information is missing in CDR. POJO Enricher can be used to enrich 
+CDR with customer data for further analysis.</li>
+</ol>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-contrib</em></strong></li>
+<li>Available since: <strong><em>3.4.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Packages:<ul>
+<li>Operator: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/contrib/enrich/POJOEnricher.html">com.datatorrent.contrib.enrich.POJOEnricher</a></em></strong></li>
+<li>FSLoader: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/contrib/enrich/FSLoader.html">com.datatorrent.contrib.enrich.FSLoader</a></em></strong></li>
+<li>JDBCLoader: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/contrib/enrich/JDBCLoader.html">com.datatorrent.contrib.enrich.JDBCLoader</a></em></strong></li>
+</ul>
+</li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="properties-of-pojoenricher"><a name="props"></a>Properties of POJOEnricher</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>includeFields</em></td>
+<td>List of fields from database that needs to be added to output POJO.</td>
+<td>List&lt;String></td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>lookupFields</em></td>
+<td>List of fields from input POJO which will form a <em>unique composite</em> key for querying to store</td>
+<td>List&lt;String></td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>store</em></td>
+<td>Backend Store from which data should be queried for enrichment</td>
+<td><a href="#backendStore">BackendStore</a></td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>cacheExpirationInterval</em></td>
+<td>Cache entry expiry in ms. After this time, the lookup to store will be done again for given key</td>
+<td>int</td>
+<td>No</td>
+<td>1 * 60 * 60 * 1000 (1 hour)</td>
+</tr>
+<tr>
+<td><em>cacheCleanupInterval</em></td>
+<td>Interval in ms after which cache will be removed for any stale entries.</td>
+<td>int</td>
+<td>No</td>
+<td>1 * 60 * 60 * 1000 (1 hour)</td>
+</tr>
+<tr>
+<td><em>cacheSize</em></td>
+<td>Number of entry in cache after which eviction will start on each addition based on LRU</td>
+<td>int</td>
+<td>No</td>
+<td>1000</td>
+</tr>
+</tbody>
+</table>
+<h4 id="properties-of-fsloader-backendstore"><a name="backendStore"></a>Properties of FSLoader (BackendStore)</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>fileName</em></td>
+<td>Path of the file, the data from which will be used for enrichment. See <a href="#JSONFileFormat">here</a> for JSON File format.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h4 id="properties-of-jdbcloader-backendstore">Properties of JDBCLoader (BackendStore)</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>databaseUrl</em></td>
+<td>Connection string for connecting to JDBC</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>databaseDriver</em></td>
+<td>JDBC Driver class for connection to JDBC Store. This driver should be there in classpath</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>tableName</em></td>
+<td>Name of the table from which data needs to be retrieved</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>connectionProperties</em></td>
+<td>Comma-separated list of advanced connection properties that need to be passed to JDBC Driver. For eg. <em>prop1:val1,prop2:val2</em></td>
+<td>String</td>
+<td>No</td>
+<td>null</td>
+</tr>
+<tr>
+<td><em>queryStmt</em></td>
+<td>Select statement which will be used to query the data. This is optional parameter in case of advanced query.</td>
+<td>String</td>
+<td>No</td>
+<td>null</td>
+</tr>
+</tbody>
+</table>
+<h3 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>input.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on input port which tells operator the class of POJO which will be incoming</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>output.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>input</em></td>
+<td>Tuple which needs to be enriched are received on this port</td>
+<td>Object (POJO)</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>output</em></td>
+<td>Tuples that are enriched from an external source are emitted on this port</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="limitations">Limitations</h2>
+<p>Current POJOEnricher contains following limitation:</p>
+<ol>
+<li>FSLoader loads the file content in memory. Though it loads only the composite key and composite value in memory, a very large amount of data would bloat the memory and make the operator go OOM. In case the filesize is large, allocate sufficient memory to the POJOEnricher.</li>
+<li>Incoming POJO should be a subset of outgoing POJO.</li>
+<li><a href="#props">includeFields</a> property should contain fields having the same name in the database column as well as the outgoing POJO. For eg. If the name of the database column is "customerName", then the outgoing POJO should contain a field with the same name and the same should be added to includeFields.</li>
+<li><a href="#props">lookupFields</a> property should contain fields having the same name in the database column as well as the incoming POJO. For eg. If the name of the database column is "customerId", then the incoming POJO should contain a field with the same name and the same should be added to lookupFields.</li>
+</ol>
+<h2 id="example">Example</h2>
+<p>Example for POJOEnricher can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/enricher">https://github.com/DataTorrent/examples/tree/master/tutorials/enricher</a></p>
+<h2 id="advanced">Advanced</h2>
+<h3 id="file-format-for-json-based-fsloader"><a name="JSONFileFormat"></a> File format for JSON based FSLoader</h3>
+<p>FSLoader expects file to be in specific format:</p>
+<ol>
+<li>Each line makes one record which becomes part of the store</li>
+<li>Each line is a valid JSON Object where <em>key</em> is name of the field name and <em>value</em> is the field value.</li>
+</ol>
+<p>An example of the format looks like the following:</p>
+<pre><code class="json">{&quot;circleId&quot;:0, &quot;circleName&quot;:&quot;A&quot;}
+{&quot;circleId&quot;:1, &quot;circleName&quot;:&quot;B&quot;}
+{&quot;circleId&quot;:2, &quot;circleName&quot;:&quot;C&quot;}
+{&quot;circleId&quot;:3, &quot;circleName&quot;:&quot;D&quot;}
+{&quot;circleId&quot;:4, &quot;circleName&quot;:&quot;E&quot;}
+{&quot;circleId&quot;:5, &quot;circleName&quot;:&quot;F&quot;}
+{&quot;circleId&quot;:6, &quot;circleName&quot;:&quot;G&quot;}
+{&quot;circleId&quot;:7, &quot;circleName&quot;:&quot;H&quot;}
+{&quot;circleId&quot;:8, &quot;circleName&quot;:&quot;I&quot;}
+{&quot;circleId&quot;:9, &quot;circleName&quot;:&quot;J&quot;}
+</code></pre>
+
+<h3 id="caching-mechanism-in-pojoenricher">Caching mechanism in POJOEnricher</h3>
+<p>POJOEnricher contains a cache which makes the lookup for keys more efficient. This is especially useful when data in the external store is not changing much. 
+However, one should carefully tune the <a href="#props">cacheExpirationInterval</a> property for desirable results.</p>
+<p>On every incoming tuple, POJOEnricher first queries the cache. If the cache contains desired record and is within expiration interval, then it uses that to
+enrich the tuple, otherwise does a lookup to configured store and the return value is used to enrich the tuple. The return value is then cached for composite key and composite value.</p>
+<p>POJOEnricher only caches the required fields for enrichment mechanism and not all fields returned by external store. This ensures optimal use of memory.</p>
+<h3 id="partitioning-of-pojoenricher">Partitioning of POJOEnricher</h3>
+<p>Being a stateless operator, POJOEnricher can use the built-in partitioners present in the Malhar library directly, simply by setting a few properties as follows:</p>
+<h4 id="stateless-partioning-of-pojoenricher">Stateless partitioning of POJOEnricher</h4>
+<p>Stateless partitioning will ensure that POJOEnricher will be partitioned right at the start of the application and will remain partitioned throughout the lifetime of the DAG.
+POJOEnricher can be stateless partitioned by adding following lines to properties.xml:</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the POJOEnricher operator.
+Above lines will partition POJOEnricher statically 2 times. Above value can be changed accordingly to change the number of static partitions.</p>
+<h4 id="dynamic-partitioning-of-pojoenricher">Dynamic Partitioning of POJOEnricher</h4>
+<p>Dynamic partitioning is a feature of Apex platform which changes the partition of the operator based on certain condition.
+POJOEnricher can be dynamically partitioned using 2 out-of-the-box partitioners:</p>
+<h5 id="throughput-based">Throughput based</h5>
+<p>The following code can be added to the populateDAG method of the application to dynamically partition POJOEnricher:</p>
+<pre><code class="java">    StatelessThroughputBasedPartitioner&lt;POJOEnricher&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+    partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+    dag.setAttribute(pojoEnricherObj, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+    dag.setAttribute(pojoEnricherObj, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition POJOEnricher when the throughput changes.
+If the overall throughput of POJOEnricher goes beyond 30000 or less than 10000, the platform will repartition POJOEnricher 
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h5 id="latency-based">Latency based</h5>
+<p>The following code can be added to the populateDAG method of the application to dynamically partition POJOEnricher:</p>
+<pre><code class="java">    StatelessLatencyBasedPartitioner&lt;POJOEnricher&gt; partitioner = new StatelessLatencyBasedPartitioner&lt;&gt;();
+    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+    partitioner.setMaximumLatency(conf.getLong(MAX_THROUGHPUT, 10));
+    partitioner.setMinimumLatency(conf.getLong(MIN_THROUGHPUT, 3));
+    dag.setAttribute(pojoEnricherObj, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+    dag.setAttribute(pojoEnricherObj, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition POJOEnricher when the overall latency of POJOEnricher changes.
+If the overall latency of POJOEnricher goes beyond 10 ms or less than 3 ms, the platform will repartition POJOEnricher 
+to balance latency of a single partition to be between 3 ms and 10 ms.
+CooldownMillis of 10000 will be used as the threshold time for which the latency change is observed.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../fsInputOperator/" class="btn btn-neutral float-right" title="File Input">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../deduper/" class="btn btn-neutral" title="Deduper"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../deduper/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../fsInputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/file_output/index.html b/content/docs/malhar-3.8/operators/file_output/index.html
new file mode 100644
index 0000000..12837ed
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/file_output/index.html
@@ -0,0 +1,476 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>File Output - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "File Output";
+    var mkdocs_page_input_path = "operators/file_output.md";
+    var mkdocs_page_url = "/operators/file_output/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">File Output</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#abstractfileoutputoperator">AbstractFileOutputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#persisting-data-to-files">Persisting data to files</a></li>
+                
+                    <li><a class="toctree-l4" href="#automatic-rotation">Automatic rotation</a></li>
+                
+                    <li><a class="toctree-l4" href="#fault-tolerance">Fault-tolerance</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>File Output</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="abstractfileoutputoperator">AbstractFileOutputOperator</h1>
+<p>The abstract file output operator in Apache Apex Malhar library &mdash; <a href="https://github.com/apache/incubator-apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/io/fs/AbstractFileOutputOperator.java"><code>AbstractFileOutputOperator</code></a> writes streaming data to files. The main features of this operator are:</p>
+<ol>
+<li>Persisting data to files.</li>
+<li>Automatic rotation of files based on:<br />
+  a. maximum length of a file.<br />
+  b. time-based rotation where time is specified using a count of application windows.</li>
+<li>Fault-tolerance.</li>
+<li>Compression and encryption of data before it is persisted.</li>
+</ol>
+<p>In this tutorial we will cover the details of the basic structure and implementation of all the above features in <code>AbstractFileOutputOperator</code>. Configuration items related to each feature are discussed as they are introduced in the section of that feature.</p>
+<h2 id="persisting-data-to-files">Persisting data to files</h2>
+<p>The principal function of this operator is to persist tuples to files efficiently. These files are created under a specific directory on the file system. The relevant configuration item is:</p>
+<p><strong>filePath</strong>: path specifying the directory where files are written.</p>
+<p>Different types of file system that are implementations of <code>org.apache.hadoop.fs.FileSystem</code> are supported. The file system instance which is used for creating streams is constructed from the <code>filePath</code> URI.</p>
+<pre><code class="java">FileSystem.newInstance(new Path(filePath).toUri(), new Configuration())
+</code></pre>
+
+<p>Tuples may belong to different files therefore expensive IO operations like creating multiple output streams, flushing of data to disk, and closing streams are handled carefully.</p>
+<h3 id="ports">Ports</h3>
+<ul>
+<li><code>input</code>: the input port on which tuples to be persisted are received.</li>
+</ul>
+<h3 id="streamscache"><code>streamsCache</code></h3>
+<p>This transient state caches output streams per file in memory. The file to which the data is appended may change with incoming tuples. It will be highly inefficient to keep re-opening streams for a file just because tuples for that file are interleaved with tuples for another file. Therefore, the operator maintains a cache of limited size with open output streams.</p>
+<p><code>streamsCache</code> is of type <code>com.google.common.cache.LoadingCache</code>. A <code>LoadingCache</code> has an attached <code>CacheLoader</code> which is responsible to load value of a key when the key is not present in the cache. Details are explained here- <a href="https://github.com/google/guava/wiki/CachesExplained">CachesExplained</a>.</p>
+<p>The operator constructs this cache in <code>setup(...)</code>. It is built with the following configuration items:</p>
+<ul>
+<li><strong>maxOpenFiles</strong>: maximum size of the cache. The cache evicts entries that haven't been used recently when the cache size is approaching this limit. <em>Default</em>: 100</li>
+<li><strong>expireStreamAfterAcessMillis</strong>: expires streams after the specified duration has passed since the stream was last accessed. <em>Default</em>: value of attribute- <code>OperatorContext.SPIN_MILLIS</code>.</li>
+</ul>
+<p>An important point to note here is that the guava cache does not perform cleanup and evict values asynchronously, that is, instantly after a value expires. Instead, it performs small amounts of maintenance during write operations, or during occasional read operations if writes are rare.</p>
+<h4 id="cacheloader">CacheLoader</h4>
+<p><code>streamsCache</code> is created with a <code>CacheLoader</code> that opens an <code>FSDataOutputStream</code> for a file which is not in the cache. The output stream is opened in either <code>append</code> or <code>create</code> mode and the basic logic to determine this is explained by the simple diagram below.</p>
+<p><img alt="Opening an output stream" src="../images/fileoutput/diagram1.png" /></p>
+<p>This process gets complicated when fault-tolerance (writing to temporary files)  and rotation is added.</p>
+<p>Following are few configuration items used for opening the streams:</p>
+<ul>
+<li><strong>replication</strong>: specifies the replication factor of the output files. <em>Default</em>: <code>fs.getDefaultReplication(new Path(filePath))</code></li>
+<li><strong>filePermission</strong>: specifies the permission of the output files. The permission is an octal number similar to that used by the Unix chmod command. <em>Default</em>: 0777</li>
+</ul>
+<h4 id="removallistener">RemovalListener</h4>
+<p>A <code>Guava</code> cache also allows specification of removal listener which can perform some operation when an entry is removed from the cache. Since <code>streamsCache</code> is of limited size and also has time-based expiry enabled, it is imperative that when a stream is evicted from the cache it is closed properly. Therefore, we attach a removal listener to <code>streamsCache</code> which closes the stream when it is evicted.</p>
+<h3 id="setupoperatorcontext-context"><code>setup(OperatorContext context)</code></h3>
+<p>During setup the following main tasks are performed:</p>
+<ol>
+<li>FileSystem instance is created.</li>
+<li>The cache of streams is created.</li>
+<li>Files are recovered (see Fault-tolerance section).</li>
+<li>Stray part files are cleaned (see Automatic rotation section).</li>
+</ol>
+<h3 id="processtupleinput-tuple"><a name="processTuple"></a><code>processTuple(INPUT tuple)</code></h3>
+<p>The code snippet below highlights the basic steps of processing a tuple.</p>
+<pre><code class="java">protected void processTuple(INPUT tuple)
+{  
+  //which file to write to is derived from the tuple.
+  String fileName = getFileName(tuple);  
+
+  //streamsCache is queried for the output stream. If the stream is already opened then it is returned immediately otherwise the cache loader creates one.
+  FilterOutputStream fsOutput = streamsCache.get(fileName).getFilterStream();
+
+  byte[] tupleBytes = getBytesForTuple(tuple);
+
+  fsOutput.write(tupleBytes);
+}
+</code></pre>
+
+<h3 id="endwindow"><a name="endWindow"></a>endWindow()</h3>
+<p>It should be noted that while processing a tuple we do not flush the stream after every write. Since flushing is expensive it is done periodically for all the open streams in the operator's <code>endWindow()</code>.</p>
+<pre><code class="java">Map&lt;String, FSFilterStreamContext&gt; openStreams = streamsCache.asMap();
+for (FSFilterStreamContext streamContext: openStreams.values()) {
+  ...
+  //this flushes the stream
+  streamContext.finalizeContext();
+  ...
+}
+</code></pre>
+
+<p><code>FSFilterStreamContext</code> will be explained with compression and encryption.</p>
+<h3 id="teardown"><a name="teardown"></a>teardown()</h3>
+<p>When any operator in a DAG fails then the application master invokes <code>teardown()</code> for that operator and its downstream operators. In <code>AbstractFileOutputOperator</code> we have a bunch of open streams in the cache and the operator (acting as HDFS client) holds leases for all the corresponding files. It is important to release these leases for clean re-deployment. Therefore, we try to close all the open streams in <code>teardown()</code>.</p>
+<h2 id="automatic-rotation">Automatic rotation</h2>
+<p>In a streaming application where data is being continuously processed, when this output operator is used, data will be continuously written to an output file. The users may want to be able to take the data from time to time to use it, copy it out of Hadoop or do some other processing. Having all the data in a single file makes it difficult as the user needs to keep track of how much data has been read from the file each time so that the same data is not read again. Also users may alre [...]
+<p>To help solve these problems the operator supports creating many smaller files instead of writing to just one big file. Data is written to a file and when some condition is met the file is finalized and data is written to a new file. This is called file rotation. The user can determine when the file gets rotated. Each of these files is called a part file as they contain portion of the data.</p>
+<h3 id="part-filename">Part filename</h3>
+<p>The filename for a part file is formed by using the original file name and the part number. The part number starts from 0 and is incremented each time a new part file is created. The default filename has the format, assuming origfile represents the original filename and partnum represents the part number,</p>
+<p><code>origfile.partnum</code></p>
+<p>This naming scheme can be changed by the user. It can be done so by overriding the following method</p>
+<pre><code class="java">protected String getPartFileName(String fileName, int part)
+</code></pre>
+
+<p>This method is passed the original filename and part number as arguments and should return the part filename.</p>
+<h3 id="mechanisms">Mechanisms</h3>
+<p>The user has a couple of ways to specify when a file gets rotated. First is based on size and second on time. In the first case the files are limited by size and in the second they are rotated by time.</p>
+<h4 id="size-based">Size Based</h4>
+<p>With size based rotation the user specifies a size limit. Once the size of the current file reaches this limit the file is rotated. The size limit can be specified by setting the following property</p>
+<p><code>maxLength</code></p>
+<p>Like any other property this can be set in Java application code or in the property file.</p>
+<h4 id="time-based">Time Based</h4>
+<p>In time based rotation user specifies a time interval. This interval is specified as number of application windows. The files are rotated periodically once the specified number of application windows have elapsed. Since the interval is application window based it is not always exactly constant time. The interval can be specified using the following property</p>
+<p><code>rotationWindows</code></p>
+<h3 id="setupoperatorcontext-context_1"><code>setup(OperatorContext context)</code></h3>
+<p>When an operator is being started there may be stray part files and they need to be cleaned up. One common scenario, when these could be present, is in the case of failure, where a node running the operator failed and a previous instance of the operator was killed. This cleanup and other initial processing for the part files happens in the operator setup. The following diagram describes this process</p>
+<p><img alt="Rotation setup" src="../images/fileoutput/FileRotation.png" /></p>
+<h2 id="fault-tolerance">Fault-tolerance</h2>
+<p>There are two issues that should be addressed in order to make the operator fault-tolerant:</p>
+<ol>
+<li>
+<p>The operator flushes data to the filesystem every application window. This implies that after a failure when the operator is re-deployed and tuples of a window are replayed, then duplicate data will be saved to the files. This is handled by recording how much the operator has written to each file every window in a state that is checkpointed and truncating files back to the recovery checkpoint after re-deployment.</p>
+</li>
+<li>
+<p>While writing to HDFS, if the operator gets killed and didn't have the opportunity to close a file, then later when it is redeployed it will attempt to truncate/restore that file. Restoring a file may fail because the lease that the previous process (operator instance before failure) had acquired from namenode to write to a file may still linger and therefore there can be exceptions in acquiring the lease again by the new process (operator instance after failure). This is handled by a [...]
+</li>
+<li><strong>alwaysWriteToTmp</strong>: enables/disables writing to a temporary file. <em>Default</em>: true.</li>
+</ol>
+<p>Most of the complexity in the code comes from making this operator fault-tolerant.</p>
+<h3 id="checkpointed-states-needed-for-fault-tolerance">Checkpointed states needed for fault-tolerance</h3>
+<ul>
+<li>
+<p><code>endOffsets</code>: contains the size of each file as it is being updated by the operator. It helps the operator to restore a file during recovery in operator <code>setup(...)</code> and is also used while loading a stream to find out if the operator has seen a file before.</p>
+</li>
+<li>
+<p><code>fileNameToTmpName</code>: contains the name of the temporary file per actual file. It is needed because the name of a temporary file is random. They are named based on the timestamp when the stream is created. During recovery the operator needs to know the temp file which it was writing to and if it needs restoration then it creates a new temp file and updates this mapping.</p>
+</li>
+<li>
+<p><code>finalizedFiles</code>: contains set of files which were requested to be finalized per window id.</p>
+</li>
+<li>
+<p><code>finalizedPart</code>: contains the latest <code>part</code> of each file which was requested to be finalized.</p>
+</li>
+</ul>
+<p>The use of <code>finalizedFiles</code> and <code>finalizedPart</code> are explained in detail under <a href="#requestFinalize"><code>requestFinalize(...)</code></a> method.</p>
+<h3 id="recovering-files">Recovering files</h3>
+<p>When the operator is re-deployed, it checks in its <code>setup(...)</code> method if the state of a file which it has seen before the failure is consistent with the file's state on the file system, that is, the size of the file on the file system should match the size in the <code>endOffsets</code>. When it doesn't the operator truncates the file.</p>
+<p>For example, let's say the operator wrote 100 bytes to test1.txt by the end of window 10. It wrote another 20 bytes by the end of window 12 but failed in window 13. When the operator gets re-deployed it is restored with window 10 (recovery checkpoint) state. In the previous run, by the end of window 10, the size of file on the filesystem was 100 bytes but now it is 120 bytes. Tuples for windows 11 and 12 are going to be replayed. Therefore, in order to avoid writing duplicates to test [...]
+<h3 id="requestfinalizestring-filename"><a name="requestFinalize"></a><code>requestFinalize(String fileName)</code></h3>
+<p>When the operator is always writing to temporary files (in order to avoid HDFS Lease exceptions), then it is necessary to rename the temporary files to the actual files once it has been determined that the files are closed. This is referred to as <em>finalization</em> of files and the method allows the user code to specify when a file is ready for finalization.</p>
+<p>In this method, the requested file (or in the case of rotation &mdash; all the file parts including the latest open part which have not yet been requested for finalization) are registered for finalization. Registration is basically adding the file names to <code>finalizedFiles</code> state and updating <code>finalizedPart</code>.</p>
+<p>The process of <em>finalization</em> of all the files which were requested till the window <em>w</em> is deferred till window <em>w</em> is committed. This is because until a window is committed it can be replayed after a failure which means that a file can be open for writing even after it was requested for finalization.</p>
+<p>When rotation is enabled, part files are requested for finalization as and when they get completed. However, when rotation is not enabled, user code needs to invoke this method since the knowledge of when a file is closed is unknown to this abstract operator.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../file_splitter/" class="btn btn-neutral float-right" title="File Splitter">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../fsInputOperator/" class="btn btn-neutral" title="File Input"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../fsInputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../file_splitter/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/file_splitter/index.html b/content/docs/malhar-3.8/operators/file_splitter/index.html
new file mode 100644
index 0000000..b83f23a
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/file_splitter/index.html
@@ -0,0 +1,493 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>File Splitter - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "File Splitter";
+    var mkdocs_page_input_path = "operators/file_splitter.md";
+    var mkdocs_page_url = "/operators/file_splitter/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">File Splitter</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#file-splitter">File Splitter</a></li>
+                
+                    <li><a class="toctree-l4" href="#why-is-it-needed">Why is it needed?</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractfilesplitter">AbstractFileSplitter</a></li>
+                
+                    <li><a class="toctree-l4" href="#filesplitterbase">FileSplitterBase</a></li>
+                
+                    <li><a class="toctree-l4" href="#filesplitterinput">FileSplitterInput</a></li>
+                
+                    <li><a class="toctree-l4" href="#handling-of-split-records">Handling of split records</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>File Splitter</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="file-splitter">File Splitter</h1>
+<p>This is a simple operator whose main function is to split a file virtually and create metadata describing the files and the splits. </p>
+<h2 id="why-is-it-needed">Why is it needed?</h2>
+<p>It is a common operation to read a file and parse it. This operation can be parallelized by having multiple partitions of such operators and each partition operating on different files. However, at times when a file is large then a single partition reading it can become a bottleneck.
+In these cases, throughput can be increased if instances of the partitioned operator can read and parse non-overlapping sets of file blocks. This is where file splitter comes in handy. It creates metadata of blocks of file which serves as tasks handed out to downstream operator partitions. 
+The downstream partitions can read/parse the block without the need of interacting with other partitions.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="FileSplitter class hierarchy" src="../images/filesplitter/classdiagram.png" /></p>
+<h2 id="abstractfilesplitter">AbstractFileSplitter</h2>
+<p>The abstract implementation defines the logic of processing <code>FileInfo</code>. This comprises the following tasks -  </p>
+<ul>
+<li>
+<p>building <code>FileMetadata</code> per file and emitting it. This metadata contains the file information such as filepath, no. of blocks in it, length of the file, all the block ids, etc.</p>
+</li>
+<li>
+<p>creating <code>BlockMetadataIterator</code> from <code>FileMetadata</code>. The iterator lazy-loads the block metadata when needed. We use an iterator because the no. of blocks in a file can be huge if the block size is small and loading all of them at once in memory may cause out of memory errors.</p>
+</li>
+<li>
+<p>retrieving <code>BlockMetadata.FileBlockMetadata</code> from the block metadata iterator and emitting it. The FileBlockMetadata contains the block id, start offset of the block, length of file in the block, etc. The number of block metadata emitted per window are controlled by <code>blocksThreshold</code> setting which by default is 1.  </p>
+</li>
+</ul>
+<p>The main utility method that performs all the above tasks is the <a href="#process_method"><code>process()</code></a> method. Concrete implementations can invoke this method whenever they have data to process.</p>
+<h3 id="ports">Ports</h3>
+<p>Declares only output ports on which file metadata and block metadata are emitted.</p>
+<ul>
+<li>filesMetadataOutput: metadata for each file is emitted on this port. </li>
+<li>blocksMetadataOutput: metadata for each block is emitted on this port. </li>
+</ul>
+<h3 id="process-method"><a name="process_method"></a><code>process()</code> method</h3>
+<p>When process() is invoked, any pending blocks from the current file are emitted on the 'blocksMetadataOutput' port. If the threshold for blocks per window is still not met then a new input file is processed - corresponding metadata is emitted on 'filesMetadataOutput' and more of its blocks are emitted. This operation is repeated until the <code>blocksThreshold</code> is reached or there are no more new files.</p>
+<pre><code class="java">  protected void process()
+  {
+    if (blockMetadataIterator != null &amp;&amp; blockCount &lt; blocksThreshold) {
+      emitBlockMetadata();
+    }
+
+    FileInfo fileInfo;
+    while (blockCount &lt; blocksThreshold &amp;&amp; (fileInfo = getFileInfo()) != null) {
+      if (!processFileInfo(fileInfo)) {
+        break;
+      }
+    }
+  }
+</code></pre>
+
+<h3 id="abstract-methods">Abstract methods</h3>
+<ul>
+<li>
+<p><code>FileInfo getFileInfo()</code>: called from within the <code>process()</code> and provides the next file to process.</p>
+</li>
+<li>
+<p><code>long getDefaultBlockSize()</code>: provides the block size which is used when user hasn't configured the size.</p>
+</li>
+<li>
+<p><code>FileStatus getFileStatus(Path path)</code>: provides the <code>org.apache.hadoop.fs.FileStatus</code> instance for a path.   </p>
+</li>
+</ul>
+<h3 id="configuration">Configuration</h3>
+<ol>
+<li><strong>blockSize</strong>: size of a block.</li>
+<li><strong>blocksThreshold</strong><a name="blocksThreshold"></a>: threshold on the number of blocks emitted by file splitter every window. This setting is used for throttling the work for downstream operators.</li>
+</ol>
+<h2 id="filesplitterbase">FileSplitterBase</h2>
+<p>Simple operator that receives tuples of type <code>FileInfo</code> on its <code>input</code> port. <code>FileInfo</code> contains the information (currently just the file path) about the file which this operator uses to create file metadata and block metadata.</p>
+<h3 id="example-application">Example application</h3>
+<p>This is a simple sub-dag that demonstrates how FileSplitterBase can be plugged into an application.
+<img alt="Application with FileSplitterBase" src="../images/filesplitter/baseexample.png" /></p>
+<p>The upstream operator emits tuples of type <code>FileInfo</code> on its output port which is connected to splitter input port. The downstream receives tuples of type <code>BlockMetadata.FileBlockMetadata</code> from the splitter's block metadata output port.</p>
+<pre><code class="java">public class ApplicationWithBaseSplitter implements StreamingApplication
+{
+  @Override
+  public void populateDAG(DAG dag, Configuration configuration)
+  {
+    JMSInput input = dag.addOperator(&quot;Input&quot;, new JMSInput());
+    FileSplitterBase splitter = dag.addOperator(&quot;Splitter&quot;, new FileSplitterBase());
+    FSSliceReader blockReader = dag.addOperator(&quot;BlockReader&quot;, new FSSliceReader());
+    ...
+    dag.addStream(&quot;file-info&quot;, input.output, splitter.input);
+    dag.addStream(&quot;block-metadata&quot;, splitter.blocksMetadataOutput, blockReader.blocksMetadataInput);
+    ...
+  }
+
+  public static class JMSInput extends AbstractJMSInputOperator&lt;AbstractFileSplitter.FileInfo&gt;
+  {
+
+    public final transient DefaultOutputPort&lt;AbstractFileSplitter.FileInfo&gt; output = new DefaultOutputPort&lt;&gt;();
+
+    @Override
+    protected AbstractFileSplitter.FileInfo convert(Message message) throws JMSException
+    {
+      //assuming the message is a text message containing the absolute path of the file.
+      return new AbstractFileSplitter.FileInfo(null, ((TextMessage)message).getText());
+    }
+
+    @Override
+    protected void emit(AbstractFileSplitter.FileInfo payload)
+    {
+      output.emit(payload);
+    }
+  }
+}
+</code></pre>
+
+<h3 id="ports_1">Ports</h3>
+<p>Declares an input port on which it receives tuples from the upstream operator. Output ports are inherited from AbstractFileSplitter.</p>
+<ul>
+<li>input: non optional port on which tuples of type <code>FileInfo</code> are received.</li>
+</ul>
+<h3 id="configuration_1">Configuration</h3>
+<ol>
+<li><strong>file</strong>: path of the file from which the filesystem is inferred. FileSplitter creates an instance of <code>org.apache.hadoop.fs.FileSystem</code> which is why this path is needed.  </li>
+</ol>
+<pre><code>FileSystem.newInstance(new Path(file).toUri(), new Configuration());
+</code></pre>
+
+<p>The fs instance is then used to fetch the default block size and <code>org.apache.hadoop.fs.FileStatus</code> for each file path.</p>
+<h2 id="filesplitterinput">FileSplitterInput</h2>
+<p>This is an input operator that discovers files itself. The scanning of the directories for new files is asynchronous which is handled by <code>TimeBasedDirectoryScanner</code>. The function of TimeBasedDirectoryScanner is to periodically scan specified directories and find files which were newly added or modified. The interaction between the operator and the scanner is depicted in the diagram below.</p>
+<p><img alt="Interaction between operator and scanner" src="../images/filesplitter/sequence.png" /></p>
+<h3 id="example-application_1">Example application</h3>
+<p>This is a simple sub-dag that demonstrates how FileSplitterInput can be plugged into an application.</p>
+<p><img alt="Application with FileSplitterInput" src="../images/filesplitter/inputexample.png" /></p>
+<p>Splitter is the input operator here that sends block metadata to the downstream BlockReader.</p>
+<pre><code class="java">  @Override
+  public void populateDAG(DAG dag, Configuration configuration)
+  {
+    FileSplitterInput input = dag.addOperator(&quot;Input&quot;, new FileSplitterInput());
+    FSSliceReader reader = dag.addOperator(&quot;Block Reader&quot;, new FSSliceReader());
+    ...
+    dag.addStream(&quot;block-metadata&quot;, input.blocksMetadataOutput, reader.blocksMetadataInput);
+    ...
+  }
+
+</code></pre>
+
+<h3 id="ports_2">Ports</h3>
+<p>Since it is an input operator there are no input ports and output ports are inherited from AbstractFileSplitter.</p>
+<h3 id="configuration_2">Configuration</h3>
+<ol>
+<li><strong>scanner</strong>: the component that scans directories asynchronously. It is of type <code>com.datatorrent.lib.io.fs.FileSplitter.TimeBasedDirectoryScanner</code>. The basic implementation of TimeBasedDirectoryScanner can be customized by users.  </li>
+</ol>
+<p>a. <strong>files</strong>: comma separated list of directories to scan.  </p>
+<p>b. <strong>recursive</strong>: flag that controls whether the directories should be scanned recursively.  </p>
+<p>c. <strong>scanIntervalMillis</strong>: interval specified in milliseconds after which another scan iteration is triggered.  </p>
+<p>d. <strong>filePatternRegularExp</strong>: regular expression for accepted file names.  </p>
+<p>e. <strong>trigger</strong>: a flag that triggers a scan iteration instantly. If the scanner thread is idling then it will initiate a scan immediately otherwise if a scan is in progress, then the new iteration will be triggered immediately after the completion of current one.
+2. <strong>idempotentStorageManager</strong>: by default FileSplitterInput is idempotent. 
+Idempotency ensures that the operator will process the same set of files/blocks in a window if it has seen that window previously, i.e., before a failure. For example, let's say the operator completed window 10 and failed somewhere between window 11. If the operator gets restored at window 10 then it will process the same file/block again in window 10 which it did in the previous run before the failure. Idempotency is important but comes with higher cost because at the end of each window [...]
+<h2 id="handling-of-split-records">Handling of split records</h2>
+<p>Splitting of files to create tasks for downstream operator needs to be a simple operation that doesn't consume a lot of resources and is fast. This is why the file splitter doesn't open files to read. The downside of that is if the file contains records then a record may split across adjacent blocks. Handling of this is left to the downstream operator.</p>
+<p>We have created Block readers in Apex-malhar library that handle line splits efficiently. The 2 line readers- <code>AbstractFSLineReader</code> and <code>AbstractFSReadAheadLineReader</code> can be found here <a href="https://github.com/apache/incubator-apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/io/block/AbstractFSBlockReader.java">AbstractFSBlockReader</a>.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../filter/" class="btn btn-neutral float-right" title="Filter">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../file_output/" class="btn btn-neutral" title="File Output"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../file_output/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../filter/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/filter/index.html b/content/docs/malhar-3.8/operators/filter/index.html
new file mode 100644
index 0000000..bc3965f
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/filter/index.html
@@ -0,0 +1,449 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Filter - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Filter";
+    var mkdocs_page_input_path = "operators/filter.md";
+    var mkdocs_page_url = "/operators/filter/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Filter</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#filter">Filter</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-usecase">Operator Usecase</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#limitations">Limitations</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Filter</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="filter">Filter</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator receives a POJO (<a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">Plain Old Java Object</a>) as an incoming tuple
+and based on the filter condition it emits filtered tuples on one output port and the rest on another output port.</p>
+<p>Filter operator supports quasi Java expressions to specify filter rule.</p>
+<p>Filter operator does not hold any state and is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="operator-usecase">Operator Usecase</h2>
+<ol>
+<li><strong><em>Customer data</em></strong> usually contains a field customer category/segment. One wants some analysis to be done for specific customer segment. One could use this filter operator to filter the records based on segment for some analysis for specific customer segment. </li>
+<li><strong><em>Log data</em></strong> processing pipeline may want to filter logs from specific machine/router/switch.</li>
+</ol>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em><a href="https://github.com/apache/apex-malhar/tree/master/library">malhar-library</a></em></strong></li>
+<li>Available since: <strong><em>3.5.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Packages:<ul>
+<li>Operator: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/filter/FilterOperator.html">com.datatorrent.lib.filter.FilterOperator</a></em></strong></li>
+</ul>
+</li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="properties-of-filteroperator"><a name="props"></a>Properties of FilterOperator</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>condition</em></td>
+<td>condition/expression with which Filtering is done.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>additionalExpressionFunctions</em></td>
+<td>List of import classes/method that should be made statically available to expression to use.</td>
+<td><code>List&lt;String&gt;</code></td>
+<td>No</td>
+<td>Empty List</td>
+</tr>
+</tbody>
+</table>
+<h3 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>port.input.attr.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on the input port indicates the class of the POJO that the incoming tuple is an instance of</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Connection Required</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>input</em></td>
+<td>Tuple which needs to be filtered are received on this port</td>
+<td>Object (POJO)</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>truePort</em></td>
+<td>Tuples which satisfy the <a href="#props">condition</a> are emitted on this port</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>falsePort</em></td>
+<td>Tuples which do not satisfy the <a href="#props">condition</a> are emitted on this port</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="limitations">Limitations</h2>
+<p>The current <code>FilterOperator</code> has the following limitation:</p>
+<ol>
+<li><a href="https://issues.apache.org/jira/browse/APEXMALHAR-2175">APEXMALHAR-2175</a> : Filter condition is not able to correctly handle java reserved words.</li>
+</ol>
+<h2 id="example">Example</h2>
+<p>Example for <code>FilterOperator</code> can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/filter">https://github.com/DataTorrent/examples/tree/master/tutorials/filter</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../fixedWidthParserOperator/" class="btn btn-neutral float-right" title="Fixed Width Parser">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../file_splitter/" class="btn btn-neutral" title="File Splitter"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../file_splitter/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../fixedWidthParserOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/fixedWidthParserOperator/index.html b/content/docs/malhar-3.8/operators/fixedWidthParserOperator/index.html
new file mode 100644
index 0000000..4f53fc4
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/fixedWidthParserOperator/index.html
@@ -0,0 +1,613 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Fixed Width Parser - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Fixed Width Parser";
+    var mkdocs_page_input_path = "operators/fixedWidthParserOperator.md";
+    var mkdocs_page_url = "/operators/fixedWidthParserOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Fixed Width Parser</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#fixed-width-parser-operator">Fixed Width Parser Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Fixed Width Parser</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="fixed-width-parser-operator">Fixed Width Parser Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator is designed to parse fixed width records and construct a map or concrete Java class also known as <a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">"POJO"</a> out of it. User needs to provide the schema to describe the fixed width data. The incoming records will be parsed according to the schema and either a map or a POJO (or both) is emitted.
+Invalid records will be emitted on the error port along with an error message.</p>
+<p><strong>Note</strong>: field names in the schema must match field names of the POJO and must be in the same order as it appears in the incoming data.</p>
+<p>FixedWidthParser is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="" src="../images/fixedWidthParser/fixedWidthParser.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location:<strong><em>malhar-contrib</em></strong></li>
+<li>Available since:<strong><em>3.8.0</em></strong></li>
+<li>Operator state:<strong><em>Evolving</em></strong></li>
+<li>Java Package:<a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/FixedWidthParser.java">com.datatorrent.contrib.parser.FixedWidthParser</a></li>
+</ol>
+<h2 id="properties-of-fixedwidthparser"><a name="props"></a>Properties of FixedWidthParser</h2>
+<p>Data in a fixed-width text file is arranged in rows and columns, with one entry per row. Fixed width record is one row in a fixed-width file.  Each column has a fixed width, specified in characters, which determines the maximum amount of data it can contain.  No delimiters are used to separate the fields in the file.  Instead, the data is left- or right-justified in its column as specified by the <code>alignment</code> value of the schema and the remaining space is filled by the padding character.</p>
+<p>User needs to set the schema which describes fixed width data.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>jsonSchema</em></td>
+<td><a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/FixedWidthSchema.java">Schema</a>  describing fixed width data. Based on schema definition the operator will parse the incoming record to object map and POJO. Valid records will be emitted as POJO / map while invalid ones are emitted on error port with error message.</td>
+<td>String</td>
+<td>YES</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<p>User can set this property in <code>properties.xml</code> in the following way:</p>
+<pre><code class="xml">&lt;property&gt;
+    &lt;name&gt;
+      dt.application.{ApplicationName}.operator.{OperatorName}.prop.jsonSchema
+    &lt;/name&gt;
+    &lt;value&gt;
+    {
+      &quot;padding&quot;: &quot;_&quot;,
+      &quot;alignment&quot;: &quot;left&quot;,
+      &quot;fields&quot;:
+      [
+        {
+          &quot;name&quot;: &quot;adId&quot;,
+          &quot;type&quot;: &quot;Integer&quot;,
+          &quot;length&quot;: &quot;3&quot;,
+          &quot;padding&quot;: &quot;0&quot;
+        },
+        {
+          &quot;name&quot;: &quot;campaignId&quot;,
+          &quot;type&quot;: &quot;Integer&quot;,
+          &quot;length&quot;: &quot;3&quot;,
+          &quot;padding&quot;: &quot; &quot;
+        },
+        {
+          &quot;name&quot;: &quot;adName&quot;,
+          &quot;type&quot;: &quot;String&quot;,
+          &quot;length&quot;: &quot;10&quot;,
+          &quot;alignment&quot;:&quot;right&quot;
+        },
+        {
+          &quot;name&quot;: &quot;bidPrice&quot;,
+          &quot;type&quot;: &quot;Double&quot;,
+          &quot;length&quot;: &quot;3&quot;
+        },
+        {
+          &quot;name&quot;: &quot;startDate&quot;,
+          &quot;type&quot;: &quot;Date&quot;,
+          &quot;format&quot;: &quot;yyyy-MM-dd HH:mm:ss&quot;,
+          &quot;length&quot;: &quot;19&quot;
+        },
+        {
+          &quot;name&quot;: &quot;endDate&quot;,
+          &quot;type&quot;: &quot;Date&quot;,
+          &quot;format&quot;: &quot;dd/MM/yyyy&quot;,
+          &quot;length&quot;: &quot;10&quot;
+        },
+        {
+          &quot;name&quot;: &quot;securityCode&quot;,
+          &quot;type&quot;: &quot;Long&quot;,
+          &quot;length&quot;: &quot;5&quot;
+        },
+        {
+          &quot;name&quot;: &quot;active&quot;,
+          &quot;type&quot;: &quot;Boolean&quot;,
+          &quot;length&quot;: &quot;5&quot;,
+          &quot;trueValue&quot;: &quot;true&quot;,
+          &quot;falseValue&quot;: &quot;false&quot;
+        },
+        {
+          &quot;name&quot;: &quot;optimized&quot;,
+          &quot;type&quot;: &quot;Boolean&quot;,
+          &quot;length&quot;: &quot;1&quot;,
+          &quot;trueValue&quot;: &quot;y&quot;,
+          &quot;falseValue&quot;: &quot;n&quot;
+        },
+        {
+          &quot;name&quot;: &quot;parentCampaign&quot;,
+          &quot;type&quot;: &quot;String&quot;,
+          &quot;length&quot;: &quot;10&quot;
+        },
+        {
+          &quot;name&quot;: &quot;weatherTargeted&quot;,
+          &quot;type&quot;: &quot;Character&quot;,
+          &quot;length&quot;: &quot;1&quot;
+        }
+      ]
+    }
+    &lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>Where {OperatorName} is the name of the Operator and {ApplicationName} is the name of the application.
+As explained earlier, padding is a character used in the incoming records to fill the fixed width if required. The user has the flexibility to specify a single padding character for the entire file, or can choose to provide a separate padding character for each field (column of the record). The padding value for a field (if specified) overrides the global padding value (if specified) for the entire file.
+Similar to the padding character, the user also has the flexibility to define the alignment of the incoming records: the user can choose whether the record is left, centre, or right aligned. Note that currently only the British spelling 'centre' is accepted.</p>
+<p>The sample JSON schema for records having 3 fields 'Occupation', 'Age' and 'Gender' with field widths 20, 2 and 6, padding characters '#', '$' and '@' and alignments 'left', 'centre' and 'right' will be as follows:</p>
+<pre><code>{
+  &quot;fields&quot;:
+  [
+    {
+      &quot;name&quot;: &quot;Occupation&quot;,
+      &quot;type&quot;: &quot;String&quot;,
+      &quot;length&quot;: &quot;20&quot;,
+      &quot;padding&quot;: &quot;#&quot;,
+      &quot;alignment&quot;:&quot;left&quot;
+    },
+    {
+      &quot;name&quot;: &quot;Age&quot;,
+      &quot;type&quot;: &quot;Integer&quot;,
+      &quot;length&quot;: &quot;2&quot;,
+      &quot;padding&quot;: &quot;$&quot;,
+      &quot;alignment&quot;:&quot;centre&quot;
+    },
+    {
+      &quot;name&quot;: &quot;Gender&quot;,
+      &quot;type&quot;: &quot;String&quot;,
+      &quot;length&quot;: &quot;6&quot;,
+      &quot;padding&quot;: &quot;@&quot;,
+      &quot;alignment&quot;:&quot;right&quot;
+    }
+  ]}
+</code></pre>
+
+<p>The corresponding record with values for 'Occupation' as Engineer, 'Age' as 30 and 'Gender' as Male would be as follows:</p>
+<pre><code>Engineer############30@@Male
+</code></pre>
+
+<h2 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted</td>
+<td>Class</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<p>User can set this property in <code>properties.xml</code> in the following way:
+In the examples below, {OperatorName} is the name of the Operator, {ApplicationName} is the name of the application and "com.datatorrent.tutorial.fixedwidthparser.Ad" is the fully qualified name of the Tuple class</p>
+<pre><code class="xml">&lt;property&gt;
+    &lt;name&gt;dt.application.{ApplicationName}.operator.{OperatorName}.port.out.attr.TUPLE_CLASS&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.tutorial.fixedwidthparser.Ad&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>Following code can be added to <code>populateDAG()</code> method of application to set Tuple Class:</p>
+<pre><code class="java">dag.setOutputPortAttribute({OperatorName}.out, Context.PortContext.TUPLE_CLASS, com.datatorrent.tutorial.fixedwidthparser.Ad.class);
+</code></pre>
+
+<h2 id="supported-datatypes-in-schema"><a name="dataTypes"></a>Supported DataTypes in Schema</h2>
+<ul>
+<li>Integer</li>
+<li>Long</li>
+<li>Double</li>
+<li>Character</li>
+<li>String</li>
+<li>Boolean</li>
+<li>Date</li>
+<li>Float</li>
+</ul>
+<h2 id="ports">Ports</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that needs to be parsed are received on this port</td>
+<td>byte[]</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid Tuples that are emitted as pojo</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>parsedOutput</em></td>
+<td>Valid Tuples that are emitted as maps</td>
+<td>Map</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid Tuples are emitted with error message</td>
+<td>KeyValPair&lt;String, String&gt;</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>Fixed Width Parser is both statically and dynamically partitionable.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>Static partitioning can be achieved by specifying the partitioner and number of partitions in the populateDAG() method.</p>
+<pre><code class="java">FixedWidthParser fixedWidthParser = dag.addOperator(&quot;fixedWidthParser&quot;, FixedWidthParser.class);
+StatelessPartitioner&lt;FixedWidthParser&gt; partitioner1 = new StatelessPartitioner&lt;FixedWidthParser&gt;(2);
+dag.setAttribute(fixedWidthParser, Context.OperatorContext.PARTITIONER, partitioner1);
+</code></pre>
+
+<p>Static partitioning can also be achieved by specifying the partitioner in properties file.</p>
+<pre><code class="xml">&lt;property&gt;
+    &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the FixedWidthParser operator. Above lines will partition FixedWidthParser statically 2 times. Above value can be changed accordingly to change the number of static partitions.</p>
+<h3 id="dynamic-partioning">Dynamic Partitioning</h3>
+<p>FixedWidthParser can be dynamically partitioned using out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to <code>populateDAG()</code> method of application to dynamically partition FixedWidthParser:</p>
+<pre><code class="java">FixedWidthParser fixedWidthParser = dag.addOperator(&quot;fixedWidthParser&quot;, FixedWidthParser.class);
+StatelessThroughputBasedPartitioner&lt;FixedWidthParser&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(conf.getLong(&quot;dt.cooldown&quot;, 10000));
+partitioner.setMaximumEvents(conf.getLong(&quot;dt.maxThroughput&quot;, 30000));
+partitioner.setMinimumEvents(conf.getLong(&quot;dt.minThroughput&quot;, 10000));
+dag.setAttribute(fixedWidthParser, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(fixedWidthParser, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition FixedWidthParser when the throughput changes.
+If the overall throughput of FixedWidthParser goes above 30000 or falls below 10000, the platform will repartition FixedWidthParser
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Example for Fixed Width Parser can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/parser">https://github.com/DataTorrent/examples/tree/master/tutorials/parser</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../ftpInputOperator/" class="btn btn-neutral float-right" title="FTP Input Operator">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../filter/" class="btn btn-neutral" title="Filter"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../filter/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../ftpInputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/fsInputOperator/index.html b/content/docs/malhar-3.8/operators/fsInputOperator/index.html
new file mode 100644
index 0000000..6cadb3a
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/fsInputOperator/index.html
@@ -0,0 +1,825 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>File Input - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "File Input";
+    var mkdocs_page_input_path = "operators/fsInputOperator.md";
+    var mkdocs_page_url = "/operators/fsInputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">File Input</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#file-input-operator">File Input Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#use-cases">Use Cases</a></li>
+                
+                    <li><a class="toctree-l4" href="#how-to-use">How to Use?</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#abstract-methods">Abstract Methods</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#derived-classes">Derived Classes</a></li>
+                
+                    <li><a class="toctree-l4" href="#1-abstractftpinputoperator">1. AbstractFTPInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#2-ftpstringinputoperator">2. FTPStringInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#3-abstractparquetfilereader">3. AbstractParquetFileReader</a></li>
+                
+                    <li><a class="toctree-l4" href="#4-abstractthroughputfileinputoperator">4. AbstractThroughputFileInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#5-linebylinefileinputoperator">5. LineByLineFileInputOperator</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#example-implementation-using-a-custom-character-encoding">Example Implementation Using a Custom Character Encoding</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#common-implementation-scenarios">Common Implementation Scenarios</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>File Input</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="file-input-operator">File Input Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator is designed to scan a directory for files, read and split file content into tuples
+such as lines or a block of bytes, and finally emit them on output ports defined in concrete
+subclasses for further processing by downstream operators.
+It can be used with any filesystem supported by Hadoop like HDFS, S3, ftp, NFS etc.</p>
+<h2 id="overview">Overview</h2>
+<p>The operator is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>partitionable</strong>.</p>
+<p>Logic for directory scanning is encapsulated in the <code>DirectoryScanner</code> static inner class
+which provides functions such as matching file names against a regular expression, tracking files
+that have already been processed (so that they are not processed again), filtering files based
+on the hashcode of the file names in the presence of partitioning so that each file is
+processed by a unique partition. This class can be extended if necessary to provide
+additional capabilities such as scanning multiple directories.</p>
+<p>It tracks the current file offset as part of checkpoint state. If it fails and is restarted
+by the platform, it will seek to the saved offset to avoid duplicate processing. Exactly once processing
+for fault tolerance is handled using window data manager. For more details check the blog about <a href="https://www.datatorrent.com/blog/fault-tolerant-file-processing/">Fault-Tolerant File Processing</a>.
+It supports both static and dynamic partitioning.</p>
+<h2 id="use-cases">Use Cases</h2>
+<p>This operator is suitable for use in an environment where small to medium sized files are
+deposited in a specific directory on a regular basis. For very large files a better alternative
+is the <code>FileSplitter</code> and <code>BlockReader</code> combination since they allow such files to be processed
+by multiple partitions to achieve higher throughput. Additionally, files which are continually
+modified by other processes are not suitable for processing with this operator since they may
+yield unpredictable results.</p>
+<h2 id="how-to-use">How to Use?</h2>
+<p>The tuple type in the abstract class is a generic parameter.
+Concrete subclasses need to choose an appropriate class (such as <code>String</code> or <code>byte[]</code>) for the
+generic parameter and also implement a couple of abstract methods: <code>readEntity()</code> to read
+the next tuple from the currently open file and <code>emit()</code> to process the next tuple.</p>
+<p>In principle, no ports need be defined in the rare case that the operator simply writes
+tuples to some external sink or merely maintains aggregated statistics. But in most common
+scenarios, the tuples need to be sent to one or more downstream operators for additional
+processing such as parsing, enrichment or aggregation; in such cases, appropriate
+output ports are defined and the <code>emit()</code> implementation dispatches tuples to the
+desired output ports.</p>
+<p>A simple concrete implementation is provided in Malhar: <code>LineByLineFileInputOperator</code>.
+It uses <code>String</code> for the generic parameter, defines a single output port and processes each
+line of the input file as a tuple. It is discussed further below.</p>
+<h2 id="partitioning">Partitioning</h2>
+<h4 id="static-partitioning">Static Partitioning</h4>
+<p>Configure parameter <code>partitionCount</code> to define the desired number of initial partitions
+(4 in this example).</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.partitionCount&lt;/name&gt;
+  &lt;value&gt;4&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>where <em>{OperatorName}</em> is the name of the input operator.</p>
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<p>Dynamic partitioning -- changing the number of partitions of one or more operators
+in a running application -- can be achieved in multiple ways:
+- Use the command line tool <code>apex</code> or the UI console to change the value of the
+  <code>partitionCount</code> property of the running operator. This change is detected in
+  <code>processStats()</code> (which is invoked periodically by the platform) where, if the
+  current partition count (<code>currentPartitions</code>) and the desired partition count
+  (<code>partitionCount</code>) differ, the <code>repartitionRequired</code> flag in the response is set.
+  This causes the platform to invoke <code>definePartitions()</code> to create a new set of
+  partitions with the desired count.
+- Override <code>processStats()</code> and within it, based on the statistics in the
+  incoming parameter or any other factors, define a new desired value of
+  <code>partitionCount</code> and finally, if this value differs from the current partition
+  count, set the <code>repartitionRequired</code> flag in the response.</p>
+<p>The details of actually creating the new set of partitions can be customized by overriding
+the <code>definePartitions()</code> method. There are a couple of things to keep in mind when doing this.
+The first is that repartitioning needs some care when the operator has state (as is the
+case here): Existing state from current operator partitions needs to be redistributed to the
+new partitions in a logically consistent way. The second is that some or all of the
+current set of partitions, which is an input parameter to <code>definePartitions()</code>, can be
+copied over to the new set; such partitions will continue running and will not be
+restarted. Any existing partitions that are not present in the new set will be shutdown.
+The current re-partitioning logic does not preserve any existing partitions, so upon
+a repartition event, all existing partitions are shutdown and the new ones started.</p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-library</em></strong></li>
+<li>Available since: <strong><em>1.0.2</em></strong></li>
+<li>Operator state: <strong><em>Stable</em></strong></li>
+<li>Java Packages:<ul>
+<li>Operator: <strong><em><a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/io/fs/AbstractFileInputOperator.html">com.datatorrent.lib.io.fs.AbstractFileInputOperator</a></em></strong></li>
+</ul>
+</li>
+</ol>
+<h3 id="abstractfileinputoperator">AbstractFileInputOperator</h3>
+<p>This is the abstract implementation that, as noted above, scans a single directory.
+It can be extended to modify functionality or add new capabilities. For example, the
+directory scanner can be overridden to monitor multiple directories. <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/fileIO-multiDir">This</a> example demonstrates how to do that.
+As noted in the overview above, this class has no ports, so concrete subclasses will need to
+provide them if necessary.</p>
+<p><img alt="AbstractFileInputOperator.png" src="../images/fsInput/operatorsClassDiagram.png" /></p>
+<h3 id="properties-of-abstractfileinputoperator"><a name="AbstractFileInputOperatorProps"></a>Properties of AbstractFileInputOperator</h3>
+<p>Several properties are available to configure the behavior of this operator and they are
+summarized in the table below. Of these, only <code>directory</code> is required: it specifies
+the path of the monitored directory. It can be set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.directory&lt;/name&gt;
+  &lt;value&gt;/tmp/fileInput&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>If new files appear with high frequency in this directory
+and they need to be processed as soon as they appear, reduce the value of <code>scanIntervalMillis</code>;
+if they appear rarely or if some delay in processing a new file is acceptable, increase it.
+Obviously, smaller values will result in greater IO activity for the corresponding filesystem.</p>
+<p>The platform invokes the <code>emitTuples()</code> callback multiple times in each streaming window; within
+a single such call, if a large number of tuples are emitted, there is some risk that they
+may overwhelm the downstream operators especially if they are performing some compute intensive
+operation. For such cases, output can be throttled by reducing the value of the
+<code>emitBatchSize</code> property. Conversely, if the downstream operators can handle the load, increase
+the value to enhance throughput.</p>
+<p>The <code>partitionCount</code> parameter has already been discussed above.</p>
+<p>Occasionally, some files get into a bad state and cause errors when an attempt is made to
+read from them. The causes vary depending on the filesystem type ranging from corrupted
+filesystems to network issues. In such cases, the operator will retry reading from such
+files a limited number of times before blacklisting those files. This retry count is
+defined by the <code>maxRetryCount</code> property.</p>
+<p>Finally, the specific scanner class used to monitor the input directories can be configured
+by setting the <code>scanner</code> property.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>directory</em></td>
+<td>absolute path of directory to be scanned</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>scanIntervalMillis</em></td>
+<td>Interval in milliseconds after which directory should be scanned for new files</td>
+<td>int</td>
+<td>No</td>
+<td>5000</td>
+</tr>
+<tr>
+<td><em>emitBatchSize</em></td>
+<td>Maximum number of tuples to emit in a single call to the <code>emitTuples()</code> callback (see explanation above).</td>
+<td>int</td>
+<td>No</td>
+<td>1000</td>
+</tr>
+<tr>
+<td><em>partitionCount</em></td>
+<td>Desired number of partitions</td>
+<td>int</td>
+<td>No</td>
+<td>1</td>
+</tr>
+<tr>
+<td><em>maxRetryCount</em></td>
+<td>Maximum number of times the operator will attempt to process a file</td>
+<td>int</td>
+<td>No</td>
+<td>5</td>
+</tr>
+<tr>
+<td><em>scanner</em></td>
+<td>Scanner to scan new files in directory</td>
+<td><a href="#DirectoryScanner">DirectoryScanner</a></td>
+<td>No</td>
+<td>DirectoryScanner</td>
+</tr>
+</tbody>
+</table>
+<h4 id="properties-of-directoryscanner"><a name="DirectoryScanner"></a>Properties of DirectoryScanner</h4>
+<p>The directory scanner has one optional property: a regular expression to filter files
+of interest. If absent, all files in the source directory are processed. It can be
+set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.scanner.filePatternRegexp&lt;/name&gt;
+  &lt;value&gt;.*\.txt&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>filePatternRegexp</em></td>
+<td>regex to select files from input directory</td>
+<td>String</td>
+<td>No</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<p>This operator has no ports.</p>
+<h2 id="abstract-methods">Abstract Methods</h2>
+<p>As described above, concrete subclasses need to provide implementations for these two
+methods:</p>
+<pre><code class="java">void emit(T tuple);
+T readEntity();
+</code></pre>
+
+<p>Examples of implementations are in the <code>LineByLineFileInputOperator</code> operator and also in
+the example at the end of this guide.</p>
+<h2 id="derived-classes">Derived Classes</h2>
+<h3 id="1-abstractftpinputoperator">1. AbstractFTPInputOperator</h3>
+<p>The class is used to read files from FTP file system. As for the above abstract class, concrete
+subclasses need to implement the
+<a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/io/fs/AbstractFileInputOperator.html#readEntity">readEntity</a> and
+<a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/io/fs/AbstractFileInputOperator.html#emit">emit</a> methods.</p>
+<h4 id="properties"><a name="AbstractFTPInputOperatorProps"></a>Properties</h4>
+<p>This operator defines following additional properties beyond those defined in the
+<a href="#AbstractFileInputOperatorProps">parent class</a>.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>host</em></td>
+<td>Hostname of ftp server.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>port</em></td>
+<td>Port of ftp server.</td>
+<td>int</td>
+<td>No</td>
+<td>21 (default ftp port)</td>
+</tr>
+<tr>
+<td><em>userName</em></td>
+<td>Username which is used for login to the server.</td>
+<td>String</td>
+<td>No</td>
+<td>anonymous</td>
+</tr>
+<tr>
+<td><em>password</em></td>
+<td>Password which is used for login to the server.</td>
+<td>String</td>
+<td>No</td>
+<td>guest</td>
+</tr>
+</tbody>
+</table>
+<h4 id="ports_1">Ports</h4>
+<p>This operator has no ports.</p>
+<h3 id="2-ftpstringinputoperator">2. FTPStringInputOperator</h3>
+<p>This class extends AbstractFTPInputOperator and implements abstract methods to read files available on FTP file system line by line.</p>
+<h4 id="properties_1"><a name="FTPStringInputOperatorProps"></a>Properties</h4>
+<p>This operator defines no additional properties beyond those defined in the
+<a href="#AbstractFTPInputOperatorProps">parent class</a>.</p>
+<h4 id="ports_2">Ports</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>output</em></td>
+<td>Tuples that are read from file are emitted on this port</td>
+<td>String</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="3-abstractparquetfilereader">3. AbstractParquetFileReader</h3>
+<p>Reads Parquet files from input directory using GroupReadSupport. Derived classes need to implement <a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/contrib/parquet/AbstractParquetFileReader.html#convertGroup(Group)">convertGroup(Group)</a> method to convert Group to other type. Also it should implement  <a href="https://www.datatorrent.com/docs/apidocs/com/datatorrent/lib/io/fs/AbstractFileInputOperator.html#readEntity()">readEntity()</a> and <a href="https://www.data [...]
+<h4 id="properties-of-abstractparquetfilereader"><a name="AbstractParquetFileReaderProps"></a>Properties of AbstractParquetFileReader</h4>
+<p>This operator defines following additional properties beyond those defined in the
+<a href="#AbstractFileInputOperatorProps">parent class</a>.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>parquetSchema</em></td>
+<td>Parquet Schema to parse record.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h4 id="ports_3">Ports</h4>
+<p>This operator has no ports.</p>
+<h3 id="4-abstractthroughputfileinputoperator">4. AbstractThroughputFileInputOperator</h3>
+<p>This operator extends <code>AbstractFileInputOperator</code> by providing the capability to partition
+dynamically based the file backlog. The user can set the preferred number of pending files per operator as well as the maximum number of operators and define a re-partition interval. If a physical operator runs out of files to process and an amount of time greater than or equal to the repartition interval has passed then a new number of operators are created to accommodate the remaining pending files. Derived classes need to implement <a href="https://www.datatorrent.com/docs/apidocs/com [...]
+<h4 id="properties-of-abstractthroughputfileinputoperator"><a name="AbstractThroughputFileInputOperatorProps"></a>Properties of AbstractThroughputFileInputOperator</h4>
+<p>This operator defines following additional properties beyond those defined in the
+<a href="#AbstractFileInputOperatorProps">parent class</a>.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>repartitionInterval</em></td>
+<td>The minimum amount of time that must pass in milliseconds before the operator can be repartitioned.</td>
+<td>long</td>
+<td>No</td>
+<td>5 minutes</td>
+</tr>
+<tr>
+<td><em>preferredMaxPendingFilesPerOperator</em></td>
+<td>the preferred number of pending files per operator.</td>
+<td>int</td>
+<td>No</td>
+<td>10</td>
+</tr>
+<tr>
+<td><em>partitionCount</em></td>
+<td>the maximum number of partitions for the operator.</td>
+<td>int</td>
+<td>No</td>
+<td>1</td>
+</tr>
+</tbody>
+</table>
+<h4 id="ports_4">Ports</h4>
+<p>This operator has no ports.</p>
+<h3 id="5-linebylinefileinputoperator">5. LineByLineFileInputOperator</h3>
+<p>As mentioned in the overview above, this operator defines a single output port; it reads files
+as lines and emits them as Java Strings on the output port. The output port <em>must</em> be connected.
+Lines are extracted using the Java <code>BufferedReader</code> class and the default character encoding.
+An example illustrating the use of a custom encoding (such as UTF_8) is provided below.</p>
+<h4 id="properties_2">Properties</h4>
+<p>This operator defines no additional properties beyond those defined in the
+<a href="#AbstractFileInputOperatorProps">parent class</a>.</p>
+<h4 id="ports_5">Ports</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>output</em></td>
+<td>Tuples that are read from file are emitted on this port</td>
+<td>String</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h2 id="example-implementation-using-a-custom-character-encoding">Example Implementation Using a Custom Character Encoding</h2>
+<p>This example demonstrates how to extend the <code>AbstractFileInputOperator</code> to read
+UTF-8 encoded data.</p>
+<pre><code>public class EncodedDataReader extends AbstractFileInputOperator&lt;String&gt;
+{
+  public final transient DefaultOutputPort&lt;String&gt; output = new DefaultOutputPort&lt;&gt;();
+  protected transient BufferedReader br;
+
+  protected InputStream openFile(Path path) throws IOException
+  {
+    InputStream is = super.openFile(path);
+    br = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
+    return is;
+  }
+
+  @Override
+  protected void closeFile(InputStream is) throws IOException
+  {
+    super.closeFile(is);
+    br.close();
+    br = null;
+  }
+
+  @Override
+  protected String readEntity() throws IOException
+  {
+    return br.readLine();
+  }
+
+  @Override
+  protected void emit(String tuple)
+  {
+    output.emit(tuple);
+  }
+}
+
+</code></pre>
+
+<h2 id="common-implementation-scenarios">Common Implementation Scenarios</h2>
+<p>Sometimes, downstream operators need to know which file each tuple came from; there are a
+number of ways of achieving this, each with its own tradeoffs. Some alternatives:</p>
+<ul>
+<li>If the generic tuple type is a String, each tuple can be prefixed with the file name
+  with a suitable separator, for example: <code>foo.txt: first line</code>. This works but
+  has obvious additional costs in both processing (to parse out the two pieces of each
+  tuple) and network bandwidth utilization.</li>
+<li>Define a custom tuple class with two fields: one for the file name and one for tuple data.
+  The costs are similar to the previous approach though the code is simpler since
+  parsing is handled behind the scenes by the serialization process.</li>
+<li>Define the tuple type to be <code>Object</code> and emit either a custom <code>Tuple</code> object for actual
+  tuple data or <strong>BOF</strong>/<strong>EOF</strong> objects with the name of the file when a new file begins
+  or the current file ends. Here, the additional bandwidth consumed is
+  minimal (just 2 additional tuples at file boundaries) but the type of each tuple needs
+  to be checked using <code>instanceof</code> in the downstream operators which has some runtime cost.</li>
+<li>Similar to the previous approach but define an additional control port dedicated to
+  the BOF/EOF control tuples. This approach eliminates the runtime cost of using <code>instanceof</code>
+  but some care is needed because (a) the order of tuples arriving at multiple input ports
+  in downstream operators cannot be guaranteed -- for example, the BOF/EOF control tuples
+  may arrive before some of the actual data tuples; and (b) since the operator may read
+  more than one file in a single streaming window, the downstream operator may not be
+  able to tell which tuples belong to which file. One way of dealing with this is to
+  stop emitting data tuples until the next <code>endWindow()</code> callback when an EOF is detected
+  for the current file; that way, if the downstream operator receives an EOF control tuple,
+  it has the guarantee that all the data tuples received in the same window belong to the
+  current file.</li>
+</ul>
+<p>Of course, other strategies are possible depending on the needs of the particular situation.</p>
+<p>When used in a long-running application where a very large number of files are processed
+over time, the internal state (consisting of properties like <code>processedFiles</code>) may grow
+correspondingly and this may have some performance impact since each checkpoint saves the
+entire operator state. In such situations, it is useful to explore options such as moving
+processed files to another directory and trimming operator state variables suitably.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../file_output/" class="btn btn-neutral float-right" title="File Output">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../enricher/" class="btn btn-neutral" title="Enricher"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../enricher/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../file_output/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/ftpInputOperator/index.html b/content/docs/malhar-3.8/operators/ftpInputOperator/index.html
new file mode 100644
index 0000000..1a89c3d
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/ftpInputOperator/index.html
@@ -0,0 +1,447 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>FTP Input Operator - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "FTP Input Operator";
+    var mkdocs_page_input_path = "operators/ftpInputOperator.md";
+    var mkdocs_page_url = "/operators/ftpInputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">FTP Input Operator</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#ftp-input-operator">FTP Input Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>FTP Input Operator</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="ftp-input-operator">FTP Input Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator (<code>AbstractFTPInputOperator</code>) is designed to scan a directory from an FTP server for files, read
+and split file content into tuples such as lines or blocks of bytes, and finally
+emit them on the output port for further processing by downstream operators.
+The operator extends the <code>AbstractFileInputOperator</code>. It overrides the
+getFSInstance() method and returns an instance of the FTPFileSystem
+(<code>org.apache.hadoop.fs.ftp.FTPFileSystem</code>)</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="FTPInputOperator class diagram" src="../images/ftpInputOperator/classdiagram.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location : <strong><em>malhar-lib</em></strong></li>
+<li>Available since : <strong><em>2.0.0</em></strong></li>
+<li>Java Package : <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/io/AbstractFTPInputOperator.java">com.datatorrent.lib.io</a></li>
+</ol>
+<h3 id="ports">Ports</h3>
+<p>Because this is an input operator, there are no input ports.</p>
+<table>
+<thead>
+<tr>
+<th>Port</th>
+<th>Description</th>
+<th>Type</th>
+<th>Mandatory</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>output</em></td>
+<td>output port on which data is emitted</td>
+<td>String</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="configuration">Configuration</h3>
+<table>
+<thead>
+<tr>
+<th>Property</th>
+<th>Description</th>
+<th>Type</th>
+<th>Mandatory</th>
+<th>Default Value</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>host</em></td>
+<td>the hostname of the FTP Server</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>source</em></td>
+<td>the directory path from where to scan and read files</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>username</em></td>
+<td>the username for authenticating against the FTP server. This is an optional property and can be skipped when anonymous FTP is enabled</td>
+<td>String</td>
+<td>No</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>password</em></td>
+<td>the password to be used in conjunction with the above username</td>
+<td>String</td>
+<td>No</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<h4 id="static-partitioning">Static Partitioning</h4>
+<p>Configure parameter <code>partitionCount</code> to define the desired number of initial partitions
+(4 in this example).</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{OperatorName}.prop.partitionCount&lt;/name&gt;
+  &lt;value&gt;4&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>Alternatively, this can be changed in the application code by setting the operator property <code>partitionCount</code> to the desired number of partitions:</p>
+<pre><code class="java">FTPStringInputOperator reader = dag.addOperator(&quot;Reader&quot;, new FTPStringInputOperator());
+reader.setPartitionCount(4);
+</code></pre>
+
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<p>Dynamic partitioning -- changing the number of partitions of one or more operators
+in a running application -- can be achieved in multiple ways:
+- Use the command line tool <code>apex</code> or the UI console to change the value of the
+  <code>partitionCount</code> property of the running operator. This change is detected in
+  <code>processStats()</code> (which is invoked periodically by the platform) where, if the
+  current partition count (<code>currentPartitions</code>) and the desired partition count
+  (<code>partitionCount</code>) differ, the <code>repartitionRequired</code> flag in the response is set.
+  This causes the platform to invoke <code>definePartitions()</code> to create a new set of
+  partitions with the desired count.
+- Override <code>processStats()</code> and within it, based on the statistics in the
+  incoming parameter or any other factors, define a new desired value of
+  <code>partitionCount</code> and finally, if this value differs from the current partition
+  count, set the <code>repartitionRequired</code> flag in the response.</p>
+<h3 id="example-application">Example application</h3>
+<p>An example application for the FTP input operator can be found at <a href="https://github.com/apache/apex-malhar/tree/master/examples/ftp">https://github.com/apache/apex-malhar/tree/master/examples/ftp</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../AbstractJdbcTransactionableOutputOperator/" class="btn btn-neutral float-right" title="Jdbc Output Operator">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../fixedWidthParserOperator/" class="btn btn-neutral" title="Fixed Width Parser"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../fixedWidthParserOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../AbstractJdbcTransactionableOutputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/images/blockreader/classdiagram.png b/content/docs/malhar-3.8/operators/images/blockreader/classdiagram.png
new file mode 100644
index 0000000..8fbd6fc
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/blockreader/classdiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/blockreader/flowdiagram.png b/content/docs/malhar-3.8/operators/images/blockreader/flowdiagram.png
new file mode 100644
index 0000000..1b2897d
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/blockreader/flowdiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/blockreader/fsreaderexample.png b/content/docs/malhar-3.8/operators/images/blockreader/fsreaderexample.png
new file mode 100644
index 0000000..571b60a
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/blockreader/fsreaderexample.png differ
diff --git a/content/docs/malhar-3.8/operators/images/blockreader/totalBacklogProcessing.png b/content/docs/malhar-3.8/operators/images/blockreader/totalBacklogProcessing.png
new file mode 100644
index 0000000..2ed481f
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/blockreader/totalBacklogProcessing.png differ
diff --git a/content/docs/malhar-3.8/operators/images/csvParser/CSVParser.png b/content/docs/malhar-3.8/operators/images/csvParser/CSVParser.png
new file mode 100644
index 0000000..523ba0b
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/csvParser/CSVParser.png differ
diff --git a/content/docs/malhar-3.8/operators/images/deduper/image00.png b/content/docs/malhar-3.8/operators/images/deduper/image00.png
new file mode 100644
index 0000000..ec3e292
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/deduper/image00.png differ
diff --git a/content/docs/malhar-3.8/operators/images/deduper/image01.png b/content/docs/malhar-3.8/operators/images/deduper/image01.png
new file mode 100644
index 0000000..b9e35a9
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/deduper/image01.png differ
diff --git a/content/docs/malhar-3.8/operators/images/deduper/image02.png b/content/docs/malhar-3.8/operators/images/deduper/image02.png
new file mode 100644
index 0000000..689bdfe
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/deduper/image02.png differ
diff --git a/content/docs/malhar-3.8/operators/images/deduper/image03.png b/content/docs/malhar-3.8/operators/images/deduper/image03.png
new file mode 100644
index 0000000..087a0b1
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/deduper/image03.png differ
diff --git a/content/docs/malhar-3.8/operators/images/deduper/image04.png b/content/docs/malhar-3.8/operators/images/deduper/image04.png
new file mode 100644
index 0000000..4d3bd32
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/deduper/image04.png differ
diff --git a/content/docs/malhar-3.8/operators/images/fileoutput/FileRotation.png b/content/docs/malhar-3.8/operators/images/fileoutput/FileRotation.png
new file mode 100644
index 0000000..624c96e
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/fileoutput/FileRotation.png differ
diff --git a/content/docs/malhar-3.8/operators/images/fileoutput/diagram1.png b/content/docs/malhar-3.8/operators/images/fileoutput/diagram1.png
new file mode 100644
index 0000000..0a260de
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/fileoutput/diagram1.png differ
diff --git a/content/docs/malhar-3.8/operators/images/filesplitter/baseexample.png b/content/docs/malhar-3.8/operators/images/filesplitter/baseexample.png
new file mode 100644
index 0000000..6af2b44
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/filesplitter/baseexample.png differ
diff --git a/content/docs/malhar-3.8/operators/images/filesplitter/classdiagram.png b/content/docs/malhar-3.8/operators/images/filesplitter/classdiagram.png
new file mode 100644
index 0000000..6490368
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/filesplitter/classdiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/filesplitter/inputexample.png b/content/docs/malhar-3.8/operators/images/filesplitter/inputexample.png
new file mode 100644
index 0000000..65e199f
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/filesplitter/inputexample.png differ
diff --git a/content/docs/malhar-3.8/operators/images/filesplitter/sequence.png b/content/docs/malhar-3.8/operators/images/filesplitter/sequence.png
new file mode 100644
index 0000000..85cf702
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/filesplitter/sequence.png differ
diff --git a/content/docs/malhar-3.8/operators/images/fixedWidthParser/fixedWidthParser.png b/content/docs/malhar-3.8/operators/images/fixedWidthParser/fixedWidthParser.png
new file mode 100644
index 0000000..3359237
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/fixedWidthParser/fixedWidthParser.png differ
diff --git a/content/docs/malhar-3.8/operators/images/fsInput/operatorsClassDiagram.png b/content/docs/malhar-3.8/operators/images/fsInput/operatorsClassDiagram.png
new file mode 100644
index 0000000..31c7a0d
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/fsInput/operatorsClassDiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/ftpInputOperator/classdiagram.png b/content/docs/malhar-3.8/operators/images/ftpInputOperator/classdiagram.png
new file mode 100644
index 0000000..8e173a8
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/ftpInputOperator/classdiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/jdbcinput/operatorsClassDiagram.png b/content/docs/malhar-3.8/operators/images/jdbcinput/operatorsClassDiagram.png
new file mode 100644
index 0000000..4b0432d
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/jdbcinput/operatorsClassDiagram.png differ
diff --git a/content/docs/malhar-3.8/operators/images/jdbcoutput/operatorsClassDiagrams.png b/content/docs/malhar-3.8/operators/images/jdbcoutput/operatorsClassDiagrams.png
new file mode 100644
index 0000000..ae7ab42
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/jdbcoutput/operatorsClassDiagrams.png differ
diff --git a/content/docs/malhar-3.8/operators/images/jsonFormatter/JsonFormatter.png b/content/docs/malhar-3.8/operators/images/jsonFormatter/JsonFormatter.png
new file mode 100644
index 0000000..f457c68
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/jsonFormatter/JsonFormatter.png differ
diff --git a/content/docs/malhar-3.8/operators/images/jsonParser/JsonParser.png b/content/docs/malhar-3.8/operators/images/jsonParser/JsonParser.png
new file mode 100644
index 0000000..7235efc
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/jsonParser/JsonParser.png differ
diff --git a/content/docs/malhar-3.8/operators/images/kafkainput/image00.png b/content/docs/malhar-3.8/operators/images/kafkainput/image00.png
new file mode 100644
index 0000000..0fa00e8
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/kafkainput/image00.png differ
diff --git a/content/docs/malhar-3.8/operators/images/regexparser/logcapturedgroups.png b/content/docs/malhar-3.8/operators/images/regexparser/logcapturedgroups.png
new file mode 100644
index 0000000..e5eac05
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/regexparser/logcapturedgroups.png differ
diff --git a/content/docs/malhar-3.8/operators/images/regexparser/regexcapturedgroups.png b/content/docs/malhar-3.8/operators/images/regexparser/regexcapturedgroups.png
new file mode 100644
index 0000000..11a0c79
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/regexparser/regexcapturedgroups.png differ
diff --git a/content/docs/malhar-3.8/operators/images/s3output/s3outputmodule.png b/content/docs/malhar-3.8/operators/images/s3output/s3outputmodule.png
new file mode 100644
index 0000000..f9c686b
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/s3output/s3outputmodule.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/allowed-lateness.png b/content/docs/malhar-3.8/operators/images/windowedOperator/allowed-lateness.png
new file mode 100644
index 0000000..34153ca
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/allowed-lateness.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-1.png b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-1.png
new file mode 100644
index 0000000..78712e8
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-1.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-2.png b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-2.png
new file mode 100644
index 0000000..19bfab0
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-2.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-3.png b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-3.png
new file mode 100644
index 0000000..e7810eb
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-3.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-4.png b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-4.png
new file mode 100644
index 0000000..43838c8
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/session-windows-4.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/sliding-time-windows.png b/content/docs/malhar-3.8/operators/images/windowedOperator/sliding-time-windows.png
new file mode 100644
index 0000000..dc55e1a
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/sliding-time-windows.png differ
diff --git a/content/docs/malhar-3.8/operators/images/windowedOperator/time-windows.png b/content/docs/malhar-3.8/operators/images/windowedOperator/time-windows.png
new file mode 100644
index 0000000..cba471c
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/windowedOperator/time-windows.png differ
diff --git a/content/docs/malhar-3.8/operators/images/xmlParser/XmlParser.png b/content/docs/malhar-3.8/operators/images/xmlParser/XmlParser.png
new file mode 100644
index 0000000..3964da9
Binary files /dev/null and b/content/docs/malhar-3.8/operators/images/xmlParser/XmlParser.png differ
diff --git a/content/docs/malhar-3.8/operators/jdbcPollInputOperator/index.html b/content/docs/malhar-3.8/operators/jdbcPollInputOperator/index.html
new file mode 100644
index 0000000..7c2a65c
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/jdbcPollInputOperator/index.html
@@ -0,0 +1,668 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>JDBC Poller Input - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "JDBC Poller Input";
+    var mkdocs_page_input_path = "operators/jdbcPollInputOperator.md";
+    var mkdocs_page_url = "/operators/jdbcPollInputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">JDBC Poller Input</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#jdbc-poller-input-operator">JDBC Poller Input Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#use-cases">Use cases</a></li>
+                
+                    <li><a class="toctree-l4" href="#how-to-use">How to Use?</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning-of-jdbc-poller">Partitioning of JDBC Poller</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractjdbcpollinputoperator">AbstractJdbcPollInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstract-methods">Abstract Methods</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#concrete-classes">Concrete Classes</a></li>
+                
+                    <li><a class="toctree-l4" href="#1-jdbcpojopollinputoperator">1. JdbcPOJOPollInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#2-jdbcpollinputoperator">2. JdbcPollInputOperator</a></li>
+                
+            
+                <li class="toctree-l3"><a href="#limitations">Limitations</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>JDBC Poller Input</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="jdbc-poller-input-operator">JDBC Poller Input Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>This operator scans a JDBC database table in parallel. It was added to address common input operator problems such as:</p>
+<ol>
+<li>
+<p>As discussed in <a href="https://github.com/apache/apex-core/blob/master/docs/development_best_practices.md">Development Best Practices</a>,
+    the operator callbacks such as <code>beginWindow()</code>, <code>endWindow()</code>, <code>emitTuples()</code>, etc.
+    (which are invoked by the main operator thread)
+    are required to return quickly, well within the default streaming window duration of
+    500ms. This requirement can be an issue when retrieving data from slow external systems
+    such as databases or object stores: if the call takes too long, the platform will deem
+    the operator blocked and restart it. Restarting will often run into the same issue
+    causing an unbroken sequence of restarts.</p>
+</li>
+<li>
+<p>When a large volume of data is available from a single store that allows reading from
+   arbitrary locations (such as a file or a database table), reading the data sequentially
+   can be throughput limiting: Having multiple readers read from non-overlapping sections
+   of the store allows any downstream parallelism in the DAG to be exploited better to
+   enhance throughput. For files, this approach is used by the file splitter and block
+   reader operators in the Malhar library.</p>
+</li>
+</ol>
+<p>JDBC Poller Input operator addresses the first issue with an asynchronous worker thread which retrieves the data and adds it to an in-memory queue; the main operator thread dequeues tuples very quickly if data is available or simply returns if not. The second is addressed in a way that parallels the approach to files by having multiple partitions read records from non-overlapping areas of the table. Additional details of how this is done are described below.</p>
+<h4 id="assumption">Assumption</h4>
+<p>The assumption is that there is an ordered column on which range queries can be formed. That means the database has a column (or combination of columns) with a unique constraint, and every newly inserted record must have a value in that column greater than the current maximum, since only appended records are polled.</p>
+<h2 id="use-cases">Use cases</h2>
+<ol>
+<li>Ingest large database tables. An example application that copies database contents to HDFS is available <a href="https://github.com/apache/apex-malhar/blob/master/examples/jdbc/src/main/java/org/apache/apex/examples/JdbcIngest/JdbcPollerApplication.java">here</a>.</li>
+</ol>
+<h2 id="how-to-use">How to Use?</h2>
+<p>The tuple type in the abstract class is a generic parameter. Concrete subclasses need to choose an appropriate class (such as String or an appropriate concrete java class, having no-argument constructor so that it can be serialized using Kryo). Also implement a couple of abstract methods: <code>getTuple(ResultSet)</code> to convert database rows to objects of concrete class and <code>emitTuple(T)</code> to emit the tuple.</p>
+<p>In principle, no ports need be defined in the rare case that the operator simply writes tuples to some external sink or merely maintains aggregated statistics. But in most common scenarios, the tuples need to be sent to one or more downstream operators for additional processing such as parsing, enrichment or aggregation; in such cases, appropriate output ports are defined and the emitTuple(T) implementation dispatches tuples to the desired output ports.</p>
+<p>Couple of concrete implementations are provided in Malhar:</p>
+<ol>
+<li>
+<p><a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPOJOPollInputOperator.java">JdbcPOJOPollInputOperator</a>: It uses java Object for the generic parameter. This operator defines a single output port and processes each database table record one by one as a tuple object. You need to set the output port attribute TUPLE_CLASS to define your <a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">POJO</a> class name to de [...]
+</li>
+<li>
+<p><a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcPollInputOperator.java">JdbcPollInputOperator</a>: It uses String for the generic parameter. This operator defines a single port and processes each database table record one by one as String tuple. Details are discussed below.</p>
+</li>
+</ol>
+<h2 id="partitioning-of-jdbc-poller">Partitioning of JDBC Poller</h2>
+<h4 id="static-partitioning">Static Partitioning</h4>
+<p>Only static partitioning is supported for JDBC Poller Input Operator. Configure parameter <code>partitionCount</code> to define the desired number of initial partitions (4 in this example).
+<strong>Note</strong>: An additional partition will be created to poll newly added records, so the total number of partitions will always be 1 + partitionCount.</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;apex.operator.{OperatorName}.prop.partitionCount&lt;/name&gt;
+    &lt;value&gt;4&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the JDBC Poller operator.</p>
+<p>This will create 5 operator instances in all. Four of these will fetch the data which is currently in the table. We call these static non-polling partitions. The partitions will be idle after they fetch the portion of the data. An additional partition will be created which will read any newly added data. We call such a partition a polling partition, as it "polls" for newly added data. There will be only one polling partition.</p>
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<p>Not supported.</p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-library</em></strong></li>
+<li>Available since: <strong><em>3.5.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Packages: <strong><em><a href="https://ci.apache.org/projects/apex-malhar/apex-malhar-javadoc-release-3.7/com/datatorrent/lib/db/jdbc/package-summary.html">AbstractJdbcPollInputOperator</a></em></strong></li>
+</ol>
+<p>JDBC Poller is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically partitionable</strong>.</p>
+<h2 id="abstractjdbcpollinputoperator">AbstractJdbcPollInputOperator</h2>
+<p>This is the abstract implementation that serves as base class for polling messages from JDBC store. It can be extended to modify functionality or add new capabilities. This class doesn’t have any ports, so concrete subclasses will need to provide them if necessary.</p>
+<p><img alt="AbstractJdbcPollInputOperator.png" src="../images/jdbcinput/operatorsClassDiagram.png" /></p>
+<h3 id="properties-of-abstractjdbcpollinputoperator"><a name="AbstractJdbcPollInputOperatorProps"></a>Properties of AbstractJdbcPollInputOperator</h3>
+<p>Several properties are available to configure the behavior of this operator and they are summarized in the table below.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>store</em></td>
+<td>JDBC Store for connection</td>
+<td><a href="#JDBCStore">JDBCStore</a></td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>tableName</em></td>
+<td>table name to be scanned</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>columnsExpression</em></td>
+<td>Comma separated list of columns to select from the given table.</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>key</em></td>
+<td>Primary key column name</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>partitionCount</em></td>
+<td>Static partitions count</td>
+<td>int</td>
+<td>No</td>
+<td>1</td>
+</tr>
+<tr>
+<td><em>whereCondition</em></td>
+<td>Where condition for JDBC query</td>
+<td>String</td>
+<td>No</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>fetchSize</em></td>
+<td>Hint limiting the number of rows to fetch in a single call</td>
+<td>int</td>
+<td>No</td>
+<td>20000</td>
+</tr>
+<tr>
+<td><em>pollInterval</em></td>
+<td>Interval in milliseconds to poll the database table</td>
+<td>int</td>
+<td>No</td>
+<td>10000</td>
+</tr>
+<tr>
+<td><em>queueCapacity</em></td>
+<td>Capacity of the queue which holds DB data before emitting</td>
+<td>int</td>
+<td>No</td>
+<td>4096</td>
+</tr>
+<tr>
+<td><em>batchSize</em></td>
+<td>Maximum number of tuples to emit in a single call to the <code>emitTuples()</code> callback (see explanation above).</td>
+<td>int</td>
+<td>No</td>
+<td>2000</td>
+</tr>
+</tbody>
+</table>
+<h4 id="properties-of-jdbc-store-backendstore"><a name="JDBCStore"></a>Properties of JDBC Store (BackendStore)</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>databaseDriver</em></td>
+<td>JDBC Driver class for connection to JDBC Store. This driver should be present in the classpath</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>databaseUrl</em></td>
+<td>Database url of the form jdbc:subprotocol:subname</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>connectionProps</em></td>
+<td>Comma separated connection properties e.g. user:xyz,password:ijk</td>
+<td>String</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<p>Of these only <code>store</code> properties, <code>tableName</code>, <code>columnsExpression</code> and <code>key</code> are mandatory. Those properties can be set like this:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.tableName&lt;/name&gt;
+  &lt;value&gt;mytable&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.columnsExpression&lt;/name&gt;
+  &lt;value&gt;column1,column2,column4&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.key&lt;/name&gt;
+  &lt;value&gt;keycolumn&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.store.databaseDriver&lt;/name&gt;
+  &lt;value&gt;com.mysql.jdbc.Driver&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.store.databaseUrl&lt;/name&gt;
+  &lt;value&gt;jdbc:mysql://localhost:3306/mydb&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;apex.operator.{OperatorName}.prop.store.connectionProps&lt;/name&gt;
+  &lt;value&gt;user:myuser,password:mypassword&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<ul>
+<li>If you need to filter the table records, set <code>whereCondition</code> which will be added to the generated SQL query.</li>
+<li>If you have a table with a very large number of rows to scan, set <code>partitionCount</code> to a higher number to increase read parallelism.</li>
+<li>The operator uses PreparedStatement, a precompiled SQL statement, to fetch records from the database table. You can set <code>fetchSize</code> as a hint to the database driver to restrict the number of rows to fetch in one call. The remaining rows will be fetched in subsequent calls. Please note, some database drivers may not honor this hint. Please refer to the database driver documentation to learn the recommended value.</li>
+<li>The platform invokes the <code>emitTuples()</code> callback multiple time in each streaming window; within a single such call, if a large number of tuples are emitted, there is some risk that they may overwhelm the downstream operators especially if they are performing some compute intensive operation. For such cases, output can be throttled by reducing the value of the <code>batchSize</code> property. Conversely, if the downstream operators can handle the load, increase the value to [...]
+<li>If there is high rate of incoming records in your table and you want to process them as soon as they appear, use lower value of <code>pollInterval</code>; if they appear rarely or if some delay in processing new records is acceptable, increase it.</li>
+<li>After reading the records from the table they are held in memory for some time till they are emitted to the next operator. The records are kept in a blocking queue. The capacity of this blocking queue can be changed using the parameter <code>queueCapacity</code>. You can use a larger queue size when your reader thread is very fast and you want to read more data into memory to keep it ready for emission.</li>
+</ul>
+<p><strong>Note</strong>: Please set the right store object instance on the JDBC Poller Input Operator in your application code. It's recommended to use <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/db/jdbc/JdbcStore.java">JdbcStore</a> for this operator.</p>
+<h3 id="abstract-methods">Abstract Methods</h3>
+<p><code>void emitTuple(T tuple)</code>: Abstract method that emits tuple extracted from JDBC store.</p>
+<p><code>T getTuple(ResultSet result)</code>: Abstract method to extract the tuple from the JDBC ResultSet object and convert it to the required type (T).</p>
+<h2 id="concrete-classes">Concrete Classes</h2>
+<h3 id="1-jdbcpojopollinputoperator">1. JdbcPOJOPollInputOperator</h3>
+<p>This implementation converts JDBC store records to <a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">POJO</a> and emits POJO on output port.</p>
+<h4 id="properties-of-jdbcpojopollinputoperator"><a name="JdbcPOJOPollInputOperatorProps"></a>Properties of JdbcPOJOPollInputOperator</h4>
+<p>This operator defines following additional properties beyond those defined in the <a href="#AbstractJdbcPollInputOperatorProps">parent class</a>.</p>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>fieldInfos</em></td>
+<td>Maps columns to POJO field names.</td>
+<td>List</td>
+<td>Yes</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h4 id="platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>outputPort.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on the output port which tells the operator the class of POJO which needs to be emitted</td>
+<td>Class or FQCN (Fully Qualified Class Name)</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h4 id="ports">Ports</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>outputPort</em></td>
+<td>Tuples that are read from JDBC store are emitted on this port</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h3 id="2-jdbcpollinputoperator">2. JdbcPollInputOperator</h3>
+<p>This implementation converts JDBC store records to comma separated CSV records. This operator is normally used when you just want to copy the data from database to somewhere else and don't want to do much of processing.</p>
+<h4 id="properties-of-jdbcpollinputoperator"><a name="props"></a>Properties of JdbcPollInputOperator</h4>
+<p>This operator defines no additional properties beyond those defined in the <a href="#AbstractJdbcPollInputOperatorProps">parent class</a>.</p>
+<h4 id="ports_1">Ports</h4>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>outputPort</em></td>
+<td>Tuples that are read from JDBC store are emitted on this port</td>
+<td>String</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="limitations">Limitations</h2>
+<p>Out of order insertion/deletion won't be supported.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../jmsInputOperator/" class="btn btn-neutral float-right" title="JMS Input">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../AbstractJdbcTransactionableOutputOperator/" class="btn btn-neutral" title="Jdbc Output Operator"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../AbstractJdbcTransactionableOutputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../jmsInputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/jmsInputOperator/index.html b/content/docs/malhar-3.8/operators/jmsInputOperator/index.html
new file mode 100644
index 0000000..cd564a4
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/jmsInputOperator/index.html
@@ -0,0 +1,532 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>JMS Input - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "JMS Input";
+    var mkdocs_page_input_path = "operators/jmsInputOperator.md";
+    var mkdocs_page_url = "/operators/jmsInputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">JMS Input</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#jms-input-operator">JMS INPUT OPERATOR</a></li>
+                
+                    <li><a class="toctree-l4" href="#introduction-about-the-jms-input-operator">Introduction: About the JMS Input Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#why-is-it-needed">Why is it needed ?</a></li>
+                
+                    <li><a class="toctree-l4" href="#jmsbase">JMSBase</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractjmsinputoperator">AbstractJMSInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#concrete-classes">Concrete Classes</a></li>
+                
+                    <li><a class="toctree-l4" href="#application-examples">Application Examples</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>JMS Input</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="jms-input-operator">JMS INPUT OPERATOR</h1>
+<h3 id="introduction-about-the-jms-input-operator">Introduction: About the JMS Input Operator</h3>
+<p>The JMS input operator consumes data from a messaging system using the JMS client API. JMS not being a communication protocol, the operator needs an underlying JMS client API library to talk to a messaging system. Currently the operator has been tested with the Amazon SQS and Apache ActiveMQ System brokers via their respective JMS client API libraries.</p>
+<h3 id="why-is-it-needed">Why is it needed ?</h3>
+<p>You will need the operator to read data from a messaging system (e.g. Apache ActiveMQ) via the JMS client API. The operator supports both the publish-subscribe (topics) and point-to-point (queues) modes. The operator currently does not support partitioning and dynamic scalability.</p>
+<h3 id="jmsbase">JMSBase</h3>
+<p>This class encapsulates various JMS properties and behaviors and maintains connections with the JMS broker. This is the base class for JMS input and output adaptor operators. Operators should not directly subclass JMSBase but one of the JMS input or output operators.</p>
+<h3 id="abstractjmsinputoperator">AbstractJMSInputOperator</h3>
+<p>This abstract implementation serves as the base class for consuming generic messages from an external messaging system. Concrete subclasses implement conversion and emit methods to emit tuples for a concrete type. JMSStringInputOperator is one such subclass in the library used for String messages. JMSObjectInputOperator is another one used for multiple message types where the user has the ability to get String, byte array, Map or POJO messages on the respective output ports.</p>
+<h4 id="configuration-parameters">Configuration Parameters</h4>
+<p>Common configuration parameters are described here.
+<table>
+<col width="25%" />
+<col width="75%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Parameter</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>windowDataManager</p></td>
+<td align="left"><p>This is an instance of <code>WindowDataManager</code> that implements idempotency. Idempotency ensures that an operator will process the same set of messages in a window before and after a failure. For example, say the operator completed window 10 and failed before or during window 11. If the operator gets restored at window 10, it will replay the messages of window 10 which were saved from the previous run before the failure. Although important, idempotency comes at  [...]
+</tr>
+<tr class="odd">
+<td align="left"><p>connectionFactoryBuilder</p></td>
+<td align="left"><p>The operator uses the builder pattern that requires the user to specify an instance of <code>com.datatorrent.lib.io.jms.JMSBase.ConnectionFactoryBuilder</code>. This builder creates the connection factory that encapsulates the underlying JMS client API library (e.g. ActiveMQ or Amazon SQS). By default the operator uses <code>com.datatorrent.lib.io.jms.JMSBase.DefaultConnectionFactoryBuilder</code> which is used for ActiveMQ. One of the examples below describes the Ama [...]
+</tr>
+</tbody>
+</table></p>
+<h4 id="abstract-methods">Abstract Methods</h4>
+<p>The following abstract methods need to be implemented by concrete subclasses.</p>
+<p>T convert(Message message): This method converts a JMS Message object to type T.</p>
+<p>void emit(T payload): This method emits a tuple given the payload extracted from a JMS message.</p>
+<h3 id="concrete-classes">Concrete Classes</h3>
+<ol>
+<li>
+<p>JMSStringInputOperator :
+This class extends AbstractJMSInputOperator to deliver String payloads in the tuple.</p>
+</li>
+<li>
+<p>JMSObjectInputOperator:
+This class extends AbstractJMSInputOperator to deliver String, byte array, Map or POJO payloads in the tuple.</p>
+</li>
+</ol>
+<h3 id="application-examples">Application Examples</h3>
+<h4 id="activemq-example">ActiveMQ Example</h4>
+<p>The source code for the tutorial can be found here:</p>
+<p><a href="https://github.com/DataTorrent/examples/tree/master/tutorials/jmsActiveMQ">https://github.com/DataTorrent/examples/tree/master/tutorials/jmsActiveMQ</a></p>
+<p>The following code snippet from the example illustrates how the DAG is created:</p>
+<pre><code class="java">  @Override
+  public void populateDAG(DAG dag, Configuration conf)
+  {
+    JMSStringInputOperator amqInput = dag.addOperator(&quot;amqIn&quot;, 
+        new JMSStringInputOperator());
+
+    LineOutputOperator out = dag.addOperator(&quot;fileOut&quot;, new LineOutputOperator());
+
+    dag.addStream(&quot;data&quot;, amqInput.output, out.input);
+  }
+</code></pre>
+
+<p>The DAG consists of only 2 operators: the <code>JMSStringInputOperator</code> which is the input operator that feeds received ActiveMQ messages into the output operator <code>LineOutputOperator</code> which outputs these messages into a file or files.</p>
+<p>The default connectionFactoryBuilder supports ActiveMQ so there is no need to set this value. However the following ActiveMQ related values need to be set either from properties files or using the appropriate setter methods in the code:</p>
+<table>
+<col width="25%" />
+<col width="75%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Value</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>connectionFactoryProperties</p></td>
+<td align="left"><p>This is a Map of key and value strings and can be set directly from configuration as in the example above. The table below describes the most important properties.</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>topic</p></td>
+<td align="left"><p>This boolean value is set to true for the publish-subscribe case and false for the PTP (point-to-point) case.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>subject</p></td>
+<td align="left"><p>This is the queue name for PTP (point-to-point) use-case and topic name for the publish-subscribe use case.</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>durable</p></td>
+<td align="left"><p>This boolean value is set to true for durable subscriptions, false otherwise. Durable subscriptions save messages to persistent storage until consumed. Used only when the clientId (see below) is set.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>clientId</p></td>
+<td align="left"><p>The client-ID for this ActiveMQ consumer in the durable subscription mode as described above.</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>transacted</p></td>
+<td align="left"><p>This boolean value is set to true for transacted JMS sessions as described in 
+<a href="https://docs.oracle.com/javaee/7/api/javax/jms/Session.html">Session</a>.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>ackMode</p></td>
+<td align="left"><p>This string value sets the acknowledgement mode as described in 
+<a href="https://docs.oracle.com/javaee/7/api/javax/jms/Session.html#field.summary">Session fields</a>.</p></td>
+</tr>
+</tbody>
+</table>
+
+<p>The following table describes the string properties to be set in the map that is passed in the connectionFactoryProperties value described above.</p>
+<table>
+<col width="25%" />
+<col width="75%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Property Name</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>brokerURL</p></td>
+<td align="left"><p>The <a href="http://activemq.apache.org/configuring-transports.html">connection URL</a> 
+used to connect to the ActiveMQ broker</p></td></tr>
+<tr class="even">
+<td align="left"><p>userName</p></td>
+<td align="left"><p>The JMS userName used by connections created by this factory (optional when anonymous access is used)</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>password</p></td>
+<td align="left"><p>The JMS password used for connections created from this factory (optional when anonymous access is used)</p></td>
+</tr>
+</tbody>
+</table>
+
+<p>These properties can be set from the properties.xml file as shown below 
+(from the example <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/jmsActiveMQ">https://github.com/DataTorrent/examples/tree/master/tutorials/jmsActiveMQ</a> ).</p>
+<pre><code class="xml">&lt;configuration&gt;
+  &lt;property&gt;
+    &lt;name&gt;dt.operator.amqIn.prop.connectionFactoryProperties.brokerURL&lt;/name&gt;
+    &lt;value&gt;vm://localhost&lt;/value&gt;
+  &lt;/property&gt;
+  &lt;property&gt;
+    &lt;name&gt;dt.operator.amqIn.prop.subject&lt;/name&gt;
+    &lt;value&gt;jms4Amq&lt;/value&gt;
+  &lt;/property&gt;
+&lt;/configuration&gt;                                                                                                        
+</code></pre>
+
+<h4 id="sqs-example">SQS Example</h4>
+<p>The source code for the tutorial can be found here:</p>
+<p><a href="https://github.com/DataTorrent/examples/tree/master/tutorials/jmsSqs">https://github.com/DataTorrent/examples/tree/master/tutorials/jmsSqs</a></p>
+<p>The following code snippet from the example illustrates how the DAG is created:</p>
+<pre><code class="java"> @Override
+ public void populateDAG(DAG dag, Configuration conf)
+ {
+
+   JMSStringInputOperator sqsInput = dag.addOperator(&quot;sqsIn&quot;, 
+       new JMSStringInputOperator());
+
+   MyConnectionFactoryBuilder factoryBuilder = new MyConnectionFactoryBuilder();
+
+   factoryBuilder.sqsDevCredsFilename = conf.get(SQSDEV_CREDS_FILENAME_PROPERTY);
+
+   sqsInput.setConnectionFactoryBuilder(factoryBuilder);
+
+   LineOutputOperator out = dag.addOperator(&quot;fileOut&quot;, new LineOutputOperator());
+
+   dag.addStream(&quot;data&quot;, sqsInput.output, out.input);
+ }
+</code></pre>
+
+<p>The DAG consists of only 2 operators: the <code>JMSStringInputOperator</code> which is the input operator that feeds received SQS messages into the output operator <code>LineOutputOperator</code> which outputs these messages into a file or files. The code also shows how the AWS/SQS credentials are initialized in the factory builder. </p>
+<p>For SQS you will have to provide a custom connectionFactoryBuilder as shown in the example above and in <a href="https://github.com/awslabs/amazon-sqs-java-messaging-lib/blob/master/src/main/java/com/amazon/sqs/javamessaging/SQSConnectionFactory.java">SQSConnectionFactory.java</a>. The builder is typically used to supply AWS region and credential information that cannot be supplied via any JMS interfaces.</p>
+<p>The following code snippet shows a typical Builder implementation that can be supplied to the operator. The AWS credentials are supplied via a <a href="http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/PropertiesFileCredentialsProvider.html">PropertiesFileCredentialsProvider</a> object in which sqsCredsFilename is the fully qualified path to a properties file from which the AWS security credentials are to be loaded. For example <code>/etc/somewhere/credentials.pr [...]
+<pre><code class="java">static class MyConnectionFactoryBuilder implements JMSBase.ConnectionFactoryBuilder {
+
+String sqsCredsFilename;
+
+MyConnectionFactoryBuilder()
+{
+}
+
+@Override
+public ConnectionFactory buildConnectionFactory() 
+{
+  // Create the connection factory using the properties file credential provider.
+  // Connections this factory creates can talk to the queues in us-east-1 region. 
+  SQSConnectionFactory connectionFactory =
+    SQSConnectionFactory.builder()
+      .withRegion(Region.getRegion(Regions.US_EAST_1))
+      .withAWSCredentialsProvider(new PropertiesFileCredentialsProvider(sqsCredsFilename))
+      .build();
+    return connectionFactory;
+  }
+}
+</code></pre>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../jsonFormatter/" class="btn btn-neutral float-right" title="JSON Formatter">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../jdbcPollInputOperator/" class="btn btn-neutral" title="JDBC Poller Input"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../jdbcPollInputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../jsonFormatter/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/jsonFormatter/index.html b/content/docs/malhar-3.8/operators/jsonFormatter/index.html
new file mode 100644
index 0000000..1a6a45d
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/jsonFormatter/index.html
@@ -0,0 +1,478 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>JSON Formatter - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "JSON Formatter";
+    var mkdocs_page_input_path = "operators/jsonFormatter.md";
+    var mkdocs_page_url = "/operators/jsonFormatter/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">JSON Formatter</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#json-formatter">Json Formatter</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#advance-features">Advanced Features</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>JSON Formatter</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="json-formatter">Json Formatter</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>Purpose of JsonFormatter is to consume Plain Old Java Objects ("POJOs") and write them out as JSON.
+Json Formatter is <strong>idempotent</strong>, <strong>fault-tolerant</strong> &amp; <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="JsonFormatter" src="../images/jsonFormatter/JsonFormatter.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location:<strong><em>malhar-library</em></strong></li>
+<li>Available since:<strong><em>3.2.0</em></strong></li>
+<li>Operator state:<strong><em>Evolving</em></strong></li>
+<li>Java Package:<a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/formatter/JsonFormatter.java">com.datatorrent.lib.formatter.JsonFormatter</a></li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on input port which tells operator the class of incoming POJO</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that need to be formatted are received on this port</td>
+<td>Object (POJO)</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid Tuples that are emitted as JSON</td>
+<td>String</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid Tuples are emitted on this port</td>
+<td>Object</td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>JSON Formatter is both statically and dynamically partitionable.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>This can be achieved in 2 ways</p>
+<ol>
+<li>Specifying the partitioner and number of partitions in the populateDAG() method</li>
+</ol>
+<pre><code class="java">JsonFormatter jsonFormatter = dag.addOperator(&quot;jsonFormatter&quot;, JsonFormatter.class);
+StatelessPartitioner&lt;JsonFormatter&gt; partitioner1 = new StatelessPartitioner&lt;JsonFormatter&gt;(2);
+dag.setAttribute(jsonFormatter, Context.OperatorContext.PARTITIONER, partitioner1 );
+</code></pre>
+
+<ol start="2">
+<li>Specifying the partitioner in properties file.</li>
+</ol>
+<pre><code class="xml"> &lt;property&gt;
+   &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+   &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+ &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the JsonFormatter operator.
+ Above lines will partition JsonFormatter statically 2 times. Above value can be changed accordingly to change the number of static partitions.</p>
+<h3 id="dynamic-paritioning">Dynamic Partitioning</h3>
+<p>JsonFormatter can be dynamically partitioned using an out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to populateDAG method of application to dynamically partition JsonFormatter:</p>
+<pre><code class="java">JsonFormatter jsonFormatter = dag.addOperator(&quot;jsonFormatter&quot;, JsonFormatter.class);
+StatelessThroughputBasedPartitioner&lt;JsonFormatter&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+dag.setAttribute(jsonFormatter, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(jsonFormatter, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition JsonFormatter when the throughput changes.
+If the overall throughput of JsonFormatter goes beyond 30000 or less than 10000, the platform will repartition JsonFormatter
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Example for Json Formatter can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/parser">https://github.com/DataTorrent/examples/tree/master/tutorials/parser</a></p>
+<h2 id="advance-features">Advanced Features</h2>
+<p>JsonFormatter is based on <a href="https://github.com/FasterXML/jackson-databind">jackson-databind</a> and so users can make use of <a href="https://github.com/FasterXML/jackson-annotations">annotations</a> in the POJO class. Here are a few annotations that are relevant while using JsonFormatter
+1. <strong>@JsonProperty</strong> : Sometimes a POJO field needs to appear in the output JSON under a different name. You can specify names as:</p>
+<pre><code class="java">public class Ad{
+  @JsonProperty(&quot;desc&quot;)
+  public String description;
+  public List&lt;String&gt; sizes;
+}
+</code></pre>
+
+<ol start="2">
+<li><strong>@JsonIgnore</strong> : Sometimes POJOs contain properties that you do not want to write out, so you can do:</li>
+</ol>
+<pre><code class="java">public class Value {
+  public int value;
+  @JsonIgnore
+  public int internalValue;
+}
+</code></pre>
+
+<ol start="3">
+<li><strong>@JsonFormat</strong> :  Sometimes Date fields need to be printed in custom format, so you can do:</li>
+</ol>
+<pre><code class="java">public class Ad{
+  @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = &quot;EEE, d MMM yyyy HH:mm:ss&quot;)
+   public Date startDate;
+}
+</code></pre>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../jsonParser/" class="btn btn-neutral float-right" title="JSON Parser">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../jmsInputOperator/" class="btn btn-neutral" title="JMS Input"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../jmsInputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../jsonParser/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/jsonParser/index.html b/content/docs/malhar-3.8/operators/jsonParser/index.html
new file mode 100644
index 0000000..b12ab7b
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/jsonParser/index.html
@@ -0,0 +1,475 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>JSON Parser - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "JSON Parser";
+    var mkdocs_page_input_path = "operators/jsonParser.md";
+    var mkdocs_page_url = "/operators/jsonParser/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">JSON Parser</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#json-parser">Json Parser</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>JSON Parser</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="json-parser">Json Parser</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>Purpose of JsonParser operator is to parse JSON records and construct a Plain Old Java Object ("POJO") out of it. The operator also emits each record as JSONObject if the relevant output port is connected. User can also provide a schema describing JSON data to validate incoming JSON records. Valid records will be emitted as POJO / JSONObject while invalid ones are emitted on error port with the error message if the error port is connected.</p>
+<p>Json Parser is <strong>idempotent</strong>, <strong>fault-tolerant</strong> &amp; <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="" src="../images/jsonParser/JsonParser.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location:<strong><em>malhar-contrib</em></strong></li>
+<li>Available since:<strong><em>3.2.0</em></strong></li>
+<li>Operator state:<strong><em>Evolving</em></strong></li>
+<li>Java Package:<a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/JsonParser.java">com.datatorrent.contrib.parser.JsonParser</a></li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="properties-of-json-parser"><a name="props"></a>Properties of Json Parser</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>jsonSchema</em></td>
+<td><a href="http://json-schema.org/">Schema</a> describing JSON data. Incoming records can be validated using the jsonSchema. If the data is not as per the requirements specified in jsonSchema, they are emitted on the error port. This is an optional property. If the schema is not provided, incoming tuples are simply converted to POJO or JSONObject without any validations</td>
+<td>String</td>
+<td>NO</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h3 id="platform-attributes-that-influences-operator-behavior">Platform Attributes that influences operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>out.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on output port which tells the operator the class of POJO which needs to be emitted. The names of the field members of the class must match the names in the incoming JSON record. The operator ignores unknown properties.</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that need to be parsed are received on this port</td>
+<td>byte[]</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid Tuples that are emitted as pojo. Tuples are converted to POJO only if the port is connected.</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>parsedOutput</em></td>
+<td>Valid Tuples that are emitted as JSONObject. Tuples are converted to JSONObject only if the port is connected.</td>
+<td>JSONObject</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid Tuples are emitted with an error message. Invalid tuples are discarded if the port is not connected.</td>
+<td>KeyValPair &lt;String, String></td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>JSON Parser is both statically and dynamically partitionable.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>This can be achieved in 2 ways</p>
+<ol>
+<li>Specifying the partitioner and number of partitions in the populateDAG() method</li>
+</ol>
+<pre><code class="java">JsonParser jsonParser = dag.addOperator(&quot;jsonParser&quot;, JsonParser.class);
+StatelessPartitioner&lt;JsonParser&gt; partitioner1 = new StatelessPartitioner&lt;JsonParser&gt;(2);
+dag.setAttribute(jsonParser, Context.OperatorContext.PARTITIONER, partitioner1 );
+</code></pre>
+
+<ol start="2">
+<li>Specifying the partitioner in properties file.</li>
+</ol>
+<pre><code class="xml"> &lt;property&gt;
+   &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+   &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+ &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the JsonParser operator.
+ Above lines will partition JsonParser statically 2 times. Above value can be changed accordingly to change the number of static partitions.</p>
+<h3 id="dynamic-paritioning">Dynamic Partitioning</h3>
+<p>JsonParser can be dynamically partitioned using an out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to populateDAG method of application to dynamically partition JsonParser:</p>
+<pre><code class="java">JsonParser jsonParser = dag.addOperator(&quot;jsonParser&quot;, JsonParser.class);
+StatelessThroughputBasedPartitioner&lt;JsonParser&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+dag.setAttribute(jsonParser, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(jsonParser, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition JsonParser when the throughput changes.
+If the overall throughput of JsonParser goes beyond 30000 or less than 10000, the platform will repartition JsonParser
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Example for Json Parser can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/parser">https://github.com/DataTorrent/examples/tree/master/tutorials/parser</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../kafkaInputOperator/" class="btn btn-neutral float-right" title="Kafka Input">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../jsonFormatter/" class="btn btn-neutral" title="JSON Formatter"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../jsonFormatter/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../kafkaInputOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/kafkaInputOperator/index.html b/content/docs/malhar-3.8/operators/kafkaInputOperator/index.html
new file mode 100644
index 0000000..ee26ef2
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/kafkaInputOperator/index.html
@@ -0,0 +1,770 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Kafka Input - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Kafka Input";
+    var mkdocs_page_input_path = "operators/kafkaInputOperator.md";
+    var mkdocs_page_url = "/operators/kafkaInputOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Kafka Input</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#kafka-input-operator">KAFKA INPUT OPERATOR</a></li>
+                
+                    <li><a class="toctree-l4" href="#introduction">Introduction</a></li>
+                
+                    <li><a class="toctree-l4" href="#kafka-input-operator-for-kafka-08x">Kafka Input Operator for Kafka 0.8.x</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractkafkainputoperator">AbstractKafkaInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#kafkaconsumer">KafkaConsumer</a></li>
+                
+                    <li><a class="toctree-l4" href="#pre-requisites">Pre-requisites</a></li>
+                
+                    <li><a class="toctree-l4" href="#offsetmanager">OffsetManager</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractsingleportkafkainputoperator">AbstractSinglePortKafkaInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#concrete-classes">Concrete Classes</a></li>
+                
+                    <li><a class="toctree-l4" href="#application-example">Application Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#kafka-input-operator-for-kafka-09x">Kafka Input Operator for Kafka 0.9.x</a></li>
+                
+                    <li><a class="toctree-l4" href="#abstractkafkainputoperator_1">AbstractKafkaInputOperator</a></li>
+                
+                    <li><a class="toctree-l4" href="#concrete-classes_1">Concrete Classes</a></li>
+                
+                    <li><a class="toctree-l4" href="#application-example_1">Application Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#security">Security</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Kafka Input</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="kafka-input-operator">KAFKA INPUT OPERATOR</h1>
+<h3 id="introduction">Introduction</h3>
+<p><a href="http://kafka.apache.org">Apache Kafka</a> is a pull-based and distributed publish subscribe messaging system,
+topics are partitioned and replicated across nodes. </p>
+<p>The Kafka input operator consumes data from the partitions of a Kafka topic for processing in Apex. 
+The operator has the ability to automatically scale with the Kafka partitioning for high throughput. 
+It is fault-tolerant (consumer offset checkpointing) and guarantees idempotency to allow exactly-once results in the downstream pipeline.</p>
+<p>For more information about the operator design see this <a href="http://www.slideshare.net/ApacheApex/apache-apex-kafka-input-operator">presentation</a>
+and for processing guarantees this <a href="https://www.datatorrent.com/blog/end-to-end-exactly-once-with-apache-apex/">blog</a>.</p>
+<p>There are two separate implementations of the input operator,
+one built against Kafka 0.8 client and a newer version for the
+Kafka 0.9 consumer API that also works with MapR Streams.
+These reside in different packages and are described separately below.</p>
+<h3 id="kafka-input-operator-for-kafka-08x">Kafka Input Operator for Kafka 0.8.x</h3>
+<p>Package: <code>com.datatorrent.contrib.kafka</code></p>
+<p>Maven artifact: <a href="https://mvnrepository.com/artifact/org.apache.apex/malhar-contrib">malhar-contrib</a></p>
+<h3 id="abstractkafkainputoperator">AbstractKafkaInputOperator</h3>
+<p>This is the abstract implementation that serves as base class for consuming messages from Kafka messaging system. This class doesn’t have any ports.</p>
+<p><img alt="AbstractKafkaInput.png" src="../images/kafkainput/image00.png" /></p>
+<h4 id="configuration-parameters">Configuration Parameters</h4>
+<p><table>
+<col width="25%" />
+<col width="75%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Parameter</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>maxTuplesPerWindow</p></td>
+<td align="left"><p>Controls the maximum number of messages emitted in each streaming window from this operator. Minimum value is 1. Default value = MAX_VALUE </p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>idempotentStorageManager</p></td>
+<td align="left"><p>This is an instance of IdempotentStorageManager. Idempotency ensures that the operator will process the same set of messages in a window before and after a failure. For example, let's say the operator completed window 10 and failed somewhere between window 11. If the operator gets restored at window 10 then it will process the same messages again in window 10 which it did in the previous run before the failure. Idempotency is important but comes with higher cost becau [...]
+</tr>
+<tr class="even">
+<td align="left"><p>strategy</p></td>
+<td align="left"><p>Operator supports two types of partitioning strategies, ONE_TO_ONE and ONE_TO_MANY.</p>
+<p>ONE_TO_ONE: If this is enabled, the AppMaster creates one input operator instance per Kafka topic partition. So the number of Kafka topic partitions equals the number of operator instances.</p>
+<p>ONE_TO_MANY: The AppMaster creates K = min(initialPartitionCount, N) Kafka input operator instances where N is the number of Kafka topic partitions. If K is less than N, the remaining topic partitions are assigned to the K operator instances in round-robin fashion. If K is less than initialPartitionCount, the AppMaster creates one input operator instance per Kafka topic partition. For example, if initialPartitionCount = 5 and number of Kafka partitions(N) = 2 then AppMaster creates 2  [...]
+Default Value = ONE_TO_ONE</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>msgRateUpperBound</p></td>
+<td align="left"><p>Maximum messages upper bound. Operator repartitions when the <em>msgProcessedPS</em> exceeds this bound. <em>msgProcessedPS</em> is the average number of messages processed per second by this operator.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>byteRateUpperBound</p></td>
+<td align="left"><p>Maximum bytes upper bound. Operator repartitions when the <em>bytesPS</em> exceeds this bound. <em>bytesPS</em> is the average number of bytes processed per second by this operator.</p>
+<p></p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>offsetManager</p></td>
+<td align="left"><p>This is an optional parameter that is useful when the application restarts or starts at specific offsets (offsets are explained below)</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>repartitionInterval</p></td>
+<td align="left"><p>Interval specified in milliseconds. This value specifies the minimum time required between two repartition actions. Default Value = 30 Seconds</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>repartitionCheckInterval</p></td>
+<td align="left"><p>Interval specified in milliseconds. This value specifies the minimum interval between two offset updates. Default Value = 5 Seconds</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>initialPartitionCount</p></td>
+<td align="left"><p>When the ONE_TO_MANY partition strategy is enabled, this value indicates the number of Kafka input operator instances. Default Value = 1</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>consumer</p></td>
+<td align="left"><p>This is an instance of com.datatorrent.contrib.kafka.KafkaConsumer. Default Value = Instance of SimpleKafkaConsumer.</p></td>
+</tr>
+</tbody>
+</table></p>
+<h4 id="abstract-methods">Abstract Methods</h4>
+<p><code>void emitTuple(Message message)</code>: Abstract method that emits tuples extracted from Kafka message.</p>
+<h3 id="kafkaconsumer">KafkaConsumer</h3>
+<p>This is an abstract implementation of Kafka consumer. It sends the fetch
+requests to the leading brokers of Kafka partitions. For each request,
+it receives the set of messages and stores them into the buffer which is
+ArrayBlockingQueue. SimpleKafkaConsumer extends
+KafkaConsumer and serves the functionality of the Simple Consumer API, while
+HighLevelKafkaConsumer extends KafkaConsumer and serves the
+functionality of the High Level Consumer API.</p>
+<h3 id="pre-requisites">Pre-requisites</h3>
+<p>This operator uses the Kafka 0.8.2.1 client consumer API
+and will work with 0.8.x and 0.7.x versions of Kafka broker.</p>
+<h4 id="configuration-parameters_1">Configuration Parameters</h4>
+<table>
+<col width="15%" />
+<col width="15%" />
+<col width="15%" />
+<col width="55%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Parameter</p></td>
+<td align="left"><p>Type</p></td>
+<td align="left"><p>Default</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>zookeeper</p></td>
+<td align="left"><p>String</p></td>
+<td align="left"><p></p></td>
+<td align="left"><p>Specifies the zookeeper quorum of Kafka clusters that you want to consume messages from. zookeeper  is a string in the form of hostname1:port1,hostname2:port2,hostname3:port3  where hostname1,hostname2,hostname3 are hosts and port1,port2,port3 are ports of zookeeper server.  If the topic name is the same across the Kafka clusters and you want to consume data from these clusters, then configure the zookeeper as follows: c1::hs1:p1,hs2:p2,hs3:p3;c2::hs4:p4,hs5:p5;c3::hs6:p6</p>
+<p>where</p>
+<p>c1,c2,c3 indicates the cluster names, hs1,hs2,hs3,hs4,hs5,hs6 are zookeeper hosts and p1,p2,p3,p4,p5,p6 are corresponding ports. Here, cluster name is optional in case of single cluster</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>cacheSize</p></td>
+<td align="left"><p>int</p></td>
+<td align="left"><p>1024</p></td>
+<td align="left"><p>Maximum number of buffered messages held in memory.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>topic</p></td>
+<td align="left"><p>String</p></td>
+<td align="left"><p>default_topic</p></td>
+<td align="left"><p>Indicates the name of the topic.</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>initialOffset</p></td>
+<td align="left"><p>String</p></td>
+<td align="left"><p>latest</p></td>
+<td align="left"><p>Indicates the type of offset i.e., “earliest or latest”. If initialOffset is “latest”, then the operator consumes messages from the latest point of the Kafka queue. If initialOffset is “earliest”, then the operator consumes messages starting from the beginning of the message queue. This can be overridden by OffsetManager.</p></td>
+</tr>
+</tbody>
+</table>
+
+<h4 id="abstract-methods_1">Abstract Methods</h4>
+<ol>
+<li>void commitOffset(): Commit the offsets at checkpoint.</li>
+<li>Map &lt;KafkaPartition, Long&gt; getCurrentOffsets(): Return the current
+    offset status.</li>
+<li>resetPartitionsAndOffset(Set &lt;KafkaPartition&gt; partitionIds,
+    Map &lt;KafkaPartition, Long&gt; startOffset): Reset the partitions with
+    partitionIds and offsets with startOffset.</li>
+</ol>
+<h4 id="configuration-parameters-for-simplekafkaconsumer">Configuration Parameters for SimpleKafkaConsumer</h4>
+<table>
+<col width="25%" />
+<col width="15%" />
+<col width="15%" />
+<col width="45%" />
+<tbody>
+<tr class="odd">
+<td align="left"><p>Parameter</p></td>
+<td align="left"><p>Type</p></td>
+<td align="left"><p>Default</p></td>
+<td align="left"><p>Description</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>bufferSize</p></td>
+<td align="left"><p>int</p></td>
+<td align="left"><p>1 MB</p></td>
+<td align="left"><p>Specifies the maximum total size of messages for each fetch request.</p></td>
+</tr>
+<tr class="odd">
+<td align="left"><p>metadataRefreshInterval</p></td>
+<td align="left"><p>int</p></td>
+<td align="left"><p>30 Seconds</p></td>
+<td align="left"><p>Interval, in milliseconds, between refreshes of the metadata (e.g. on broker changes). Enabling metadata refresh guarantees an automatic reconnect when a new broker is elected as the host. A value of -1 disables this feature.</p></td>
+</tr>
+<tr class="even">
+<td align="left"><p>metadataRefreshRetryLimit</p></td>
+<td align="left"><p>int</p></td>
+<td align="left"><p>-1</p></td>
+<td align="left"><p>Specifies the maximum brokers' metadata refresh retry limit. -1 means unlimited retry.</p></td>
+</tr>
+</tbody>
+</table>
+
+<h3 id="offsetmanager">OffsetManager</h3>
+<p>This is an interface for offset management and is useful when consuming data
+from specified offsets. Updates the offsets for all the Kafka partitions
+periodically. Below is the code snippet:        </p>
+<pre><code class="java">public interface OffsetManager
+{
+  public Map&lt;KafkaPartition, Long&gt; loadInitialOffsets();
+  public void updateOffsets(Map&lt;KafkaPartition, Long&gt; offsetsOfPartitions);
+}
+</code></pre>
+
+<h4 id="abstract-methods_2">Abstract Methods</h4>
+<p><code>Map &lt;KafkaPartition, Long&gt; loadInitialOffsets()</code>: Specifies the initial offset for consuming messages; called at the activation stage.</p>
+<p><code>updateOffsets(Map&lt;KafkaPartition, Long&gt; offsetsOfPartitions)</code>:  This
+method is called at every repartitionCheckInterval to update offsets.</p>
+<h3 id="partitioning">Partitioning</h3>
+<p>The logical instance of the KafkaInputOperator acts as the Partitioner
+as well as a StatsListener. This is because the
+AbstractKafkaInputOperator implements both the
+com.datatorrent.api.Partitioner and com.datatorrent.api.StatsListener
+interfaces and provides an implementation of definePartitions(...) and
+processStats(...) which makes it auto-scalable.</p>
+<h4 id="response-processstatsbatchedoperatorstats-stats">Response processStats(BatchedOperatorStats stats)</h4>
+<p>The application master invokes this method on the logical instance with
+the stats (tuplesProcessedPS, bytesPS, etc.) of each partition.
+Re-partitioning happens based on whether any new Kafka partitions added for
+the topic or bytesPS and msgPS cross their respective upper bounds.</p>
+<h4 id="definepartitions">DefinePartitions</h4>
+<p>Based on the repartitionRequired field of the Response object which is
+returned by processStats(...) method, the application master invokes
+definePartitions(...) on the logical instance which is also the
+partitioner instance. Dynamic partition can be disabled by setting the
+parameter repartitionInterval value to a negative value.</p>
+<h3 id="abstractsingleportkafkainputoperator">AbstractSinglePortKafkaInputOperator</h3>
+<p>This class extends AbstractKafkaInputOperator to emit messages through single output port.</p>
+<h4 id="ports">Ports</h4>
+<p><code>outputPort &lt;T&gt;</code>: Tuples extracted from Kafka messages are emitted through this port.</p>
+<h4 id="abstract-methods_3">Abstract Methods</h4>
+<p><code>T getTuple(Message msg)</code>: Converts the Kafka message to tuple.</p>
+<h3 id="concrete-classes">Concrete Classes</h3>
+<ol>
+<li>KafkaSinglePortStringInputOperator: extends <code>AbstractSinglePortKafkaInputOperator</code>, extracts string from Kafka message.</li>
+<li>KafkaSinglePortByteArrayInputOperator: extends <code>AbstractSinglePortKafkaInputOperator</code>, extracts byte array from Kafka message.</li>
+</ol>
+<h3 id="application-example">Application Example</h3>
+<p>This section builds an Apex application using Kafka input operator.
+Below is the code snippet:</p>
+<pre><code class="java">@ApplicationAnnotation(name = &quot;KafkaApp&quot;)
+public class ExampleKafkaApplication implements StreamingApplication
+{
+  @Override
+  public void populateDAG(DAG dag, Configuration entries)
+  {
+    KafkaSinglePortByteArrayInputOperator input =  dag.addOperator(&quot;MessageReader&quot;, new KafkaSinglePortByteArrayInputOperator());
+    ConsoleOutputOperator output = dag.addOperator(&quot;Output&quot;, new ConsoleOutputOperator());
+    dag.addStream(&quot;MessageData&quot;, input.outputPort, output.input);
+  }
+}
+</code></pre>
+
+<p>Below is the configuration for the Kafka topic name “test” and
+the zookeeper quorum “localhost:2181”:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.MessageReader.prop.topic&lt;/name&gt;
+  &lt;value&gt;test&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;dt.operator.KafkaInputOperator.prop.zookeeper&lt;/name&gt;
+  &lt;value&gt;localhost:2181&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<h3 id="kafka-input-operator-for-kafka-09x">Kafka Input Operator for Kafka 0.9.x</h3>
+<p>Package: <code>org.apache.apex.malhar.kafka</code></p>
+<p>Maven Artifact: <a href="https://mvnrepository.com/artifact/org.apache.apex/malhar-kafka">malhar-kafka</a></p>
+<p>This version uses the new 0.9 version of consumer API and works with Kafka broker version 0.9 and later.
+The operator is fault-tolerant, scalable and supports input from multiple clusters and multiple topics in a single operator instance.</p>
+<h4 id="pre-requisites_1">Pre-requisites</h4>
+<p>This operator requires version 0.9.0 or later of the Kafka Consumer API.</p>
+<h3 id="abstractkafkainputoperator_1">AbstractKafkaInputOperator</h3>
+<h4 id="ports_1">Ports</h4>
+<hr />
+<p>This abstract class doesn't have any ports.</p>
+<h4 id="configuration-properties">Configuration properties</h4>
+<hr />
+<ul>
+<li>
+<p><strong><em>clusters</em></strong> - String[]</p>
+<ul>
+<li>Mandatory Parameter.</li>
+<li>Specifies the Kafka clusters that you want to consume messages from. To configure multi-cluster support, you need to specify the clusters separated by ";".</li>
+</ul>
+</li>
+<li>
+<p><strong><em>topics</em></strong> - String[]</p>
+<ul>
+<li>Mandatory Parameter.</li>
+<li>Specifies the Kafka topics that you want to consume messages from. If you want multi-topic support, then specify the topics separated by ",".</li>
+</ul>
+</li>
+<li>
+<p><strong><em>strategy</em></strong> - PartitionStrategy</p>
+<ul>
+<li>
+<p>Operator supports two types of partitioning strategies, <code>ONE_TO_ONE</code> and <code>ONE_TO_MANY</code>.</p>
+<p><code>ONE_TO_ONE</code>: If this is enabled, the AppMaster creates one input operator instance per Kafka topic partition. So the number of Kafka topic partitions equals the number of operator instances.
+<code>ONE_TO_MANY</code>: The AppMaster creates K = min(initialPartitionCount, N) Kafka input operator instances where N is the number of Kafka topic partitions. If K is less than N, the remaining topic partitions are assigned to the K operator instances in round-robin fashion. If K is less than initialPartitionCount, the AppMaster creates one input operator instance per Kafka topic partition. For example, if initialPartitionCount = 5 and number of Kafka partitions(N) = 2 then AppMaster  [...]
+Default Value = <code>PartitionStrategy.ONE_TO_ONE</code>.</p>
+</li>
+</ul>
+</li>
+<li>
+<p><strong><em>initialPartitionCount</em></strong> - Integer</p>
+<ul>
+<li>When the ONE_TO_MANY partition strategy is enabled, this value indicates the number of Kafka input operator instances. 
+    Default Value = 1.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>repartitionInterval</em></strong> - Long</p>
+<ul>
+<li>Interval specified in milliseconds. This value specifies the minimum time required between two repartition actions. 
+    Default Value = 30 Seconds.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>repartitionCheckInterval</em></strong> - Long</p>
+<ul>
+<li>Interval specified in milliseconds. This value specifies the minimum interval between two stat checks.
+    Default Value = 5 Seconds.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>maxTuplesPerWindow</em></strong> - Integer</p>
+<ul>
+<li>Controls the maximum number of messages emitted in each streaming window from this operator. Minimum value is 1. 
+    Default value = <code>MAX_VALUE</code> </li>
+</ul>
+</li>
+<li>
+<p><strong><em>initialOffset</em></strong> - InitialOffset</p>
+<ul>
+<li>Indicates the type of offset i.e, <code>EARLIEST</code> or <code>LATEST</code> or <code>APPLICATION_OR_EARLIEST</code> or <code>APPLICATION_OR_LATEST</code>. 
+    <code>LATEST</code> =&gt; Consume new messages from latest offset in the topic. 
+    <code>EARLIEST</code> =&gt; Consume all messages available in the topic.
+    <code>APPLICATION_OR_EARLIEST</code> =&gt; Consume messages from committed position from last run. If there is no committed offset, then start consuming from beginning.
+    <code>APPLICATION_OR_LATEST</code> =&gt; Consumes messages from committed position from last run. If a committed offset is unavailable, then start consuming from latest position.
+    Default value = <code>InitialOffset.APPLICATION_OR_LATEST</code></li>
+</ul>
+</li>
+<li>
+<p><strong><em>metricsRefreshInterval</em></strong> - Long</p>
+<ul>
+<li>Interval specified in milliseconds. This value specifies the minimum interval between two metric stat updates.
+    Default value = 5 Seconds.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>consumerTimeout</em></strong> - Long</p>
+<ul>
+<li>Indicates the <a href="http://kafka.apache.org/090/javadoc/org/apache/kafka/clients/consumer/KafkaConsumer.html#poll">time waiting in poll</a> when data is not available.
+    Default value = 5 Seconds.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>holdingBufferSize</em></strong> - Long</p>
+<ul>
+<li>Indicates the maximum number of messages kept in memory for emitting.
+    Default value = 1024.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>consumerProps</em></strong> - Properties</p>
+<ul>
+<li>Specify the <a href="http://kafka.apache.org/090/documentation.html#newconsumerconfigs">consumer properties</a> which are not yet set on the operator.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>windowDataManager</em></strong> - WindowDataManager</p>
+<ul>
+<li>If set to a value other than the default, such as <code>FSWindowDataManager</code>, specifies that the operator will process the same set of messages in a window before and after a failure. This is important but it comes with higher cost because at the end of each window the operator needs to persist some state with respect to that window.
+    Default value = <code>WindowDataManager.NoopWindowDataManager</code>.</li>
+</ul>
+</li>
+</ul>
+<h4 id="abstract-methods_4">Abstract Methods</h4>
+<p><code>void emitTuple(String cluster, ConsumerRecord&lt;byte[], byte[]&gt; message)</code>: Abstract method that emits tuples
+extracted from Kafka message.</p>
+<h3 id="concrete-classes_1">Concrete Classes</h3>
+<h4 id="kafkasingleportinputoperator">KafkaSinglePortInputOperator</h4>
+<p>This class extends from AbstractKafkaInputOperator and defines the <code>getTuple()</code> method which extracts byte array from Kafka message.</p>
+<h4 id="ports_2">Ports</h4>
+<p><code>outputPort &lt;byte[]&gt;</code>: Tuples extracted from Kafka messages are emitted through this port.</p>
+<h3 id="application-example_1">Application Example</h3>
+<p>This section builds an Apex application using Kafka input operator.
+Below is the code snippet:</p>
+<pre><code class="java">@ApplicationAnnotation(name = &quot;KafkaApp&quot;)
+public class ExampleKafkaApplication implements StreamingApplication
+{
+  @Override
+  public void populateDAG(DAG dag, Configuration entries)
+  {
+    KafkaSinglePortInputOperator input =  dag.addOperator(&quot;MessageReader&quot;, new KafkaSinglePortInputOperator());
+    ConsoleOutputOperator output = dag.addOperator(&quot;Output&quot;, new ConsoleOutputOperator());
+    dag.addStream(&quot;MessageData&quot;, input.outputPort, output.input);
+  }
+}
+</code></pre>
+
+<p>Below is the configuration for topic “test” and broker “localhost:9092”:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;apex.operator.MessageReader.prop.topics&lt;/name&gt;
+  &lt;value&gt;test&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;apex.operator.KafkaInputOperator.prop.clusters&lt;/name&gt;
+  &lt;value&gt;localhost:9092&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>Multiple topics can be specified as a comma-separated list; similarly, multiple clusters can be specified as a semicolon-separated list; for example:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;apex.operator.MessageReader.prop.topics&lt;/name&gt;
+  &lt;value&gt;test1, test2&lt;/value&gt;
+&lt;/property&gt;
+
+&lt;property&gt;
+  &lt;name&gt;apex.operator.KafkaInputOperator.prop.clusters&lt;/name&gt;
+  &lt;value&gt;localhost:9092; localhost:9093; localhost:9094&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<p>A full example application project can be found <a href="https://github.com/apache/apex-malhar/tree/master/examples/kafka">here</a>.</p>
+<h3 id="security">Security</h3>
+<p>Kafka from 0.9.x onwards supports <a href="https://kafka.apache.org/090/documentation.html#security_overview">Authentication, Encryption and Authorization</a>.</p>
+<p>See <a href="http://apache-apex-users-list.78494.x6.nabble.com/kafka-td1089.html">here</a> for more information.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../regexparser/" class="btn btn-neutral float-right" title="Regex Parser">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../jsonParser/" class="btn btn-neutral" title="JSON Parser"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../jsonParser/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../regexparser/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/regexparser/index.html b/content/docs/malhar-3.8/operators/regexparser/index.html
new file mode 100644
index 0000000..08c8ca7
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/regexparser/index.html
@@ -0,0 +1,647 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Regex Parser - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Regex Parser";
+    var mkdocs_page_input_path = "operators/regexparser.md";
+    var mkdocs_page_url = "/operators/regexparser/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Regex Parser</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#regex-parser-operator">Regex Parser Operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Regex Parser</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="regex-parser-operator">Regex Parser Operator</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p><strong>RegexParser</strong> is designed to parse records based on a regex pattern and construct a concrete java class also known as <a href="https://en.wikipedia.org/wiki/Plain_Old_Java_Object">"POJO"</a> out of it. User needs to provide the regex pattern and schema definition to describe the data pattern. Based on regex pattern, the operator will split the data and then schema definition will be used to map the incoming record to POJO. User can also provide date format if any, in th [...]
+<p>The regex pattern has to match the tuple in its entirety. Valid records will be emitted as POJOs while invalid ones are emitted on the error port with an error message if the corresponding ports are connected.</p>
+<p><strong>Note</strong>: field names of POJO must match field names in schema and in the same order as it appears in the incoming data.</p>
+<h2 id="overview">Overview</h2>
+<p>The operator is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>partitionable</strong>.</p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-contrib</em></strong></li>
+<li>Available since: <strong><em>3.7.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Package: <a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/RegexParser.java">com.datatorrent.contrib.parser.RegexParser</a></li>
+</ol>
+<h2 id="properties-of-regexparser"><a name="props"></a>Properties of RegexParser</h2>
+<p>User needs to set the <code>schema</code> JSON string which describes data as well as specify the format on date fields if any.
+<strong>Note</strong>: In the examples below {ApplicationName} and {OperatorName} are placeholders for the respective names of the application and the operator.</p>
+<p>e.g.</p>
+<pre><code class="xml">    &lt;property&gt;
+        &lt;name&gt;dt.application.{ApplicationName}.operator.{OperatorName}.prop.schema&lt;/name&gt;
+        &lt;value&gt;{
+                 &quot;fields&quot;: [
+                   {
+                     &quot;name&quot;: &quot;date&quot;,
+                     &quot;type&quot;: &quot;Date&quot;,
+                     &quot;constraints&quot;: {
+                       &quot;format&quot;: &quot;yyyy:MM:dd:hh:mm:ss&quot;
+                     }
+                   },
+                   {
+                     &quot;name&quot;: &quot;id&quot;,
+                     &quot;type&quot;: &quot;Integer&quot;
+                   },
+                   {
+                     &quot;name&quot;: &quot;signInId&quot;,
+                     &quot;type&quot;: &quot;String&quot;
+                   },
+                   {
+                     &quot;name&quot;: &quot;ipAddress&quot;,
+                     &quot;type&quot;: &quot;String&quot;
+                   },
+                   {
+                     &quot;name&quot;: &quot;serviceId&quot;,
+                     &quot;type&quot;: &quot;Double&quot;
+                   },
+                   {
+                     &quot;name&quot;: &quot;accountId&quot;,
+                     &quot;type&quot;: &quot;Long&quot;
+                   },
+                   {
+                     &quot;name&quot;: &quot;platform&quot;,
+                     &quot;type&quot;: &quot;Boolean&quot;
+                   }
+                 ]
+               }
+        &lt;/value&gt;
+    &lt;/property&gt;
+</code></pre>
+
+<p>Note that <code>Boolean</code> type in the above example accepts case insensitive values for either true or false.</p>
+<p>User needs to set the <code>splitRegexPattern</code> property whose value is the regular expression that describes the pattern of the incoming data.
+Below is the example for setting <code>splitRegexPattern</code> from <code>properties.xml</code> of the application.</p>
+<pre><code class="xml">    &lt;property&gt;
+        &lt;name&gt;dt.application.{ApplicationName}.operator.{OperatorName}.prop.splitRegexPattern&lt;/name&gt;
+        &lt;value&gt;.+\[SEQ=\w+\]\s*(\d+:[\d\d:]+)\s(\d+)\s* sign-in_id=(\S+) .*ip_address=(\S+).* service_id=(\S+).*account_id=(\S+).*platform=(\S+)&lt;/value&gt;
+    &lt;/property&gt;
+</code></pre>
+
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>schema</em></td>
+<td><a href="https://github.com/apache/apex-malhar/blob/master/contrib/src/main/java/com/datatorrent/contrib/parser/DelimitedSchema.java">Schema</a>  describing data (see example above)</td>
+<td>String</td>
+<td>YES</td>
+<td>N/A</td>
+</tr>
+<tr>
+<td><em>splitRegexPattern</em></td>
+<td>regex expression that describes the pattern of incoming data</td>
+<td>String</td>
+<td>YES</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h2 id="platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on output port which tells operator the class of POJO which need to be emitted</td>
+<td>Class</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<p>The operator takes care of converting the byte array received on the input port to a string by decoding using the JVM's default <code>Charset</code>. Then, splits the string using the <code>splitRegexPattern</code> and populates an object using the <code>schema</code>. Apex platform converts this object to the object of <code>TUPLE_CLASS</code> attribute value while emitting.</p>
+<p>Below is the example for setting <code>TUPLE_CLASS</code> attribute on output port from <code>properties.xml</code> file of the application.</p>
+<pre><code class="xml">    &lt;property&gt;
+        &lt;name&gt;dt.application.{ApplicationName}.operator.{OperatorName}.port.out.attr.TUPLE_CLASS&lt;/name&gt;
+        &lt;value&gt;com.datatorrent.tutorial.regexparser.ServerLog&lt;/value&gt;
+    &lt;/property&gt;
+</code></pre>
+
+<p>Below is the example for setting <code>TUPLE_CLASS</code> attribute on output port from <code>Application.java</code> file of the application.</p>
+<pre><code class="java">RegexParser regexParser = dag.addOperator(&quot;regexParser&quot;, RegexParser.class);
+dag.setOutputPortAttribute(regexParser.out, Context.PortContext.TUPLE_CLASS, ServerLog.class);
+</code></pre>
+
+<p>where the value (ServerLog) set above is the expected output POJO class from the operator and example is as below.</p>
+<pre><code class="java">    public class ServerLog
+    {
+      private Date date;
+      private int id;
+      private String signInId;
+      private String ipAddress;
+      private double serviceId;
+      private long accountId;
+      private boolean platform;
+      public int getId()
+      {
+        return id;
+      }
+      public void setId(int id)
+      {
+        this.id = id;
+      }
+      public Date getDate()
+      {
+        return date;
+      }
+      public void setDate(Date date)
+      {
+        this.date = date;
+      }
+      public String getSignInId()
+      {
+        return signInId;
+      }
+      public void setSignInId(String signInId)
+      {
+        this.signInId = signInId;
+      }
+      public String getIpAddress()
+      {
+        return ipAddress;
+      }
+      public void setIpAddress(String ipAddress)
+      {
+        this.ipAddress = ipAddress;
+      }
+      public double getServiceId()
+      {
+        return serviceId;
+      }
+      public void setServiceId(double serviceId)
+      {
+        this.serviceId = serviceId;
+      }
+      public long getAccountId()
+      {
+        return accountId;
+      }
+      public void setAccountId(long accountId)
+      {
+        this.accountId = accountId;
+      }
+      public boolean getPlatform()
+      {
+        return platform;
+      }
+      public void setPlatform(boolean platform)
+      {
+        this.platform = platform;
+      }
+    }
+</code></pre>
+
+<p>Let us look at how the data gets populated into the POJO using the example <code>schema</code>, <code>splitRegexPattern</code> and <code>TUPLE_CLASS</code> definitions given above.</p>
+<p>Consider sample event log as below that matches with the <code>splitRegexPattern</code>.</p>
+<pre><code>2015-10-01T03:14:49.000-07:00 lvn-d1-dev DevServer[9876]: INFO: [EVENT][SEQ=248717] 2015:10:01:03:14:49 101 sign-in_id=11111@psop.com ip_address=1.1.1.1  service_id=IP1234-NPB12345_00 result=RESULT_SUCCES console_id=0000000138e91b4e58236bf32besdafasdfasdfasdfsadf  account_id=11111  platform=pik
+</code></pre>
+
+<p>The below images depict the expression match on the data. The parentheses corresponding to <a href="https://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html#cg">capturing groups</a> are highlighted in green and each such group corresponds to one field of the POJO. There will be 7 such groups in the current example.</p>
+<p><img alt="Regular Expression pattern match" src="../images/regexparser/regexcapturedgroups.png" /></p>
+<p>The matched data in the event log is highlighted with 7 different colors below.</p>
+<p><img alt="Matched Log Data" src="../images/regexparser/logcapturedgroups.png" /></p>
+<p>The matched fields above will be populated onto an object based on the <code>schema</code> definition defined above. Object population will be based on one to one mapping from matched data to <code>schema</code> definition fields in the match order. Once the object is populated, it will be converted to the <code>TUPLE_CLASS</code> type while emitting on the output port <code>out</code> by the Apex platform.</p>
+<h2 id="supported-datatypes-in-schema"><a name="dataTypes"></a>Supported DataTypes in Schema</h2>
+<ul>
+<li>Integer</li>
+<li>Long</li>
+<li>Double</li>
+<li>Character</li>
+<li>String</li>
+<li>Boolean</li>
+<li>Date</li>
+<li>Float</li>
+</ul>
+<h2 id="schema-constraints"><a name="constraints"></a>Schema Constraints</h2>
+<p>Only Date constraints are supported by the operator as of now.</p>
+<table>
+<thead>
+<tr>
+<th><strong>DataType</strong></th>
+<th><strong>Constraints</strong></th>
+<th><strong>Description</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>Date</em></td>
+<td>format</td>
+<td>A simple date format as specified in the <a href="http://docs.oracle.com/javase/8/docs/api/java/text/SimpleDateFormat.html?is-external=true">SimpleDateFormat</a> class</td>
+</tr>
+</tbody>
+</table>
+<h2 id="ports">Ports</h2>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that needs to be parsed are received on this port</td>
+<td>byte[]</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid tuples that are emitted as POJO</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid tuples are emitted with error message</td>
+<td>KeyValPair &lt;String, String></td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>Regex Parser can be statically or dynamically partitioned.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>This can be achieved in the below 2 ways.</p>
+<p>Specifying the partitioner and number of partitions in the populateDAG() method.</p>
+<pre><code class="java">    RegexParser regexParser = dag.addOperator(&quot;regexParser&quot;, RegexParser.class);
+    StatelessPartitioner&lt;RegexParser&gt; partitioner = new StatelessPartitioner&lt;RegexParser&gt;(2);
+    dag.setAttribute(regexParser, Context.OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Specifying the partitioner in properties file.</p>
+<pre><code class="xml">    &lt;property&gt;
+        &lt;name&gt;dt.application.{ApplicationName}.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+        &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+    &lt;/property&gt;
+</code></pre>
+
+<p>The above lines will statically partition RegexParser into 2 partitions. The value above can be changed to adjust the number of static partitions.</p>
+<h3 id="dynamic-partitioning">Dynamic Partitioning</h3>
+<p>RegexParser can be dynamically partitioned using the out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to the <code>populateDAG</code> method of application to dynamically partition RegexParser:</p>
+<pre><code class="java">    RegexParser regexParser = dag.addOperator(&quot;regexParser&quot;, RegexParser.class);
+    StatelessThroughputBasedPartitioner&lt;RegexParser&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+    partitioner.setCooldownMillis(conf.getLong(COOL_DOWN_MILLIS, 10000));
+    partitioner.setMaximumEvents(conf.getLong(MAX_THROUGHPUT, 30000));
+    partitioner.setMinimumEvents(conf.getLong(MIN_THROUGHPUT, 10000));
+    dag.setAttribute(regexParser, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+    dag.setAttribute(regexParser, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>The above code will dynamically partition RegexParser when the throughput changes.
+If the overall throughput of regexParser goes above 30000 or drops below 10000, the platform will repartition RegexParser
+to balance the throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Coming Soon</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../s3outputmodule/" class="btn btn-neutral float-right" title="S3 Output Module">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../kafkaInputOperator/" class="btn btn-neutral" title="Kafka Input"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../kafkaInputOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../s3outputmodule/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/s3outputmodule/index.html b/content/docs/malhar-3.8/operators/s3outputmodule/index.html
new file mode 100644
index 0000000..6b46d99
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/s3outputmodule/index.html
@@ -0,0 +1,578 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>S3 Output Module - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "S3 Output Module";
+    var mkdocs_page_input_path = "operators/s3outputmodule.md";
+    var mkdocs_page_url = "/operators/s3outputmodule/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">S3 Output Module</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#s3outputmodule">S3OutputModule</a></li>
+                
+                    <li><a class="toctree-l4" href="#about-amazon-s3">About Amazon S3</a></li>
+                
+                    <li><a class="toctree-l4" href="#s3-output-module">S3 Output Module</a></li>
+                
+                    <li><a class="toctree-l4" href="#overview">Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#multipart-upload-feature">Multipart Upload Feature</a></li>
+                
+                    <li><a class="toctree-l4" href="#module">Module</a></li>
+                
+                    <li><a class="toctree-l4" href="#operators-in-s3outputmodule">Operators in S3OutputModule</a></li>
+                
+                    <li><a class="toctree-l4" href="#configuration-parameters">Configuration Parameters</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#application-example">Application Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>S3 Output Module</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="s3outputmodule">S3OutputModule</h1>
+<h3 id="about-amazon-s3">About Amazon S3</h3>
+<hr />
+<p><strong>Amazon S3 (Simple Storage Service)</strong> is an object storage system with a web service interface to store and retrieve any amount of data at any time from anywhere on the web, offered by Amazon Web Services.</p>
+<h3 id="s3-output-module">S3 Output Module</h3>
+<hr />
+<p>The purpose of the S3Output module is to upload files/directories into an Amazon S3 bucket using the multipart upload feature (see below).</p>
+<p>S3Output module is <strong>fault-tolerant</strong>, <strong>statically/dynamically partitionable</strong> and has <strong>exactly once</strong> semantics.</p>
+<p>Module class is <strong>S3OutputModule</strong> located in the package <strong>org.apache.apex.malhar.lib.fs.s3</strong>; please refer to <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/fs/s3/S3OutputModule.java">github URL</a>.</p>
+<h3 id="overview">Overview</h3>
+<hr />
+<p>File upload to S3 can also be done using <strong>AbstractFileOutputOperator</strong> but that operator uploads large files sequentially; the current module in contrast can substantially improve the upload speed of large files by reading and uploading their constituent blocks in parallel. </p>
+<p>The table below lists additional benefits of this module over <strong>AbstractFileOutputOperator</strong>.</p>
+<table>
+<thead>
+<tr>
+<th><strong>S3OutputModule</strong></th>
+<th><strong>AbstractFileOutputOperator</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>Maximum upload file size is 5TB.</td>
+<td>Maximum upload file size is 5GB.</td>
+</tr>
+<tr>
+<td>Best fit for both large and small files.</td>
+<td>Best fit for small files.</td>
+</tr>
+<tr>
+<td>Module uses AmazonS3Client API's to upload objects into S3. Large files will upload using multipart feature and small files(single block) will upload using <strong>putObject(...)</strong> API</td>
+<td>Operator uses Hadoop filesystems like <strong>S3AFileSystem</strong>. Consists of couple of steps to upload object into S3: (1) Write the data into the local filesystem. (2) When the stream closes, filesystem uploads the local object into S3.</td>
+</tr>
+<tr>
+<td>If a block fails to upload then you need to re-upload the data for that block only</td>
+<td>If a file fails to upload then you need to re-upload the complete file.</td>
+</tr>
+</tbody>
+</table>
+<h3 id="multipart-upload-feature">Multipart Upload Feature</h3>
+<hr />
+<p>Uploading parts of a file is done via the <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/mpuoverview.html">multipart feature</a>; using this feature, each part of a file can be uploaded independently.
+After all parts of a file are uploaded successfully, Amazon S3 combines the parts as a single object.</p>
+<p>Please refer to the <a href="http://docs.aws.amazon.com/AmazonS3/latest/dev/llJavaUploadFile.html">Java code</a> for uploading file into Amazon S3 bucket using multipart feature.</p>
+<h3 id="module">Module</h3>
+<hr />
+<p>A <strong>module</strong> is a group of operators pre-wired together so they work as a single conceptual entity in an application. Typically, a module will contain a set of input ports, output ports and configuration properties. The operators internal to the module will be automatically configured based on the supplied module properties.</p>
+<h3 id="operators-in-s3outputmodule">Operators in S3OutputModule</h3>
+<hr />
+<p>Following diagram illustrates the DAG in this module:</p>
+<p><img alt="" src="../images/s3output/s3outputmodule.png" /></p>
+<ul>
+<li>
+<p><strong><em>S3InitiateFileUploadOperator</em></strong></p>
+<ul>
+<li>Initiate the upload for the file using <strong>AmazonS3Client.initiateMultipartUpload(...)</strong> method only if the number of blocks for a file is greater than 1. By successfully initiating the upload, S3 returns a response of type <strong>InitiateMultipartUploadResult</strong>, which includes the <strong>upload ID</strong>, which is the unique identifier for the multipart upload. This <strong>upload ID</strong> must be included in each operation like block upload and upload compl [...]
+If the file has single block then the operator emits an empty string, this is an indication to downstream operators to not use the multi-part feature.</li>
+<li>This operator emits the pair <strong>(filemetadata, uploadId)</strong> to <strong>S3FileMerger</strong> and the triple <strong>(filePath, metadata, uploadId)</strong> to <strong>S3BlockUploadOperator</strong>.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>S3BlockUploadOperator</em></strong></p>
+<ul>
+<li>This operator uploads the blocks into S3 using different calls which depend on the number of blocks of a file.
+If the file has single block then upload the block using <strong>AmazonS3Client.putObject(...)</strong> call. S3 returns a response of type <strong>PutObjectResult</strong> which includes the <strong>ETag</strong>.
+If the file has more blocks then upload the block using <strong>AmazonS3Client.uploadPart(...)</strong> call. S3 returns a response of type <strong>UploadPartResult</strong> which includes the <strong>ETag</strong>. This <strong>ETag</strong> value must be included in the request to complete multipart upload.</li>
+<li><strong>S3BlockUploadOperator</strong> emits the pair <strong>(path, ETag)</strong> to <strong>s3FileMerger</strong>.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>S3FileMerger</em></strong></p>
+<ul>
+<li>Complete multipart upload request using <strong>AmazonS3Client.completeMultipartUpload(...)</strong>. This call must include the <strong>upload ID</strong> and a list of both part numbers and corresponding <strong>ETag</strong> values. <strong>S3FileMerger</strong> sends the complete multi-part upload request to S3 once it has all the <strong>part ETag's</strong> of a file. 
+<strong>Amazon S3</strong> creates an object by concatenating the parts in ascending order based on part number. After a successful upload request, the parts no longer exist and S3 response includes an <strong>ETag</strong> which uniquely identifies the combined object data. </li>
+</ul>
+</li>
+</ul>
+<h3 id="configuration-parameters">Configuration Parameters</h3>
+<hr />
+<ul>
+<li>
+<p><strong><em>accessKey</em></strong> -   String</p>
+<ul>
+<li>Mandatory Parameter    </li>
+<li>Specifies the AWS access key to access Amazon S3 and has permissions to access the specified bucket.</li>
+<li>Example value = AKIAJVAGFANC2LSZCJ4Q</li>
+</ul>
+</li>
+<li>
+<p><strong><em>secretAccessKey</em></strong>   -   String</p>
+<ul>
+<li>Mandatory Parameter</li>
+<li>Specifies the AWS secret access key to access Amazon S3 and has permissions to access the specified bucket.</li>
+<li>Example value = wpVr3U82RmCKJoY007YfkaawT7CenhTcK1B8clue</li>
+</ul>
+</li>
+<li>
+<p><strong><em>endPoint</em></strong>  -   String</p>
+<ul>
+<li>Endpoint is the URL for the entry point for a web service. Specify the valid endpoint to access S3 bucket.</li>
+<li>This is an optional parameter. If the bucket is accessed only from specific end point then the user has to specify this parameter.</li>
+<li>Please refer to <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">endPoint</a> table about the endpoints supported by S3. </li>
+<li>Example value = s3.amazonaws.com </li>
+</ul>
+</li>
+<li>
+<p><strong><em>bucketName</em></strong>    -   String</p>
+<ul>
+<li>Mandatory Parameter</li>
+<li>S3 buckets are used to store objects, which consist of data and metadata that describe the data. Specify the name of the bucket.</li>
+<li>Example value = apex.app.test.s3</li>
+</ul>
+</li>
+<li>
+<p><strong><em>outputDirectoryPath</em></strong>   -   String</p>
+<ul>
+<li>Mandatory Parameter</li>
+<li>Specifies the path of the output directory. </li>
+<li>Example value = dt/app/output</li>
+</ul>
+</li>
+<li>
+<p><strong><em>mergerCount</em></strong>   -   int</p>
+<ul>
+<li>Specify the number of instances of S3FileMerger operator.</li>
+<li>Default value = 1</li>
+</ul>
+</li>
+<li>
+<p><strong><em>timeOutWindowCount</em></strong>    -   int</p>
+<ul>
+<li>This property maps to the <a href="https://github.com/apache/apex-core/blob/master/api/src/main/java/com/datatorrent/api/Context.java">OperatorContext.TIMEOUT_WINDOW_COUNT</a> attribute and is a count of streaming windows. If specified, it will be set on all the operators of this module. Since these operators interact with S3, there may be additional latencies that cause the platform to kill them because they are considered stalled. Increasing this value prevents this and allows the  [...]
+<li>Default value = 6000</li>
+</ul>
+</li>
+</ul>
+<h3 id="ports">Ports</h3>
+<hr />
+<ul>
+<li>
+<p><strong><em>filesMetadataInput</em></strong>    -   AbstractFileSplitter.FileMetadata</p>
+<ul>
+<li>Input port for files metadata.</li>
+<li>Mandatory</li>
+</ul>
+</li>
+<li>
+<p><strong><em>blocksMetadataInput</em></strong>   -   BlockMetadata.FileBlockMetadata</p>
+<ul>
+<li>Input port for blocks metadata.</li>
+<li>Mandatory</li>
+</ul>
+</li>
+<li>
+<p><strong><em>blockData</em></strong> -   AbstractBlockReader.ReaderRecord<Slice></p>
+<ul>
+<li>Input port for blocks data.</li>
+<li>Mandatory</li>
+</ul>
+</li>
+</ul>
+<h3 id="application-example">Application Example</h3>
+<hr />
+<p>Please refer to <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/s3output">Example</a> for S3OutputModule sample application.</p>
+<h3 id="partitioning">Partitioning</h3>
+<hr />
+<p>Partitioning the module means that the operators in the module can be partitioned.</p>
+<h4 id="stateless-partitioning">Stateless Partitioning</h4>
+<hr />
+<p>Partitioning the operator in module can be achieved as follows:</p>
+<h5 id="s3initiatefileuploadoperator">S3InitiateFileUploadOperator</h5>
+<hr />
+<p>Partition of this operator is achieved indirectly as follows:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.{ModuleName}#InitiateUpload.attr.PARTITIONER&lt;/name&gt;
+  &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:{N}&lt;/value&gt;
+&lt;/property&gt;     
+</code></pre>
+
+<p>where {ModuleName} is the name of the S3OutputModule and
+      {N} is the number of static partitions.
+Above lines will partition S3InitiateFileUploadOperator statically {N} times.</p>
+<h5 id="s3blockuploadoperator">S3BlockUploadOperator</h5>
+<hr />
+<p>Locality of S3BlockUploadOperator with the upstream operator (FSInputModule/BlockReader) must be set to PARTITION_PARALLEL for performance benefits by avoiding serialization/deserialization of objects. So, partitioning of this operator depends on the upstream operator, which is of type FSInputModule/BlockReader.</p>
+<h5 id="s3filemerger">S3FileMerger</h5>
+<hr />
+<p>By setting the parameter "mergerCount", <strong>S3FileMerger</strong> can be statically partitioned. This can be achieved in two ways:</p>
+<p>(a) Following code can be added to populateDAG(DAG dag, Configuration conf) method of application to statically partition <strong>S3FileMerger</strong> {N} times:</p>
+<pre><code class="java">  FSInputModule inputModule = dag.addModule(&quot;HDFSInputModule&quot;, new FSInputModule());
+  S3OutputModule outputModule = dag.addModule(&quot;S3OutputModule&quot;, new S3OutputModule());
+  outputModule.setMergerCount({N});
+</code></pre>
+
+<p>(b) By setting the parameter in properties file as follows</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.{ModuleName}.prop.mergerCount&lt;/name&gt;
+    &lt;value&gt;{N}&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {ModuleName} is the name of the S3OutputModule and {N} is the number of static partitions.
+Above lines will partition <strong>S3FileMerger</strong> statically {N} times.          </p>
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<hr />
+<p>Dynamic partitioning is a feature of Apex platform which changes the number of partitions of an operator at run time.
+Locality of <strong>S3BlockUploadOperator</strong> with upstream operator(FSInputModule/BlockReader) must set to PARTITION_PARALLEL for performance benefits by avoiding serialization/deserialization of objects. So, dynamic partitioning of this operator depends on upstream operator which is of type FSInputModule/BlockReader.</p>
+<p>From the example application, by setting the maxReaders and minReaders value on FSInputModule, <strong>S3BlockUploadOperator</strong> is dynamically partitioned between minReaders and maxReaders. This can be achieved in two ways:
+(a) Following code can be added to the <strong>populateDAG(DAG dag, Configuration conf)</strong> method of the application to dynamically partition <strong>S3BlockUploadOperator</strong> between {N1} and {N2} times:</p>
+<pre><code class="java">FSInputModule inputModule = dag.addModule(&quot;HDFSInputModule&quot;, new FSInputModule());
+inputModule.setMinReaders({N1});
+inputModule.setMaxReaders({N2});
+S3OutputModule outputModule = dag.addModule(&quot;S3OutputModule&quot;, new S3OutputModule());
+</code></pre>
+
+<p>(b) By setting the parameter in properties file as follows:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.HDFSInputModule.prop.minReaders&lt;/name&gt;
+  &lt;value&gt;{N1}&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;dt.operator.HDFSInputModule.prop.maxReaders&lt;/name&gt;
+  &lt;value&gt;{N2}&lt;/value&gt;
+&lt;/property&gt;         
+</code></pre>
+
+<p>{N1} and {N2} represent the minimum and maximum number of partitions of BlockReader.
+Above lines will dynamically partition the <strong>S3BlockUploadOperator</strong> between {N1} and {N2} times. </p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../transform/" class="btn btn-neutral float-right" title="Transformer">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../regexparser/" class="btn btn-neutral" title="Regex Parser"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../regexparser/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../transform/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/transform/index.html b/content/docs/malhar-3.8/operators/transform/index.html
new file mode 100644
index 0000000..640a55b
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/transform/index.html
@@ -0,0 +1,591 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Transformer - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Transformer";
+    var mkdocs_page_input_path = "operators/transform.md";
+    var mkdocs_page_url = "/operators/transform/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Transformer</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#transform-operator-documentation">Transform - Operator Documentation</a></li>
+                
+                    <li><a class="toctree-l4" href="#about-transform-operator">About Transform operator</a></li>
+                
+                    <li><a class="toctree-l4" href="#use-case">Use Case</a></li>
+                
+                    <li><a class="toctree-l4" href="#configuration-parameters">Configuration Parameters</a></li>
+                
+                    <li><a class="toctree-l4" href="#configuration-example">Configuration Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#ports">Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#attributes">Attributes</a></li>
+                
+                    <li><a class="toctree-l4" href="#application-example">Application Example</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Transformer</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="transform-operator-documentation">Transform - Operator Documentation</h1>
+<h3 id="about-transform-operator">About Transform operator</h3>
+<hr />
+<p>Transform means mapping of field expression from input to output or conversion of fields from one type to another.
+This operator is stateless. This operator receives objects on its input port; for each such input object, it creates a new output object whose fields are computed as expressions involving fields of the input object. 
+The types of the input and output objects are configurable as are the expressions used to compute the output fields. </p>
+<p>The operator class is <code>TransformOperator</code> located in the package <code>com.datatorrent.lib.transform</code>.
+Please refer to <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/transform/TransformOperator.java">github URL</a> for <code>TransformOperator</code>.</p>
+<h3 id="use-case">Use Case</h3>
+<hr />
+<p>Consider the data that needs to be transformed as per output schema.</p>
+<p>Consider input objects with these fields:</p>
+<table>
+<thead>
+<tr>
+<th>Name</th>
+<th>Type</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>FirstName</td>
+<td>String</td>
+</tr>
+<tr>
+<td>LastName</td>
+<td>String</td>
+</tr>
+<tr>
+<td>Phone</td>
+<td>String</td>
+</tr>
+<tr>
+<td>DateOfBirth</td>
+<td>java.util.Date</td>
+</tr>
+<tr>
+<td>Address</td>
+<td>String</td>
+</tr>
+</tbody>
+</table>
+<p>and output objects with fields: </p>
+<table>
+<thead>
+<tr>
+<th>Name</th>
+<th>Type</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>Name</td>
+<td>String</td>
+</tr>
+<tr>
+<td>Phone</td>
+<td>String</td>
+</tr>
+<tr>
+<td>Age</td>
+<td>Integer</td>
+</tr>
+<tr>
+<td>Address</td>
+<td>String</td>
+</tr>
+</tbody>
+</table>
+<p>Suppose <code>Name</code> is a concatenation of <code>FirstName</code> and <code>LastName</code> and 
+        <code>Age</code> is computed by subtracting the <code>DateOfBirth</code> from the current year.</p>
+<p>These simple computations can be expressed as Java expressions where the input object is
+represented by $ and provided as configuration parameters as follows:</p>
+<pre><code>Name =&gt; {$.FirstName}.concat(\&quot; \&quot;).concat({$.LastName})
+Age =&gt; (new java.util.Date()).getYear() - {$.dateOfBirth}.getYear()
+</code></pre>
+
+<h3 id="configuration-parameters">Configuration Parameters</h3>
+<hr />
+<ul>
+<li>
+<p><strong><em>expressionMap</em></strong> -   Map<String, String></p>
+<ul>
+<li>Mandatory Parameter</li>
+<li>Specifies the map between the output field (key) and the expression used to compute it (value) using fields of the input Java object.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>expressionFunctions</em></strong> -   List<String></p>
+<ul>
+<li>List of imported classes or methods that should be made available for expressions to use. It overrides the default list.</li>
+<li>Default Value = {java.lang.Math.<em>, org.apache.commons.lang3.StringUtils.</em>, org.apache.commons.lang3.StringEscapeUtils.<em>, org.apache.commons.lang3.time.DurationFormatUtils.</em>, org.apache.commons.lang3.time.DateFormatUtils.*}</li>
+</ul>
+</li>
+<li>
+<p><strong><em>copyMatchingFields</em></strong> -   boolean</p>
+<ul>
+<li>Specifies whether matching fields should be copied; here matching means the name and type of an input field is the same as the name and type of an output field. 
+    If a matching field appears in <code>expressionMap</code>, it is not copied to the output object.</li>
+<li>Default Value = true.</li>
+</ul>
+</li>
+</ul>
+<h3 id="configuration-example">Configuration Example</h3>
+<hr />
+<p>Consider input object with fields:</p>
+<table>
+<thead>
+<tr>
+<th>Name</th>
+<th>Type</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>FirstName</td>
+<td>String</td>
+</tr>
+<tr>
+<td>LastName</td>
+<td>String</td>
+</tr>
+<tr>
+<td>StartDate</td>
+<td>org.joda.time.DateTime</td>
+</tr>
+</tbody>
+</table>
+<p>and output objects with fields:</p>
+<table>
+<thead>
+<tr>
+<th>Name</th>
+<th>Type</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td>Name</td>
+<td>String</td>
+</tr>
+<tr>
+<td>isLeapYear</td>
+<td>Boolean</td>
+</tr>
+</tbody>
+</table>
+<p>Note: <code>org.joda.time.DateTime</code> class is not present in the default list. So, we need to add this library to <code>expressionFunctions</code> as below in populateDAG method:</p>
+<pre><code class="java">TransformOperator operator = dag.addOperator(&quot;transform&quot;, new TransformOperator());
+operator.setExpressionFunctions(Arrays.asList(&quot;org.joda.time.DateTime&quot;, &quot;org.apache.commons.lang3.StringUtils&quot;));
+Map&lt;String,String&gt; expressionMap = new HashMap&lt;&gt;();
+expressionMap.put(&quot;isLeapYear&quot;, &quot;{$.StartDate}.year().isLeap()&quot;);
+expressionMap.put(&quot;Name&quot;, &quot;org.apache.commons.lang3.StringUtils.joinWith(\&quot; \&quot;, {$.FirstName}, {$.LastName})&quot;);
+operator.setExpressionMap(expressionMap);
+</code></pre>
+
+<p>Above Properties also can be set in properties file as follows:</p>
+<pre><code class="xml">&lt;property&gt;
+  &lt;name&gt;dt.operator.transform.expressionFunctions[0]&lt;/name&gt;
+  &lt;value&gt;org.joda.time.DateTime&lt;/value&gt;
+&lt;/property&gt;     
+&lt;property&gt;
+  &lt;name&gt;dt.operator.transform.expressionFunctions[1]&lt;/name&gt;
+  &lt;value&gt;org.apache.commons.lang3.StringUtils&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;dt.operator.transform.expressionMap(isLeapYear)&lt;/name&gt;
+  &lt;value&gt;{$.StartDate}.year().isLeap()&lt;/value&gt;
+&lt;/property&gt;
+&lt;property&gt;
+  &lt;name&gt;dt.operator.transform.expressionMap(Name)&lt;/name&gt;
+  &lt;value&gt;org.apache.commons.lang3.StringUtils.joinWith(\&quot; \&quot;, {$.FirstName}, {$.LastName})&lt;/value&gt;
+&lt;/property&gt;
+</code></pre>
+
+<h3 id="ports">Ports</h3>
+<hr />
+<ul>
+<li>
+<p><strong><em>input</em></strong> -   Port for input tuples.</p>
+<ul>
+<li>Mandatory input port</li>
+</ul>
+</li>
+<li>
+<p><strong><em>output</em></strong>    -   Port for transformed output tuples.</p>
+<ul>
+<li>Mandatory output port</li>
+</ul>
+</li>
+</ul>
+<h3 id="attributes">Attributes</h3>
+<hr />
+<ul>
+<li>
+<p><strong><em>Input port Attribute - input.TUPLE_CLASS</em></strong> - Fully qualified class name and class should be Kryo serializable.</p>
+<ul>
+<li>Mandatory attribute</li>
+<li>Type of input tuple.</li>
+</ul>
+</li>
+<li>
+<p><strong><em>Output port Attribute - output.TUPLE_CLASS</em></strong> - Fully qualified class name and class should be Kryo serializable.</p>
+<ul>
+<li>Mandatory attribute</li>
+<li>Type of output tuple.</li>
+</ul>
+</li>
+</ul>
+<h3 id="application-example">Application Example</h3>
+<hr />
+<p>Please refer to <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/transform">Example</a> for a transform sample application.</p>
+<h3 id="partitioning">Partitioning</h3>
+<hr />
+<p>Being stateless, this operator can be partitioned using any of the built-in partitioners present in the Malhar library by setting a few properties as follows:</p>
+<h4 id="stateless-partitioning">Stateless partitioning</h4>
+<p>Stateless partitioning will ensure that TransformOperator will be partitioned right at the starting of the application and will remain partitioned throughout the lifetime of the DAG.
+TransformOperator can be stateless partitioned by adding following lines to properties.xml:</p>
+<pre><code class="xml">  &lt;property&gt;
+    &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+    &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:{N}&lt;/value&gt;
+  &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the TransformOperator operator and
+      {N} is the number of static partitions.
+Above lines will partition TransformOperator statically {N} times. </p>
+<h4 id="dynamic-partitioning">Dynamic Partitioning</h4>
+<p>Dynamic partitioning is a feature of Apex platform which changes the partition of the operator based on certain condition.
+TransformOperator can be dynamically partitioned using the below two partitioners:</p>
+<h5 id="throughput-based">Throughput based</h5>
+<p>Following code can be added to populateDAG(DAG dag, Configuration conf) method of application to dynamically partitioning TransformOperator:</p>
+<pre><code class="java">StatelessThroughputBasedPartitioner&lt;TransformOperator&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(10000);
+partitioner.setMaximumEvents(30000);
+partitioner.setMinimumEvents(10000);
+dag.setAttribute(transform, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(transform, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>Above code will dynamically partition TransformOperator when the throughput changes.
+If the overall throughput of TransformOperator goes beyond 30000 or less than 10000, the platform will repartition TransformOperator 
+to balance throughput of a single partition to be between 10000 and 30000.
+CooldownMillis of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<p>Source code for this dynamic application can be found <a href="https://github.com/DataTorrent/examples/blob/master/tutorials/transform/src/main/java/com/example/transform/DynamicTransformApplication.java">here</a>.</p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../windowedOperator/" class="btn btn-neutral float-right" title="Windowed Operator">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../s3outputmodule/" class="btn btn-neutral" title="S3 Output Module"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../s3outputmodule/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../windowedOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/windowedOperator/index.html b/content/docs/malhar-3.8/operators/windowedOperator/index.html
new file mode 100644
index 0000000..fae2fc6
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/windowedOperator/index.html
@@ -0,0 +1,540 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Windowed Operator - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "Windowed Operator";
+    var mkdocs_page_input_path = "operators/windowedOperator.md";
+    var mkdocs_page_url = "/operators/windowedOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">Windowed Operator</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#windowed-operator">WINDOWED OPERATOR</a></li>
+                
+                    <li><a class="toctree-l4" href="#introduction">Introduction</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-overview">Operator Overview</a></li>
+                
+                    <li><a class="toctree-l4" href="#keyed-or-not-keyed">Keyed or Not Keyed</a></li>
+                
+                    <li><a class="toctree-l4" href="#window-option">Window Option</a></li>
+                
+                    <li><a class="toctree-l4" href="#timestamp-extractor">Timestamp Extractor</a></li>
+                
+                    <li><a class="toctree-l4" href="#watermarks">Watermarks</a></li>
+                
+                    <li><a class="toctree-l4" href="#allowed-lateness">Allowed Lateness</a></li>
+                
+                    <li><a class="toctree-l4" href="#accumulation">Accumulation</a></li>
+                
+                    <li><a class="toctree-l4" href="#triggers">Triggers</a></li>
+                
+                    <li><a class="toctree-l4" href="#accumulation-mode">Accumulation Mode</a></li>
+                
+                    <li><a class="toctree-l4" href="#window-propagation">Window Propagation</a></li>
+                
+                    <li><a class="toctree-l4" href="#state-storage">State Storage</a></li>
+                
+                    <li><a class="toctree-l4" href="#merging-two-streams">Merging two streams</a></li>
+                
+                    <li><a class="toctree-l4" href="#usage-examples">Usage Examples</a></li>
+                
+                    <li><a class="toctree-l4" href="#advanced-topics">Advanced Topics</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>Windowed Operator</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="windowed-operator">WINDOWED OPERATOR</h1>
+<h2 id="introduction">Introduction</h2>
+<p>The <code>WindowedOperator</code> is an operator in the Apex Malhar Library that supports the windowing semantics outlined by Apache Beam, including the notions of watermarks, triggers, accumulation modes, and allowed lateness. It currently supports event time windows, sliding event time windows, session windows, and global window. The reader of this document is encouraged to read this <a href="https://www.oreilly.com/ideas/the-world-beyond-batch-streaming-101">blog</a> for the basic  [...]
+<p>Our High-Level API supports event-time processing through the WindowedOperator. If you'd like to process tuples based on event time, you are encouraged to use this operator either directly with our DAG-level API, or indirectly through our High-Level API.</p>
+<p>It is important to note that the word "windows" in this document is unrelated to "streaming windows" or "application windows" in Apex, which are based on ingression time. For more information about "streaming windows" and "application windows" in Apex, please refer to <a href="http://apex.apache.org/docs/apex/application_development/#streaming-window">this document</a>.</p>
+<h2 id="operator-overview">Operator Overview</h2>
+<p>In this document, we will explore the following features in the WindowedOperator.</p>
+<ol>
+<li>Keyed or Not Keyed</li>
+<li>Window Option</li>
+<li>Timestamp Extractor</li>
+<li>Watermarks</li>
+<li>Allowed Lateness</li>
+<li>Accumulation</li>
+<li>Triggers</li>
+<li>Accumulation Mode</li>
+<li>Window Propagation</li>
+<li>Merging two streams</li>
+</ol>
+<h2 id="keyed-or-not-keyed">Keyed or Not Keyed</h2>
+<p>One of the first things the user of the operator has to decide is whether the operator is keyed (<a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/impl/KeyedWindowedOperatorImpl.java">KeyedWindowedOperatorImpl</a>) or not keyed (<a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/impl/WindowedOperatorImpl.java">WindowedOperatorImpl</a>). State storage, accumu [...]
+<p>Here are examples of creating a windowed operator.</p>
+<p>Non-keyed:</p>
+<pre><code class="java">// Creating a non-keyed windowed operator
+WindowedOperatorImpl&lt;InputType, AccumulationType, OutputType&gt; windowedOperator = new WindowedOperatorImpl&lt;&gt;();
+</code></pre>
+
+<p>Keyed:</p>
+<pre><code class="java">// Creating a keyed windowed operator
+KeyedWindowedOperatorImpl&lt;KeyType, InputValueType, AccumulationType, OutputValueType&gt; windowedOperator = new KeyedWindowedOperatorImpl&lt;&gt;();
+</code></pre>
+
+<p>We will cover the concepts of <a href="#state_storage">state storage</a>, <a href="#accumulation">accumulation</a> and <a href="#triggers">triggers</a> later in this document.</p>
+<h2 id="window-option">Window Option</h2>
+<p>Each incoming tuple of the WindowedOperator is assigned to one or more windows. The <code>WindowOption</code> provides the way to specify what constitutes a window. The following <code>WindowOption</code>s are supported.</p>
+<h3 id="globalwindow"><code>GlobalWindow</code></h3>
+<p>There is only one window for the entire duration of the application. All tuples are assigned to this one window.</p>
+<pre><code class="java">// Setting the global window option
+windowedOperator.setWindowOption(new WindowOption.GlobalWindow());
+</code></pre>
+
+<h3 id="timewindows"><code>TimeWindows</code></h3>
+<p>A tuple is assigned to exactly one window based on event time, and each window has a fixed duration. One window is followed immediately by another window, and they do not overlap. As a result, one timestamp can only belong to one window.</p>
+<p><img alt="" src="../images/windowedOperator/time-windows.png" /> </p>
+<pre><code class="java">// Setting a time window option with a duration of 10 minutes
+windowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(10)));
+</code></pre>
+
+<h3 id="slidingtimewindows"><code>SlidingTimeWindows</code></h3>
+<p>Similar to <code>TimeWindow</code>, each window has a fixed duration. But it takes an additional duration parameter <code>slideBy</code> which must be smaller than the window duration and the window duration must be divisible by the <code>slideBy</code> duration. Each window overlaps with multiple windows. In this case, since one timestamp belongs to multiple windows, a tuple is assigned to multiple windows. The number of windows a tuple belongs to is exactly the window duration divid [...]
+<p><img alt="" src="../images/windowedOperator/sliding-time-windows.png" /> </p>
+<pre><code class="java">// Setting a sliding time window option with a duration of 10 minutes and a slideBy duration of 2 minutes
+windowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(10)).slideBy(Duration.standardMinutes(2)));
+</code></pre>
+
+<h3 id="sessionwindows"><code>SessionWindows</code></h3>
+<p><code>SessionWindow</code>s have variable durations and are based on the key of the tuple. Each tuple is assigned to exactly one window. It takes a duration parameter <code>minGap</code>, which specifies the minimum time gap between two tuples that belong to two <em>different</em> <code>SessionWindows</code> of the same key. <code>minGap</code> is also the duration of the "proto-session" window for a single timestamp, and it is the minimum duration of any session window.</p>
+<pre><code class="java">// Setting a session window option with a minimum gap of one hour
+windowedOperator.setWindowOption(new WindowOption.SessionWindows(Duration.standardHours(1)));
+</code></pre>
+
+<p>Upon arrival of a tuple, a proto-session window is created. A proto-session window for a tuple is a temporary session window with begin timestamp being the tuple timestamp and the duration being <code>minGap</code>.</p>
+<p><img alt="" src="../images/windowedOperator/session-windows-4.png" /></p>
+<p>To ensure that there are no two tuples of the same key in different session windows that are less than <code>minGap</code> apart, the <code>WindowedOperator</code> does the following checks:</p>
+<h4 id="the-proto-session-window-can-be-fit-into-an-existing-sessionwindow-of-the-same-key-without-change">The proto-session window can be fit into an existing <code>SessionWindow</code> of the same key without change</h4>
+<p>The new tuple is simply applied to the state of the existing <code>SessionWindow</code>.</p>
+<p><img alt="" src="../images/windowedOperator/session-windows-3.png" /></p>
+<h4 id="the-proto-session-window-overlaps-with-two-existing-session-windows-of-the-same-key">The proto-session window overlaps with two existing session windows of the same key</h4>
+<p>A new Session Window is created with the merged state of the two existing <code>SessionWindow</code>s, plus the new tuple. The two existing <code>SessionWindow</code>s will be deleted and retraction triggers for the two deleted windows will be fired. (Please see <a href="#triggers">here</a> for details on <code>Trigger</code>s)</p>
+<p><img alt="" src="../images/windowedOperator/session-windows-1.png" /> </p>
+<h4 id="the-proto-session-window-overlaps-with-one-existing-session-window-of-the-same-key">The proto-session window overlaps with one existing session window of the same key</h4>
+<p>A new <code>SessionWindow</code> is created with the state of the existing <code>SessionWindow</code>, plus the new tuple, with a longer duration than the existing <code>SessionWindow</code> and possibly an earlier begin timestamp to cover the new tuple. The existing <code>SessionWindow</code> will be deleted and a retraction trigger for the old window will be fired.</p>
+<p><img alt="" src="../images/windowedOperator/session-windows-2.png" /> </p>
+<h4 id="all-of-the-above-checks-return-false">All of the above checks return false</h4>
+<p>The proto-session window is in effect and the new tuple is assigned to that window.</p>
+<h2 id="timestamp-extractor">Timestamp Extractor</h2>
+<p>The <code>WindowedOperator</code> expects a timestamp extractor. This is for <code>WindowedOperator</code> to extract the timestamp from the tuple for window assignment.</p>
+<pre><code class="java">// Setting a time extractor
+windowedOperator.setTimestampExtractor(new Function&lt;InputTupleType, Long&gt;()
+{
+  @Override
+  public Long apply(InputTupleType tuple)
+  {
+    return tuple.timestamp;
+  }
+});
+
+</code></pre>
+
+<h2 id="watermarks">Watermarks</h2>
+<p>Watermarks are control tuples that include a timestamp. A watermark tells <code>WindowedOperator</code> that all windows that lie completely before the given timestamp are considered late, and the rest of the windows are considered early. </p>
+<h3 id="fixed-watermark">Fixed Watermark</h3>
+<p>If watermarks are not available from upstream, the user of the WindowedOperator can set a fixed watermark. The fixed watermark represents the number of milliseconds before the timestamp derived from the Apex streaming window ID. Note that the Apex streaming window ID is an implicit timestamp that more or less represents the ingression time of the tuple.</p>
+<pre><code class="java">// Setting a fixed watermark that is 10 seconds behind the ingression time
+windowedOperator.setFixedWatermark(10000);
+</code></pre>
+
+<h2 id="allowed-lateness">Allowed Lateness</h2>
+<p>Allowed Lateness specifies the lateness horizon from the watermark. If a tuple has a timestamp that lies beyond the lateness horizon, it is dropped by the <code>WindowedOperator</code>. Also, if a window completely lies beyond the lateness horizon as a result of the arrival of a new watermark, the window along with its state is purged from <code>WindowedOperator</code>.</p>
+<p><img alt="" src="../images/windowedOperator/allowed-lateness.png" /> </p>
+<pre><code class="java">// Setting allowed lateness to be one hour
+windowedOperator.setAllowedLateness(Duration.standardHours(1));
+</code></pre>
+
+<h2 id="accumulation">Accumulation</h2>
+<p>The Accumulation object tells the <code>WindowedOperator</code> how the operator state is accumulated. It tells the <code>WindowedOperator</code> what to do with its state upon arrival of an incoming tuple. This is where the business logic goes. Please refer to the interface definition <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/Accumulation.java">here</a> in github. For non-keyed WindowedOperator, the state is per [...]
+<pre><code class="java">// Setting the accumulation to be the sum for longs, assuming both the input type and the output type are a long
+windowedOperator.setAccumulation(new SumLong());
+</code></pre>
+
+<p>The user of this operator can use one of the existing accumulation implementations <a href="https://github.com/apache/apex-malhar/tree/master/library/src/main/java/org/apache/apex/malhar/lib/window/accumulation">here</a>, or provide their own custom accumulation that reflects their business logic. </p>
+<h2 id="triggers">Triggers</h2>
+<p>Triggers are tuples emitted to downstream by the <code>WindowedOperator</code>. The data in the tuples are based on the state of <code>WindowedOperator</code> governed by the Accumulation object. There are two types of trigger: time-based triggers and count-based triggers. Time-based triggers are triggers that get fired in a regular time interval, and count-based triggers are triggers that get fired based on the number of tuples received. The user of WindowedOperator can specify diffe [...]
+<p>Also, by default, a trigger is fired for a window when the window is flipped from being early to being late. This is also called an "on-time" trigger.</p>
+<pre><code class="java">// Creating a trigger option that tells the windowed operator to fire a trigger when the window is flipped from early to late, and fire a trigger every 10 seconds when it's early, and fire a trigger every time a tuple is received when it's late
+TriggerOption triggerOption = TriggerOption.AtWatermark().withEarlyFiringsAtEvery(Duration.standardSeconds(10)).withLateFiringsAtEvery(1);
+
+// Setting the trigger option for the windowed operator
+windowedOperator.setTriggerOption(triggerOption);
+</code></pre>
+
+<p>Note that for the non-keyed <code>WindowedOperator</code>, triggers are fired on a per-window basis. For the keyed <code>WindowedOperator</code>, triggers are fired on a per-key-per-window basis.</p>
+<p>There is also an option the user can set (<code>fireOnlyUpdatedPanes</code>) to make the <code>WindowedOperator</code> not fire a trigger if the trigger value is the same as the value of the previous trigger. </p>
+<pre><code class="java">// Telling the windowed operator to fire a trigger only if the state has been changed since the last trigger
+triggerOption.fireOnlyUpdatedPanes();
+</code></pre>
+
+<h2 id="accumulation-mode">Accumulation Mode</h2>
+<p>Accumulation Mode tells the operator what to do with the state of the window when a trigger is fired.
+There are three supported accumulation modes: <code>ACCUMULATING</code>, <code>DISCARDING</code>, and <code>ACCUMULATING_AND_RETRACTING</code>.</p>
+<ul>
+<li><code>ACCUMULATING</code>: The state of the window is preserved until purged</li>
+<li><code>DISCARDING</code>: The state of the window is discarded after firing of a trigger</li>
+<li><code>ACCUMULATING_AND_RETRACTING</code>: The state of the window is preserved until purged, but if the state has changed upon a trigger compared to the previous trigger, an additional retraction trigger is fired.</li>
+</ul>
+<pre><code class="java">// Setting accumulation mode to be ACCUMULATING
+triggerOption.accumulatingFiredPanes();
+
+// Setting accumulation mode to be DISCARDING
+triggerOption.discardingFiredPanes();
+
+// Setting accumulation mode to be ACCUMULATING_AND_RETRACTING
+triggerOption.accumulatingAndRetractingFiredPanes();
+</code></pre>
+
+<h2 id="window-propagation">Window Propagation</h2>
+<p>It is possible to chain multiple instances of <code>WindowedOperator</code> and have only the most upstream instance assign the windows and have all downstream instances inherit the same windows of the triggers from the upstream instance. If WindowOption is <code>null</code> (i.e. <code>setWindowOption</code> is not called), the <code>WindowedOperator</code> assumes that the incoming tuples are <code>WindowedTuple</code>s that contain the information of the window assignment for each  [...]
+<h2 id="state-storage">State Storage</h2>
+<p>One of the most important elements of the <code>WindowedOperator</code> is the state storage. Each window in the operator (or each window/key pair if the operator is keyed) has its own state and how the state is stored and checkpointed is likely to be the most important factor for performance.</p>
+<p>The <code>WindowedOperator</code> currently supports two different state storage mechanisms.</p>
+<p><a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/impl/InMemoryWindowedStorage.java">In-Memory Windowed Storage</a> stores the operator state only in memory and the entire state is copied to DFS at checkpoint. This storage is useful only if the state is expected to be small and the cardinality of valid windows and keys is small.</p>
+<pre><code class="java">// Setting the data storage for windowed operator to be an in-memory storage
+windowedOperator.setDataStorage(new InMemoryWindowedStorage&lt;AccumulationType&gt;());
+</code></pre>
+
+<p><a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/impl/SpillableWindowedPlainStorage.java">Spillable Windowed Storage</a> stores the operator state in DFS with a cache in memory. This storage mechanism handles large states and incremental checkpointing. </p>
+<pre><code class="java">// Setting the data storage for windowed operator to be a spillable storage
+SpillableWindowedPlainStorage&lt;MutableLong&gt; dataStorage = new SpillableWindowedPlainStorage&lt;&gt;();
+// setup of the spillable storage omitted here for brevity
+windowedOperator.setDataStorage(dataStorage);
+</code></pre>
+
+<h2 id="merging-two-streams">Merging two streams</h2>
+<p>The <code>WindowedMergeOperator</code> is a <code>WindowedOperator</code> that takes two incoming data streams. It takes a <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/org/apache/apex/malhar/lib/window/MergeAccumulation.java"><code>MergeAccumulation</code></a> instead of a regular Accumulation. The user of this operator can implement their custom merge or join accumulation based on their business logic. Examples of this type of accumulation are <a h [...]
+<p>The <code>WindowedMergeOperator</code> has its own watermark. Its watermark timestamp is the earlier watermark timestamp between the two input streams. When that value changes upon the arrival of incoming watermarks, a watermark control tuple with that timestamp value will be fired to downstream.</p>
+<h2 id="usage-examples">Usage Examples</h2>
+<p>For an example usage of the <code>WindowedOperator</code> via the High level API, click <a href="https://github.com/apache/apex-malhar/blob/master/examples/highlevelapi/src/main/java/org/apache/apex/malhar/stream/sample/WindowedWordCount.java">here</a>.</p>
+<p>For an example usage of the <code>WindowedOperator</code> via the DAG level API, click <a href="https://github.com/apache/apex-malhar/blob/master/library/src/test/java/org/apache/apex/malhar/lib/window/sample/wordcount/Application.java">here</a>.</p>
+<h2 id="advanced-topics">Advanced Topics</h2>
+<h3 id="idempotency-considerations">Idempotency Considerations</h3>
+<p>For the <code>WindowedOperator</code> to be <a href="http://apex.apache.org/docs/apex/development_best_practices/#idempotence">idempotent</a>, both data tuples and watermarks must be emitted deterministically. i.e. When replaying from a checkpoint, the same tuples and watermarks must be emitted in the same Apex streaming window as before the failure happens.</p>
+<p>In order to achieve this, the <code>WindowedOperator</code> has the following behavior:</p>
+<ol>
+<li>
+<p>The time-based triggers are fired based on the implicit timestamp from the Apex streaming windows, not based on the wall clock time. And the time-based triggers are fired only at the Apex streaming window boundary. This means that the Apex streaming window width should be smaller than or equal to the interval duration of any time-based trigger, and ideally the interval duration of the time-based trigger can be expressed as an integer multiple of the Apex streaming window width.</p>
+</li>
+<li>
+<p>The processing of the incoming watermarks is only done at the Apex streaming window boundary. This includes the change of status from early to late for windows that lie beyond the watermark timestamp, the purging of windows from the state for windows that lie completely beyond the lateness horizon, and the propagation of watermark tuples to downstream.</p>
+</li>
+</ol>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+        <a href="../xmlParserOperator/" class="btn btn-neutral float-right" title="XML Parser">Next <span class="icon icon-circle-arrow-right"></span></a>
+      
+      
+        <a href="../transform/" class="btn btn-neutral" title="Transformer"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../transform/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+        <span style="margin-left: 15px"><a href="../xmlParserOperator/" style="color: #fcfcfc">Next &raquo;</a></span>
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/operators/xmlParserOperator/index.html b/content/docs/malhar-3.8/operators/xmlParserOperator/index.html
new file mode 100644
index 0000000..77ff431
--- /dev/null
+++ b/content/docs/malhar-3.8/operators/xmlParserOperator/index.html
@@ -0,0 +1,471 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>XML Parser - Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="../../favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="../../css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="../../css/highlight.css">
+
+  
+  <script>
+    // Current page data
+    var mkdocs_page_name = "XML Parser";
+    var mkdocs_page_input_path = "operators/xmlParserOperator.md";
+    var mkdocs_page_url = "/operators/xmlParserOperator/";
+  </script>
+  
+  <script src="../../js/jquery-2.1.1.min.js"></script>
+  <script src="../../js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="../../js/highlight.pack.js"></script>
+  <script src="../../js/theme.js"></script> 
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="../.." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="../../search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href="../..">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../../apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="../windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 current">
+        <a class="current" href="./">XML Parser</a>
+        
+            <ul>
+            
+                <li class="toctree-l3"><a href="#xml-parser">Xml Parser</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-objective">Operator Objective</a></li>
+                
+                    <li><a class="toctree-l4" href="#class-diagram">Class Diagram</a></li>
+                
+                    <li><a class="toctree-l4" href="#operator-information">Operator Information</a></li>
+                
+                    <li><a class="toctree-l4" href="#properties-attributes-and-ports">Properties, Attributes and Ports</a></li>
+                
+                    <li><a class="toctree-l4" href="#partitioning">Partitioning</a></li>
+                
+                    <li><a class="toctree-l4" href="#example">Example</a></li>
+                
+            
+            </ul>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href="../..">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href="../..">Docs</a> &raquo;</li>
+    
+      
+        
+          <li>Operators &raquo;</li>
+        
+      
+    
+    <li>XML Parser</li>
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+                <h1 id="xml-parser">Xml Parser</h1>
+<h2 id="operator-objective">Operator Objective</h2>
+<p>The XmlParser operator parses XML records and constructs POJOs ("Plain Old Java Objects") from them. The operator also emits each record as a DOM Document if the relevant output port is connected. Users can also provide an XSD (XML Schema Definition) to validate incoming XML records. Valid records will be emitted as POJOs / DOM Documents, while invalid ones are emitted on the error port with an error message if the error port is connected.</p>
+<p>XmlParser is <strong>idempotent</strong>, <strong>fault-tolerant</strong> and <strong>statically/dynamically partitionable</strong>.</p>
+<h2 id="class-diagram">Class Diagram</h2>
+<p><img alt="" src="../images/xmlParser/XmlParser.png" /></p>
+<h2 id="operator-information">Operator Information</h2>
+<ol>
+<li>Operator location: <strong><em>malhar-library</em></strong></li>
+<li>Available since: <strong><em>3.2.0</em></strong></li>
+<li>Operator state: <strong><em>Evolving</em></strong></li>
+<li>Java Package: <a href="https://github.com/apache/apex-malhar/blob/master/library/src/main/java/com/datatorrent/lib/parser/XmlParser.java">com.datatorrent.lib.parser.XmlParser</a></li>
+</ol>
+<h2 id="properties-attributes-and-ports">Properties, Attributes and Ports</h2>
+<h3 id="properties-of-xml-parser"><a name="props"></a>Properties of Xml Parser</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Property</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+<th><strong>Default Value</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>schemaXSDFile</em></td>
+<td>[XSD] describing XML data. Incoming records can be validated using the schemaXSDFile. If the data is not as per the requirements specified in schemaXSDFile, they are emitted on the error port. This is an optional property. If the XSD is not provided, incoming tuples are simply converted to POJOs or DOM Documents without any validations</td>
+<td>String</td>
+<td>No</td>
+<td>N/A</td>
+</tr>
+</tbody>
+</table>
+<h3 id="platform-attributes-that-influence-operator-behavior">Platform Attributes that influence operator behavior</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Attribute</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>out.TUPLE_CLASS</em></td>
+<td>TUPLE_CLASS attribute on the output port which tells the operator the class of POJO which needs to be emitted. The names of the field members of the class must match the element names in the incoming XML record. The operator ignores unknown properties, i.e. fields present in the XML but not in the TUPLE_CLASS or vice versa.</td>
+<td>Class or FQCN</td>
+<td>Yes</td>
+</tr>
+</tbody>
+</table>
+<h3 id="ports">Ports</h3>
+<table>
+<thead>
+<tr>
+<th><strong>Port</strong></th>
+<th><strong>Description</strong></th>
+<th><strong>Type</strong></th>
+<th><strong>Mandatory</strong></th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><em>in</em></td>
+<td>Tuples that need to be parsed are received on this port</td>
+<td>byte[]</td>
+<td>Yes</td>
+</tr>
+<tr>
+<td><em>out</em></td>
+<td>Valid tuples that are emitted as POJOs. Tuples are converted to POJOs only if the port is connected.</td>
+<td>Object (POJO)</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>parsedOutput</em></td>
+<td>Valid tuples that are emitted as DOM Documents. Tuples are converted to DOM Documents only if the port is connected.</td>
+<td>DOM Document</td>
+<td>No</td>
+</tr>
+<tr>
+<td><em>err</em></td>
+<td>Invalid tuples are emitted with an error message. Invalid tuples are discarded if the port is not connected.</td>
+<td>KeyValPair &lt;String, String></td>
+<td>No</td>
+</tr>
+</tbody>
+</table>
+<h2 id="partitioning">Partitioning</h2>
+<p>XML Parser is both statically and dynamically partitionable.</p>
+<h3 id="static-partitioning">Static Partitioning</h3>
+<p>This can be achieved in 2 ways</p>
+<ol>
+<li>Specifying the partitioner and number of partitions in the 'populateDAG()' method.</li>
+</ol>
+<pre><code class="java">XmlParser xmlParser = dag.addOperator(&quot;xmlParser&quot;, XmlParser.class);
+StatelessPartitioner&lt;XmlParser&gt; partitioner1 = new StatelessPartitioner&lt;XmlParser&gt;(2);
+dag.setAttribute(xmlParser, Context.OperatorContext.PARTITIONER, partitioner1 );
+</code></pre>
+
+<ol>
+<li>Specifying the partitioner and number of partitions in properties file.</li>
+</ol>
+<pre><code class="xml"> &lt;property&gt;
+   &lt;name&gt;dt.operator.{OperatorName}.attr.PARTITIONER&lt;/name&gt;
+   &lt;value&gt;com.datatorrent.common.partitioner.StatelessPartitioner:2&lt;/value&gt;
+ &lt;/property&gt;
+</code></pre>
+
+<p>where {OperatorName} is the name of the XmlParser operator.
+ The above lines will create 2 static partitions of XmlParser. The value can be changed accordingly to change the number of static partitions.</p>
+<h3 id="dynamic-partitioning">Dynamic Partitioning</h3>
+<p>XmlParser can be dynamically partitioned using an out-of-the-box partitioner:</p>
+<h4 id="throughput-based">Throughput based</h4>
+<p>Following code can be added to 'populateDAG' method of application to dynamically partition XmlParser:</p>
+<pre><code class="java">XmlParser xmlParser = dag.addOperator(&quot;xmlParser&quot;, XmlParser.class);
+StatelessThroughputBasedPartitioner&lt;XmlParser&gt; partitioner = new StatelessThroughputBasedPartitioner&lt;&gt;();
+partitioner.setCooldownMillis(conf.getLong(&quot;dt.cooldown&quot;, 10000));
+partitioner.setMaximumEvents(conf.getLong(&quot;dt.maxThroughput&quot;, 30000));
+partitioner.setMinimumEvents(conf.getLong(&quot;dt.minThroughput&quot;, 10000));
+dag.setAttribute(xmlParser, OperatorContext.STATS_LISTENERS, Arrays.asList(new StatsListener[]{partitioner}));
+dag.setAttribute(xmlParser, OperatorContext.PARTITIONER, partitioner);
+</code></pre>
+
+<p>The above code will dynamically partition XmlParser when the throughput changes.
+If the overall throughput of XmlParser goes above 30000 or falls below 10000, the platform will repartition XmlParser
+to balance the throughput of a single partition to be between 10000 and 30000.
+'dt.cooldown' of 10000 will be used as the threshold time for which the throughput change is observed.</p>
+<h2 id="example">Example</h2>
+<p>Example for Xml Parser can be found at: <a href="https://github.com/DataTorrent/examples/tree/master/tutorials/parser">https://github.com/DataTorrent/examples/tree/master/tutorials/parser</a></p>
+              
+            </div>
+          </div>
+          <footer>
+  
+    <div class="rst-footer-buttons" role="navigation" aria-label="footer navigation">
+      
+      
+        <a href="../windowedOperator/" class="btn btn-neutral" title="Windowed Operator"><span class="icon icon-circle-arrow-left"></span> Previous</a>
+      
+    </div>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+        <span><a href="../windowedOperator/" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/search.html b/content/docs/malhar-3.8/search.html
new file mode 100644
index 0000000..b4ad638
--- /dev/null
+++ b/content/docs/malhar-3.8/search.html
@@ -0,0 +1,312 @@
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  
+  
+  <title>Apache Apex Malhar Documentation</title>
+  
+
+  <link rel="shortcut icon" href="favicon.ico">
+  
+
+  
+  <link href='https://fonts.googleapis.com/css?family=Lato:400,700|Roboto+Slab:400,700|Inconsolata:400,700' rel='stylesheet' type='text/css'>
+
+  <link rel="stylesheet" href="./css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="./css/theme_extra.css" type="text/css" />
+  <link rel="stylesheet" href="./css/highlight.css">
+
+  
+  <script src="./js/jquery-2.1.1.min.js"></script>
+  <script src="./js/modernizr-2.8.3.min.js"></script>
+  <script type="text/javascript" src="./js/highlight.pack.js"></script>
+  <script src="./js/theme.js"></script>
+  <script>var base_url = '.';</script>
+  <script data-main="./mkdocs/js/search.js" src="./mkdocs/js/require.js"></script>
+
+
+  
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side stickynav">
+      <div class="wy-side-nav-search">
+        <a href="." class="icon icon-home"> Apache Apex Malhar Documentation</a>
+        <div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="./search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
+      </div>
+
+      <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+        <ul class="current">
+          
+            <li>
+    <li class="toctree-l1 ">
+        <a class="" href=".">Apache Apex Malhar</a>
+        
+    </li>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>APIs</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="apis/calcite/">SQL</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+            <li>
+    <ul class="subnav">
+    <li><span>Operators</span></li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/block_reader/">Block Reader</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/csvformatter/">CSV Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/csvParserOperator/">CSV Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/deduper/">Deduper</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/enricher/">Enricher</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/fsInputOperator/">File Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/file_output/">File Output</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/file_splitter/">File Splitter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/filter/">Filter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/fixedWidthParserOperator/">Fixed Width Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/ftpInputOperator/">FTP Input Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/AbstractJdbcTransactionableOutputOperator/">Jdbc Output Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jdbcPollInputOperator/">JDBC Poller Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jmsInputOperator/">JMS Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jsonFormatter/">JSON Formatter</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/jsonParser/">JSON Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/kafkaInputOperator/">Kafka Input</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/regexparser/">Regex Parser</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/s3outputmodule/">S3 Output Module</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/transform/">Transformer</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/windowedOperator/">Windowed Operator</a>
+        
+    </li>
+
+        
+            
+    <li class="toctree-l1 ">
+        <a class="" href="operators/xmlParserOperator/">XML Parser</a>
+        
+    </li>
+
+        
+    </ul>
+<li>
+          
+        </ul>
+      </div>
+      &nbsp;
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+        <a href=".">Apache Apex Malhar Documentation</a>
+      </nav>
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          <div role="navigation" aria-label="breadcrumbs navigation">
+  <ul class="wy-breadcrumbs">
+    <li><a href=".">Docs</a> &raquo;</li>
+    
+    
+    <li class="wy-breadcrumbs-aside">
+      
+    </li>
+  </ul>
+  <hr/>
+</div>
+          <div role="main">
+            <div class="section">
+              
+
+  <h1 id="search">Search Results</h1>
+
+  <form id="content_search" action="search.html">
+    <span role="status" aria-live="polite" class="ui-helper-hidden-accessible"></span>
+    <input name="q" id="mkdocs-search-query" type="text" class="search_input search-query ui-autocomplete-input" placeholder="Search the Docs" autocomplete="off" autofocus>
+  </form>
+
+  <div id="mkdocs-search-results">
+    Searching...
+  </div>
+
+
+            </div>
+          </div>
+          <footer>
+  
+
+  <hr/>
+
+  <div role="contentinfo">
+    <!-- Copyright etc -->
+    
+  </div>
+
+  Built with <a href="http://www.mkdocs.org">MkDocs</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+</footer>
+	  
+        </div>
+      </div>
+
+    </section>
+
+  </div>
+
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      
+      
+      
+    </span>
+</div>
+
+</body>
+</html>
diff --git a/content/docs/malhar-3.8/searchbox.html b/content/docs/malhar-3.8/searchbox.html
new file mode 100644
index 0000000..177fcb3
--- /dev/null
+++ b/content/docs/malhar-3.8/searchbox.html
@@ -0,0 +1,5 @@
+<div role="search">
+  <form id ="rtd-search-form" class="wy-form" action="{{ base_url }}/search.html" method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+  </form>
+</div>
diff --git a/content/docs/malhar-3.8/sitemap.xml b/content/docs/malhar-3.8/sitemap.xml
new file mode 100644
index 0000000..b8d3591
--- /dev/null
+++ b/content/docs/malhar-3.8/sitemap.xml
@@ -0,0 +1,158 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+
+    
+    <url>
+     <loc>/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+    
+
+    
+        
+    <url>
+     <loc>/apis/calcite/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    
+
+    
+        
+    <url>
+     <loc>/operators/block_reader/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/csvformatter/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/csvParserOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/deduper/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/enricher/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/fsInputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/file_output/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/file_splitter/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/filter/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/fixedWidthParserOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/ftpInputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/AbstractJdbcTransactionableOutputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/jdbcPollInputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/jmsInputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/jsonFormatter/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/jsonParser/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/kafkaInputOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/regexparser/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/s3outputmodule/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/transform/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/windowedOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    <url>
+     <loc>/operators/xmlParserOperator/</loc>
+     <lastmod>2017-11-05</lastmod>
+     <changefreq>daily</changefreq>
+    </url>
+        
+    
+
+</urlset>
\ No newline at end of file
diff --git a/content/docs/malhar-3.8/toc.html b/content/docs/malhar-3.8/toc.html
new file mode 100644
index 0000000..6cd2fc9
--- /dev/null
+++ b/content/docs/malhar-3.8/toc.html
@@ -0,0 +1,23 @@
+{% if nav_item.children %}
+    <ul class="subnav">
+    <li><span>{{ nav_item.title }}</span></li>
+
+        {% for nav_item in nav_item.children %}
+            {% include 'toc.html' %}
+        {% endfor %}
+    </ul>
+{% else %}
+    <li class="toctree-l1 {% if nav_item.active%}current{%endif%}">
+        <a class="{% if nav_item.active%}current{%endif%}" href="{{ nav_item.url }}">{{ nav_item.title }}</a>
+        {% if nav_item == current_page %}
+            <ul>
+            {% for toc_item in toc %}
+                <li class="toctree-l3"><a href="{{ toc_item.url }}">{{ toc_item.title }}</a></li>
+                {% for toc_item in toc_item.children %}
+                    <li><a class="toctree-l4" href="{{ toc_item.url }}">{{ toc_item.title }}</a></li>
+                {% endfor %}
+            {% endfor %}
+            </ul>
+        {% endif %}
+    </li>
+{% endif %}
diff --git a/content/docs/malhar-3.8/versions.html b/content/docs/malhar-3.8/versions.html
new file mode 100644
index 0000000..d12d197
--- /dev/null
+++ b/content/docs/malhar-3.8/versions.html
@@ -0,0 +1,15 @@
+<div class="rst-versions" role="note" style="cursor: pointer">
+    <span class="rst-current-version" data-toggle="rst-current-version">
+      {% if repo_name == 'GitHub' %}
+          <a href="{{ repo_url }}" class="icon icon-github" style="float: left; color: #fcfcfc"> GitHub</a>
+      {% elif repo_name == 'Bitbucket' %}
+          <a href="{{ repo_url }}" class="icon icon-bitbucket" style="float: left; color: #fcfcfc"> BitBucket</a>
+      {% endif %}
+      {% if previous_page %}
+        <span><a href="{{ previous_page.url }}" style="color: #fcfcfc;">&laquo; Previous</a></span>
+      {% endif %}
+      {% if next_page %}
+        <span style="margin-left: 15px"><a href="{{ next_page.url }}" style="color: #fcfcfc">Next &raquo;</a></span>
+      {% endif %}
+    </span>
+</div>

-- 
To stop receiving notification emails like this one, please contact
"commits@apex.apache.org" <co...@apex.apache.org>.